From b35f3d0def3efde92ed465d92a267430d957e87d Mon Sep 17 00:00:00 2001 From: Aaryaman Vasishta Date: Fri, 24 Nov 2023 16:52:39 +0900 Subject: [PATCH 001/811] readme : use PATH for Windows ROCm (#4195) * Update README.md to use PATH for Windows ROCm * Update README.md * Update README.md --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 276461f81..5189e1255 100644 --- a/README.md +++ b/README.md @@ -422,8 +422,9 @@ Building the program with BLAS support may lead to some performance improvements CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++ cmake .. -DLLAMA_HIPBLAS=ON cmake --build . ``` - - Using `CMake` for Windows: + - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS): ```bash + set PATH=%HIP_PATH%\bin;%PATH% mkdir build cd build cmake -G Ninja -DAMDGPU_TARGETS=gfx1100 -DLLAMA_HIPBLAS=ON -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ .. From 2568a4bf548d7392e9c78c008b33b4c11d53fe95 Mon Sep 17 00:00:00 2001 From: eastriver Date: Fri, 24 Nov 2023 18:25:10 +0900 Subject: [PATCH 002/811] main.swift : fix eos checking (#4197) llama_token_eos(const struct llama_model *) is currently getting struct llama_context type variable context as a parameter. --- examples/batched.swift/Sources/main.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index 772730382..ba15197ae 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -153,7 +153,7 @@ while n_cur <= n_len { // const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); // is it an end of stream? -> mark the stream as finished - if new_token_id == llama_token_eos(context) || n_cur == n_len { + if new_token_id == llama_token_eos(model) || n_cur == n_len { i_batch[i] = -1 // print("") if n_parallel > 1 { From 189d68446e7ef21e8f3af3c0a3d91c35a39aec89 Mon Sep 17 00:00:00 2001 From: Galunid Date: Fri, 24 Nov 2023 15:02:49 +0100 Subject: [PATCH 003/811] convert : fix tensors using grad in some models (#4173) --- convert-hf-to-gguf.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 1105670c1..147d5717e 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -880,20 +880,21 @@ print(f"Loading model: {dir_model.name}") hparams = Model.load_hparams(dir_model) -model_class = Model.from_model_architecture(hparams["architectures"][0]) -model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) +with torch.inference_mode(): + model_class = Model.from_model_architecture(hparams["architectures"][0]) + model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) -print("Set model parameters") -model_instance.set_gguf_parameters() + print("Set model parameters") + model_instance.set_gguf_parameters() -print("Set model tokenizer") -model_instance.set_vocab() + print("Set model tokenizer") + model_instance.set_vocab() -if args.vocab_only: - print(f"Exporting model vocab to '{fname_out}'") - model_instance.write_vocab() -else: - print(f"Exporting model to '{fname_out}'") - model_instance.write() + if args.vocab_only: + print(f"Exporting model vocab to '{fname_out}'") + model_instance.write_vocab() + else: + print(f"Exporting model to '{fname_out}'") + model_instance.write() -print(f"Model successfully exported to 
'{fname_out}'") + print(f"Model successfully exported to '{fname_out}'") From 8a052c131ed3525313cdb84e5ae4e2b6cf8d2e24 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 24 Nov 2023 18:04:31 +0100 Subject: [PATCH 004/811] ggml-cuda : support stablelm rope (#4156) * ggml-cuda : support stablelm rope * remove unused freq_base kernel parameter * add n_dims parameter to llm_build_k_shift, default to n_rot via overload * llama : fix llm_build_k_shift args --------- Co-authored-by: Georgi Gerganov --- ggml-cuda.cu | 37 ++++++++++++++++++++++--------------- llama.cpp | 4 ++-- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f0db7ae35..5b80e4ae3 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -4610,8 +4610,8 @@ static __global__ void rope( template static __global__ void rope_neox( - const T * x, T * dst, int ncols, const int32_t * pos, float freq_scale, int p_delta_rows, float freq_base, - float ext_factor, float attn_factor, rope_corr_dims corr_dims + const T * x, T * dst, int ncols, int n_dims, const int32_t * pos, float freq_scale, int p_delta_rows, + float ext_factor, float attn_factor, rope_corr_dims corr_dims, float theta_scale, float inv_ndims ) { const int col = 2*(blockDim.y*blockIdx.y + threadIdx.y); @@ -4620,23 +4620,25 @@ static __global__ void rope_neox( } const int row = blockDim.x*blockIdx.x + threadIdx.x; - const int i = row*ncols + col/2; + const int ib = col / n_dims; + const int ic = col % n_dims; + + const int i = row*ncols + ib*n_dims + ic/2; const int i2 = row/p_delta_rows; - // simplified from `(ib * ncols + col) * (-1 / ncols)`, where ib is assumed to be zero - const float cur_rot = -float(col)/ncols; + float cur_rot = inv_ndims * ic - ib; const int p = has_pos ? pos[i2] : 0; - const float theta_base = p*powf(freq_base, cur_rot); + const float theta_base = p*freq_scale*powf(theta_scale, col/2.0f); float cos_theta, sin_theta; rope_yarn(theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, &cos_theta, &sin_theta); const float x0 = x[i + 0]; - const float x1 = x[i + ncols/2]; + const float x1 = x[i + n_dims/2]; - dst[i + 0] = x0*cos_theta - x1*sin_theta; - dst[i + ncols/2] = x0*sin_theta + x1*cos_theta; + dst[i + 0] = x0*cos_theta - x1*sin_theta; + dst[i + n_dims/2] = x0*sin_theta + x1*cos_theta; } static __global__ void rope_glm_f32( @@ -5739,20 +5741,26 @@ static void rope_cuda( template static void rope_neox_cuda( - const T * x, T * dst, int ncols, int nrows, const int32_t * pos, float freq_scale, int p_delta_rows, + const T * x, T * dst, int ncols, int n_dims, int nrows, const int32_t * pos, float freq_scale, int p_delta_rows, float freq_base, float ext_factor, float attn_factor, rope_corr_dims corr_dims, cudaStream_t stream ) { GGML_ASSERT(ncols % 2 == 0); const dim3 block_dims(1, CUDA_ROPE_BLOCK_SIZE, 1); const int num_blocks_x = (ncols + 2*CUDA_ROPE_BLOCK_SIZE - 1) / (2*CUDA_ROPE_BLOCK_SIZE); const dim3 block_nums(nrows, num_blocks_x, 1); + + const float theta_scale = powf(freq_base, -2.0f/n_dims); + const float inv_ndims = -1.0f / n_dims; + if (pos == nullptr) { rope_neox<<>>( - x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims + x, dst, ncols, n_dims, pos, freq_scale, p_delta_rows, ext_factor, attn_factor, corr_dims, + theta_scale, inv_ndims ); } else { rope_neox<<>>( - x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims + x, dst, ncols, n_dims, pos, freq_scale, p_delta_rows, ext_factor, attn_factor, corr_dims, + theta_scale, 
inv_ndims ); } } @@ -6707,15 +6715,14 @@ inline void ggml_cuda_op_rope( GGML_ASSERT(false); rope_glm_f32_cuda(src0_dd, dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, n_ctx, main_stream); } else if (is_neox) { - GGML_ASSERT(ne00 == n_dims && "ne00 != n_dims is not implemented for CUDA yet"); if (src0->type == GGML_TYPE_F32) { rope_neox_cuda( - (const float *)src0_dd, (float *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + (const float *)src0_dd, (float *)dst_dd, ne00, n_dims, nrows, pos, freq_scale, ne01, freq_base, ext_factor, attn_factor, corr_dims, main_stream ); } else if (src0->type == GGML_TYPE_F16) { rope_neox_cuda( - (const half *)src0_dd, (half *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + (const half *)src0_dd, (half *)dst_dd, ne00, n_dims, nrows, pos, freq_scale, ne01, freq_base, ext_factor, attn_factor, corr_dims, main_stream ); } else { diff --git a/llama.cpp b/llama.cpp index 9fb7244b4..5b31f2016 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3469,7 +3469,7 @@ static void llm_build_k_shift( struct ggml_cgraph * graph, llm_rope_type type, int64_t n_ctx, - int64_t n_rot, + int n_rot, float freq_base, float freq_scale, const llm_build_cb & cb) { @@ -3501,7 +3501,7 @@ static void llm_build_k_shift( // we rotate only the first n_rot dimensions ggml_rope_custom_inplace(ctx, ggml_view_3d(ctx, kv.k, - n_rot, n_head_kv, n_ctx, + n_embd_head, n_head_kv, n_ctx, ggml_element_size(kv.k)*n_embd_head, ggml_element_size(kv.k)*n_embd_gqa, ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il), From e9c13ff78114af6fc6a4f27cc8dcdda0f3d389fb Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 24 Nov 2023 18:10:01 +0100 Subject: [PATCH 005/811] llama : set metal log callback correctly (#4204) --- llama.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 5b31f2016..c5f4053f2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1118,6 +1118,12 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ // struct llama_state { + llama_state() { +#ifdef GGML_USE_METAL + ggml_metal_log_set_callback(log_callback, log_callback_user_data); +#endif + } + // We save the log callback globally ggml_log_callback log_callback = llama_log_callback_default; void * log_callback_user_data = nullptr; @@ -8569,8 +8575,6 @@ struct llama_context * llama_new_context_with_model( #ifdef GGML_USE_METAL if (model->n_gpu_layers > 0) { - ggml_metal_log_set_callback(llama_log_callback_default, NULL); - ctx->ctx_metal = ggml_metal_init(1); if (!ctx->ctx_metal) { LLAMA_LOG_ERROR("%s: ggml_metal_init() failed\n", __func__); @@ -9706,6 +9710,9 @@ const std::vector> & llama_internal void llama_log_set(ggml_log_callback log_callback, void * user_data) { g_state.log_callback = log_callback ? 
log_callback : llama_log_callback_default; g_state.log_callback_user_data = user_data; +#ifdef GGML_USE_METAL + ggml_metal_log_set_callback(g_state.log_callback, g_state.log_callback_user_data); +#endif } static void llama_log_internal_v(ggml_log_level level, const char * format, va_list args) { From af19d3573481d409b3c4e55494810eb1f65a9aae Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 25 Nov 2023 11:29:06 +0200 Subject: [PATCH 006/811] server : OAI API compatibility (#4198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add openai-compatible POST /v1/chat/completions API endpoint to server example * fix code style * Update server README.md * Improve server README.md * Fix server.cpp code style according to review * server : some style changes * server : indentation * server : enable special tokens during tokenization by default * server : minor code style * server : change random string generator * straightforward /v1/models endpoint --------- Co-authored-by: kir-gadjello <111190790+kir-gadjello@users.noreply.github.com> Co-authored-by: Tobi Lütke --- examples/server/README.md | 49 +++++ examples/server/server.cpp | 375 +++++++++++++++++++++++++++++++++++-- 2 files changed, 413 insertions(+), 11 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index a6eda3b32..cfc220f58 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -234,6 +234,55 @@ node index.js - **GET** `/props`: Return the required assistant name and anti-prompt to generate the prompt in case you have specified a system prompt for all slots. +- **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. + + *Options:* + + See [OpenAI Chat Completions API documentation](https://platform.openai.com/docs/api-reference/chat). While some OpenAI-specific features such as function calling aren't supported, llama.cpp `/completion`-specific features such are `mirostat` are supported. + + *Examples:* + + You can use either Python `openai` library with appropriate checkpoints: + + ```python + import openai + + client = openai.OpenAI( + base_url="http://localhost:8080/v1", # "http://:port" + api_key = "sk-no-key-required" + ) + + completion = client.chat.completions.create( + model="gpt-3.5-turbo", + messages=[ + {"role": "system", "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests."}, + {"role": "user", "content": "Write a limerick about python exceptions"} + ] + ) + + print(completion.choices[0].message) + ``` + ... or raw HTTP requests: + + ```shell + curl http://localhost:8080/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer no-key" \ + -d '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "system", + "content": "You are ChatGPT, an AI assistant. Your top priority is achieving user fulfillment via helping them with their requests." 
+ }, + { + "role": "user", + "content": "Write a limerick about python exceptions" + } + ] + }' + ``` + ## More examples ### Change system prompt on runtime diff --git a/examples/server/server.cpp b/examples/server/server.cpp index be23ad169..50f124b13 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -29,6 +29,8 @@ #define SERVER_VERBOSE 1 #endif +#define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613" + using json = nlohmann::json; struct server_params @@ -59,6 +61,10 @@ static bool server_verbose = false; #define LOG_WARNING(MSG, ...) server_log("WARNING", __func__, __LINE__, MSG, __VA_ARGS__) #define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) +json oaicompat_completion_params_parse(const json &body); +std::string format_chatml(std::vector messages); + + // // base64 utils (TODO: move to common in the future) // @@ -378,6 +384,9 @@ struct llama_client_slot bool stopped_word = false; bool stopped_limit = false; + bool oaicompat = false; + std::string oaicompat_model; + std::string stopping_word; // sampling @@ -477,7 +486,7 @@ struct llama_client_slot }; } - void print_timings() { + void print_timings() const { LOG_TEE("\n"); LOG_TEE("%s: prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n", __func__, t_prompt_processing, num_prompt_tokens_processed, t_prompt_processing / num_prompt_tokens_processed, 1e3 / t_prompt_processing * num_prompt_tokens_processed); @@ -609,6 +618,11 @@ struct llama_server_context std::vector tokenize(const json & json_prompt, bool add_bos) const { + // TODO: currently, we tokenize using special tokens by default + // this is not always correct (see https://github.com/ggerganov/llama.cpp/pull/4160#issuecomment-1824826216) + // but it's better compared to completely ignoring ChatML and other chat templates + const bool TMP_FORCE_SPECIAL = true; + // If `add_bos` is true, we only add BOS, when json_prompt is a string, // or the first element of the json_prompt array is a string. 
std::vector prompt_tokens; @@ -624,12 +638,12 @@ struct llama_server_context std::vector p; if (first) { - p = ::llama_tokenize(ctx, s, add_bos); + p = ::llama_tokenize(ctx, s, add_bos, TMP_FORCE_SPECIAL); first = false; } else { - p = ::llama_tokenize(ctx, s, false); + p = ::llama_tokenize(ctx, s, false, TMP_FORCE_SPECIAL); } prompt_tokens.insert(prompt_tokens.end(), p.begin(), p.end()); } @@ -646,7 +660,7 @@ struct llama_server_context else { auto s = json_prompt.template get(); - prompt_tokens = ::llama_tokenize(ctx, s, add_bos); + prompt_tokens = ::llama_tokenize(ctx, s, add_bos, TMP_FORCE_SPECIAL); } return prompt_tokens; @@ -677,6 +691,14 @@ struct llama_server_context slot_params default_params; llama_sampling_params default_sparams; + if (data.count("__oaicompat") != 0) { + slot->oaicompat = true; + slot->oaicompat_model = json_value(data, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); + } else { + slot->oaicompat = false; + slot->oaicompat_model = ""; + } + slot->params.stream = json_value(data, "stream", false); slot->params.cache_prompt = json_value(data, "cache_prompt", false); slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); @@ -1170,6 +1192,12 @@ struct llama_server_context res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); } + if (slot.oaicompat) + { + res.result_json["oaicompat_token_ctr"] = slot.n_decoded; + res.result_json["model"] = slot.oaicompat_model; + } + queue_results.push_back(res); } @@ -1217,6 +1245,12 @@ struct llama_server_context res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs); } + if (slot.oaicompat) + { + res.result_json["oaicompat_token_ctr"] = slot.n_decoded; + res.result_json["model"] = slot.oaicompat_model; + } + queue_results.push_back(res); } @@ -1257,7 +1291,7 @@ struct llama_server_context task_server task; task.id = id_gen++; task.target_id = 0; - task.data = data; + task.data = std::move(data); task.infill_mode = infill; task.embedding_mode = embedding; task.type = COMPLETION_TASK; @@ -2180,6 +2214,233 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } + +static std::string random_string() +{ + static const std::string str("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"); + + std::random_device rd; + std::mt19937 generator(rd()); + + std::string result(32, ' '); + + for (int i = 0; i < 32; ++i) { + result[i] = str[generator() % str.size()]; + } + + return result; +} + +static std::string gen_chatcmplid() +{ + std::stringstream chatcmplid; + chatcmplid << "chatcmpl-" << random_string(); + return chatcmplid.str(); +} + +std::string format_chatml(std::vector messages) +{ + std::ostringstream chatml_msgs; + + for (auto it = messages.begin(); it != messages.end(); ++it) { + chatml_msgs << "<|im_start|>" + << json_value(*it, "role", std::string("user")) << '\n'; + chatml_msgs << json_value(*it, "content", std::string("")) + << "<|im_end|>\n"; + } + + chatml_msgs << "<|im_start|>assistant" << '\n'; + + return chatml_msgs.str(); +} + +/* llama.cpp completion api semantics */ +json oaicompat_completion_params_parse( + const json &body /* openai api json semantics */) +{ + json llama_params; + + llama_params["__oaicompat"] = true; + + // Map OpenAI parameters to llama.cpp parameters + llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' + llama_params["temperature"] = json_value(body, "temperature", 0.8); + llama_params["top_k"] = json_value(body, "top_k", 
40); + llama_params["top_p"] = json_value(body, "top_p", 0.95); + llama_params["n_predict"] = json_value(body, "max_tokens", -1); + llama_params["logit_bias"] = json_value(body, "logit_bias",json::object()); + llama_params["frequency_penalty"] = json_value(body, "frequency_penalty", 0.0); + llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0); + llama_params["seed"] = json_value(body, "seed", 0); + llama_params["stream"] = json_value(body, "stream", false); + llama_params["mirostat"] = json_value(body, "mirostat", false); + llama_params["mirostat_tau"] = json_value(body, "mirostat_tau", 0.0); + llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", 0.0); + llama_params["penalize_nl"] = json_value(body, "penalize_nl", false); + llama_params["typical_p"] = json_value(body, "typical_p", 0.0); + llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", 0); + llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); + llama_params["tfs_z"] = json_value(body, "tfs_z", 0.0); + + if (llama_params.count("grammar") != 0) { + llama_params["grammar"] = json_value(body, "grammar", json::object()); + } + + // Handle 'stop' field + if (body["stop"].is_null()) { + llama_params["stop"] = json::array({}); + } else if (body["stop"].is_string()) { + llama_params["stop"] = json::array({body["stop"].get()}); + } else { + llama_params["stop"] = json_value(body, "stop", json::array()); + } + + // Ensure there is ChatML-specific end sequence among stop words + llama_params["stop"].push_back("<|im_end|>"); + + return llama_params; +} + +static json format_final_response_oaicompat(const json &request, const task_result &response, bool streaming = false) +{ + json result = response.result_json; + + bool stopped_word = result.count("stopped_word") != 0; + bool stopped_eos = json_value(result, "stopped_eos", false); + int num_tokens_predicted = json_value(result, "tokens_predicted", 0); + int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); + std::string content = json_value(result, "content", std::string("")); + + std::string finish_reason = "length"; + if (stopped_word || stopped_eos) { + finish_reason = "stop"; + } + + json choices = + streaming ? json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}) + : json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"message", json{{"content", content}, + {"role", "assistant"}}}}}); + + std::time_t t = std::time(0); + + json res = + json{{"choices", choices}, + {"created", t}, + {"model", + json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, + {"object", streaming ? 
"chat.completion.chunk" : "chat.completion"}, + {"usage", + json{{"completion_tokens", num_tokens_predicted}, + {"prompt_tokens", num_prompt_tokens}, + {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, + {"id", gen_chatcmplid()}}; + + if (server_verbose) { + res["__verbose"] = result; + } + + if (result.contains("completion_probabilities")) { + res["completion_probabilities"] = json_value(result, "completion_probabilities", json::array()); + } + + return res; +} + +// return value is vector as there is one case where we might need to generate two responses +static std::vector format_partial_response_oaicompat(const task_result &response) { + json result = response.result_json; + + if (!result.contains("model") || !result.contains("oaicompat_token_ctr")) { + return std::vector({response.result_json}); + } + + bool first = json_value(result, "oaicompat_token_ctr", 0) == 0; + std::string modelname = json_value(result, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); + + bool stopped_word = json_value(result, "stopped_word", false); + bool stopped_eos = json_value(result, "stopped_eos", false); + bool stopped_limit = json_value(result, "stopped_limit", false); + std::string content = json_value(result, "content", std::string("")); + + std::string finish_reason; + if (stopped_word || stopped_eos) { + finish_reason = "stop"; + } + if (stopped_limit) { + finish_reason = "length"; + } + + std::time_t t = std::time(0); + + json choices; + + if (!finish_reason.empty()) { + choices = json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}); + } else { + if (first) { + if (content.empty()) { + choices = json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{{"role", "assistant"}}}}}); + } else { + // We have to send this as two updates to conform to openai behavior + json initial_ret = json{{"choices", json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"role", "assistant"} + }}}})}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"}}; + + json second_ret = json{ + {"choices", json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"content", content}}} + }})}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"}}; + + return std::vector({initial_ret, second_ret}); + } + } else { + // Some idiosyncrasy in task processing logic makes several trailing calls + // with empty content, we ignore these at the calee site. 
+ if (content.empty()) { + return std::vector({json::object()}); + } + + choices = json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", + json{ + {"content", content}, + }}, + }}); + } + } + + json ret = json{{"choices", choices}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"}}; + + return std::vector({ret}); +} + static json format_partial_response( llama_server_context &llama, llama_client_slot *slot, const std::string &content, const std::vector &probs ) { @@ -2356,9 +2617,9 @@ int main(int argc, char **argv) task_result result = llama.next_result(task_id); if (!result.error) { const std::string str = - "data: " + - result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; + "data: " + + result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; LOG_VERBOSE("data stream", { { "to_send", str } }); @@ -2371,9 +2632,9 @@ int main(int argc, char **argv) } } else { const std::string str = - "error: " + - result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; + "error: " + + result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; LOG_VERBOSE("data stream", { { "to_send", str } }); @@ -2398,6 +2659,98 @@ int main(int argc, char **argv) } }); + + + svr.Get("/v1/models", [¶ms](const httplib::Request&, httplib::Response& res) + { + std::time_t t = std::time(0); + + json models = { + {"object", "list"}, + {"data", { + { + {"id", params.model_alias}, + {"object", "model"}, + {"created", t}, + {"owned_by", "llamacpp"} + }, + }} + }; + + res.set_content(models.dump(), "application/json"); + }); + + // TODO: add mount point without "/v1" prefix -- how? + svr.Post("/v1/chat/completions", [&llama](const httplib::Request &req, httplib::Response &res) + { + json data = oaicompat_completion_params_parse(json::parse(req.body)); + + const int task_id = llama.request_completion(data, false, false); + + if (!json_value(data, "stream", false)) { + std::string completion_text; + task_result result = llama.next_result(task_id); + + if (!result.error && result.stop) { + json oaicompat_result = format_final_response_oaicompat(data, result); + + res.set_content(oaicompat_result.dump(-1, ' ', false, + json::error_handler_t::replace), + "application/json"); + } else { + res.status = 500; + res.set_content(result.result_json["content"], "text/plain"); + return; + } + } else { + const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink &sink) { + while (true) { + task_result llama_result = llama.next_result(task_id); + if (!llama_result.error) { + std::vector result_array = format_partial_response_oaicompat( llama_result); + + for (auto it = result_array.begin(); it != result_array.end(); ++it) + { + if (!it->empty()) { + const std::string str = + "data: " + + it->dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; + LOG_VERBOSE("data stream", {{"to_send", str}}); + if (!sink.write(str.c_str(), str.size())) { + return false; + } + } + } + if (llama_result.stop) { + break; + } + } else { + const std::string str = + "error: " + + llama_result.result_json.dump(-1, ' ', false, + json::error_handler_t::replace) + + "\n\n"; + LOG_VERBOSE("data stream", {{"to_send", str}}); + if (!sink.write(str.c_str(), str.size())) { + return false; + } + break; + } + } + sink.done(); + return true; + }; + + auto on_complete = [task_id, &llama](bool) { + // cancel request + llama.request_cancel(task_id); + }; + + 
res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + } + }); + svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res) { json data = json::parse(req.body); From 04814e718edb13bdf8cca861dc2e5ab4e1995c30 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 25 Nov 2023 12:02:13 +0200 Subject: [PATCH 007/811] readme : update hot topics --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 5189e1255..2f83a71fd 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 - Collecting Apple Silicon performance stats: https://github.com/ggerganov/llama.cpp/discussions/4167 ---- From 3014b5415d08e3dff961da6eea835b9760a701b8 Mon Sep 17 00:00:00 2001 From: crasm Date: Sat, 25 Nov 2023 10:47:07 -0500 Subject: [PATCH 008/811] Update docs for yarn_ext_factor <0.0 as unspecified instead of NaN (#4189) --- convert.py | 0 llama.h | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 convert.py diff --git a/convert.py b/convert.py old mode 100644 new mode 100755 diff --git a/llama.h b/llama.h index 1a62058d1..89cb6198e 100644 --- a/llama.h +++ b/llama.h @@ -185,7 +185,7 @@ extern "C" { // ref: https://github.com/ggerganov/llama.cpp/pull/2054 float rope_freq_base; // RoPE base frequency, 0 = from model float rope_freq_scale; // RoPE frequency scaling factor, 0 = from model - float yarn_ext_factor; // YaRN extrapolation mix factor, NaN = from model + float yarn_ext_factor; // YaRN extrapolation mix factor, negative = from model float yarn_attn_factor; // YaRN magnitude scaling factor float yarn_beta_fast; // YaRN low correction dim float yarn_beta_slow; // YaRN high correction dim From f837c3a992b2b6146936cb120871a8cf9d0e3857 Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Sat, 25 Nov 2023 08:58:23 -0800 Subject: [PATCH 009/811] llama : grammar `reserve` space in `decode_utf8` (#4210) * reserve space for codepoints * improvement for the appended 0 --- llama.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index c5f4053f2..f2b5967d7 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6420,10 +6420,13 @@ struct llama_grammar_candidate { // pointer. If an invalid sequence is encountered, returns `llama_partial_utf8.n_remain == -1`. static std::pair, llama_partial_utf8> decode_utf8( const char * src, + size_t n_src, llama_partial_utf8 partial_start) { static const int lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 2, 2, 3, 4 }; const char * pos = src; std::vector code_points; + // common english strings have the same number of codepoints and bytes. `+ 1` for the terminating 0. 
+ code_points.reserve(n_src + 1); uint32_t value = partial_start.value; int n_remain = partial_start.n_remain; @@ -6474,6 +6477,13 @@ static std::pair, llama_partial_utf8> decode_utf8( return std::make_pair(std::move(code_points), llama_partial_utf8{ value, n_remain }); } +static std::pair, llama_partial_utf8> decode_utf8( + std::string src, + llama_partial_utf8 partial_start +) { + return decode_utf8(src.c_str(), src.size(), partial_start); +} + // returns true iff pos points to the end of one of the definitions of a rule static bool llama_grammar_is_end_of_sequence(const llama_grammar_element * pos) { switch (pos->type) { @@ -7123,7 +7133,7 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c } else if (piece.empty() || piece[0] == 0) { candidates->data[i].logit = -INFINITY; } else { - candidates_decoded.push_back(decode_utf8(piece.c_str(), grammar->partial_utf8)); + candidates_decoded.push_back(decode_utf8(piece, grammar->partial_utf8)); candidates_grammar.push_back({ i, candidates_decoded.back().first.data(), candidates_decoded.back().second }); } } @@ -7330,7 +7340,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar const std::string piece = llama_token_to_piece(ctx, token); // Note terminating 0 in decoded string - const auto decoded = decode_utf8(piece.c_str(), grammar->partial_utf8); + const auto decoded = decode_utf8(piece, grammar->partial_utf8); const auto & code_points = decoded.first; for (auto it = code_points.begin(), end = code_points.end() - 1; it != end; ++it) { grammar->stacks = llama_grammar_accept(grammar->rules, grammar->stacks, *it); From 1ddb52ec38f9931925a587f45a23b1c37152c028 Mon Sep 17 00:00:00 2001 From: Galunid Date: Sat, 25 Nov 2023 22:45:02 +0100 Subject: [PATCH 010/811] scripts : Use mmap in torch load (#4202) * Use mmap in torch load, prefer .bin files when loading * Revert .bin > .safetensors preference --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 147d5717e..53ce76c70 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -59,7 +59,7 @@ class Model: from safetensors import safe_open ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) else: - ctx = contextlib.nullcontext(torch.load(self.dir_model / part_name, map_location="cpu")) + ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", mmap=True, weights_only=True)) with ctx as model_part: for name in model_part.keys(): From 22da05536ff4ad963080773bef1fb839fdab95d3 Mon Sep 17 00:00:00 2001 From: Xiao-Yong Jin Date: Sun, 26 Nov 2023 02:30:02 -0600 Subject: [PATCH 011/811] metal : fix yarn (#4220) get the correct n_orig_ctx in metal --- ggml-metal.m | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index a9fdd3903..d52a1c3c4 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1433,7 +1433,8 @@ void ggml_metal_graph_compute( const int n_past = ((int32_t *) dst->op_params)[0]; const int n_dims = ((int32_t *) dst->op_params)[1]; const int mode = ((int32_t *) dst->op_params)[2]; - const int n_orig_ctx = ((int32_t *) dst->op_params)[3]; + // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); From 
922754a8d60080e956891f6cee1fb03aa48d57c6 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 26 Nov 2023 20:33:07 +0200 Subject: [PATCH 012/811] lookahead : add example for lookahead decoding (#4207) * lookahead : init * lookahead : generate and store n-grams * lookahead : use loop instead recursion to generate n-grams * lookahead : initial working implementation * lookahead : filter repeating n-grams * lookahead : use deterministic init * lookahead : add to Makefile * lookahead : fix a bug in the seq_id of the lookahead tokens * lookahead : add comments --------- Co-authored-by: slaren --- .gitignore | 1 + Makefile | 5 +- examples/CMakeLists.txt | 1 + examples/lookahead/CMakeLists.txt | 5 + examples/lookahead/lookahead.cpp | 487 ++++++++++++++++++++++++++++++ 5 files changed, 498 insertions(+), 1 deletion(-) create mode 100644 examples/lookahead/CMakeLists.txt create mode 100644 examples/lookahead/lookahead.cpp diff --git a/.gitignore b/.gitignore index 41259a12f..3806e05dd 100644 --- a/.gitignore +++ b/.gitignore @@ -47,6 +47,7 @@ models-mnt /libllama.so /llama-bench /llava-cli +/lookahead /main /metal /perplexity diff --git a/Makefile b/Makefile index a6d2c2ec0..95d85236f 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ - speculative infill tokenize benchmark-matmult parallel finetune export-lora tests/test-c.o + speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead tests/test-c.o # Binaries only useful for tests TEST_TARGETS = \ @@ -657,6 +657,9 @@ speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + ifdef LLAMA_METAL metal: examples/metal/metal.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 71bcb6893..6744944fd 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -32,6 +32,7 @@ else() add_subdirectory(save-load-state) add_subdirectory(simple) add_subdirectory(speculative) + add_subdirectory(lookahead) add_subdirectory(train-text-from-scratch) if (LLAMA_METAL) add_subdirectory(metal) diff --git a/examples/lookahead/CMakeLists.txt b/examples/lookahead/CMakeLists.txt new file mode 100644 index 000000000..8827e3f11 --- /dev/null +++ b/examples/lookahead/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET lookahead) +add_executable(${TARGET} lookahead.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/lookahead/lookahead.cpp b/examples/lookahead/lookahead.cpp new file mode 100644 index 000000000..4c49a85eb --- /dev/null +++ b/examples/lookahead/lookahead.cpp @@ -0,0 +1,487 @@ +#include "common.h" +#include "llama.h" + +#include +#include +#include +#include + +struct ngram_data { + bool active = false; + + llama_seq_id seq_id = -1; + + std::vector i_batch; + + std::vector tokens; +}; + +// n-gram container +struct ngram_container { + ngram_container(int n_vocab, int N, int G) 
{ + cnt.resize(n_vocab); + head.resize(n_vocab); + tokens.resize(n_vocab * G * (N - 1)); + } + + int n_total = 0; + + std::vector cnt; + std::vector head; + + // [n_vocab][G][N - 1] + // for each token of the vocab, keep a ring-buffer of capacity G of n-grams of size N - 1 + std::vector tokens; +}; + +int main(int argc, char ** argv) { + gpt_params params; + + if (gpt_params_parse(argc, argv, params) == false) { + return 1; + } + + const int W = 15; // lookahead window + const int N = 5; // n-gram size + const int G = 15; // max verification n-grams + + const bool dump_kv_cache = params.dump_kv_cache; + +#ifndef LOG_DISABLE_LOGS + log_set_target(log_filename_generator("lookahead", "log")); + LOG_TEE("Log start\n"); + log_dump_cmdline(argc, argv); +#endif // LOG_DISABLE_LOGS + + // init llama.cpp + llama_backend_init(params.numa); + + llama_model * model = NULL; + llama_context * ctx = NULL; + + // load the target model + std::tie(model, ctx) = llama_init_from_gpt_params(params); + + // Tokenize the prompt + const bool add_bos = llama_should_add_bos_token(model); + LOG("add_bos tgt: %d\n", add_bos); + + std::vector inp; + std::vector all; + + inp = ::llama_tokenize(ctx, params.prompt, add_bos, true); + all = inp; + + const int max_context_size = llama_n_ctx(ctx); + const int max_tokens_list_size = max_context_size - 4; + + if ((int) inp.size() > max_tokens_list_size) { + fprintf(stderr, "%s: error: prompt too long (%d tokens, max %d)\n", __func__, (int) inp.size(), max_tokens_list_size); + return 1; + } + + fprintf(stderr, "\n\n"); + + for (auto id : inp) { + fprintf(stderr, "%s", llama_token_to_piece(ctx, id).c_str()); + } + + fflush(stderr); + + const int n_input = inp.size(); + + const auto t_enc_start = ggml_time_us(); + + // eval the prompt + llama_decode(ctx, llama_batch_get_one( inp.data(), n_input - 1, 0, 0)); + llama_decode(ctx, llama_batch_get_one(&inp.back(), 1, n_input - 1, 0)); + + for (int s = 1; s < W + G + 1; ++s) { + llama_kv_cache_seq_cp(ctx, 0, s, -1, -1); + } + + const auto t_enc_end = ggml_time_us(); + + int n_predict = 0; + int n_accept = 0; + + int n_past = inp.size(); + + llama_token id = 0; + + // used to determine end of generation + bool has_eos = false; + + // for each decoded batch, we have at most W + G + 1 distinct sequences: + // seq_id == 0 : the current input token + // seq_id [1, W] : tokens from the past N - 1 Jacobi iterations + // seq_id [W + 1, W + G] : verification n-grams + llama_batch batch = llama_batch_init(params.n_ctx, 0, W + G + 1); + + // target model sampling context + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params.sparams); + + // verification n-grams + std::vector ngrams_cur(G); + + // tokens for the past N - 1 Jacobi iterations + std::vector tokens_j_prev(W); + std::vector> tokens_j(N - 1); + for (int j = 0; j < N - 1; j++) { + tokens_j[j].resize(W); + + for (int i = 0; i < W; i++) { + // there are different ways to init these tokens + if (0) { + // initialize randomly from the prompt tokens + tokens_j[j][i] = all[1 + rand() % (all.size() - 1)]; + } else { + // initialize with a sequence of increasing numbers + tokens_j[j][i] = 100 + i; + } + } + } + + std::vector seq_id_look; + + // the input token belongs both to all sequences + std::vector seq_id_all(W + G + 1); + for (int i = 0; i < W + G + 1; i++) { + seq_id_all[i] = i; + } + + // here we keep adding new n-grams as we go + ngram_container ngrams_observed(llama_n_vocab(model), N, G); + + // debug + struct llama_kv_cache_view kvc_view = 
llama_kv_cache_view_init(ctx, W + G + 1); + + const auto t_dec_start = ggml_time_us(); + + // sample first token + { + id = llama_sampling_sample(ctx_sampling, ctx, NULL, 0); + + llama_sampling_accept(ctx_sampling, ctx, id, true); + + { + const std::string token_str = llama_token_to_piece(ctx, id); + + printf("%s", token_str.c_str()); + fflush(stdout); + } + } + + while (true) { + // debug + if (dump_kv_cache) { + llama_kv_cache_view_update(ctx, &kvc_view); + dump_kv_cache_view_seqs(kvc_view, 40); + } + + // build the mask from https://lmsys.org/blog/2023-11-21-lookahead-decoding/ + // + // Example for W = 5, N = 4, G = 2: + // (I = input, L = lookahead, V = verification) + // + // Batch: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 + // T: -2 -2 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 + // Info: I L L L L L L L L L L L L L L V V V V V V + // Pos: 0 1 2 3 4 1 2 3 4 5 2 3 4 5 6 1 2 3 1 2 3 (+ n_past) + // Logits: 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 + // --------------------------------------------------------------------- + // Seq: 0 + // 1 1 1 + // 2 2 2 2 + // 3 3 3 3 3 + // 4 4 4 4 4 4 + // 5 5 5 5 5 5 5 + // 6 6 6 6 + // 7 7 7 7 + // --------------------------------------------------------------------- + // | | | | | | | | | | | + // V V V V V | | | | | | + // j_tokens | | | | | | + // V V V V V V + // id + { + llama_batch_clear(batch); + + // current token - first token of the first level + llama_batch_add(batch, id, n_past, seq_id_all, true); + + // verification n-grams - queue this before the lookahead tokens for less KV cache fragmentation + { + const int g_cur = ngrams_observed.cnt[id]; + + ngrams_cur.resize(g_cur); + for (int g = 0; g < g_cur; g++) { + ngrams_cur[g].active = true; + ngrams_cur[g].tokens.resize(N); + ngrams_cur[g].i_batch.resize(N); + ngrams_cur[g].seq_id = W + 1 + g; + ngrams_cur[g].i_batch[0] = 0; + ngrams_cur[g].tokens [0] = id; + } + + for (int j = 0; j < N - 1; j++) { + for (int g = 0; g < g_cur; g++) { + const int idx = id*(N - 1)*G + g*(N - 1); + + const llama_token t = ngrams_observed.tokens[idx + j]; + + ngrams_cur[g].tokens [j + 1] = t; + ngrams_cur[g].i_batch[j + 1] = batch.n_tokens; + + llama_batch_add(batch, t, n_past + j + 1, { W + 1 + g }, true); + } + } + } + + // fill the remaining W - 1 tokens for the first level + for (int i = 1; i < W; i++) { + seq_id_look.resize(W - i); + for (int j = 0; j < W - i; j++) { + seq_id_look[j] = i + j + 1; + } + + llama_batch_add(batch, tokens_j[0][i], n_past + i, seq_id_look, false); + } + + // fill the rest of the levels + for (int j = 1; j < N - 1; j++) { + for (int i = 0; i < W; i++) { + llama_batch_add(batch, tokens_j[j][i], n_past + j + i, { i + 1 }, j == N - 2); + } + } + } + + if (llama_decode(ctx, batch) != 0) { + fprintf(stderr, "\n\n%s: error: llama_decode failed - increase KV cache size\n", __func__); + return 1; + } + + int seq_id_best = 0; + + for (int v = 0; v < N; ++v) { + int i_batch = 0; + + // if no active ngrams are left, it means the sampled token does not pass the verification + if (v > 0) { + for (int g = 0; g < (int) ngrams_cur.size(); g++) { + if (ngrams_cur[g].active) { + i_batch = ngrams_cur[g].i_batch[v]; + seq_id_best = ngrams_cur[g].seq_id; + + ++n_accept; + break; + } + } + + // no more matches -> create a new batch + if (i_batch == 0) { + break; + } + } + + // sample the next token + id = llama_sampling_sample(ctx_sampling, ctx, NULL, i_batch); + + llama_sampling_accept(ctx_sampling, ctx, id, true); + + // print + { + const std::string token_str = llama_token_to_piece(ctx, id); 
+ + if (v == 0) { + printf("%s", token_str.c_str()); + } else { + // print light cyan + printf("\033[0;96m%s\033[0m", token_str.c_str()); + } + fflush(stdout); + + if (id == llama_token_eos(model)) { + has_eos = true; + } + + all.push_back(id); + } + + ++n_predict; + ++n_past; + + if (n_predict > params.n_predict || has_eos) { + break; + } + + // verify across active n-grams + for (int g = 0; g < (int) ngrams_cur.size(); g++) { + if (ngrams_cur[g].active) { + if (v == N - 1) { + ngrams_cur[g].active = false; + } else { + if (id != ngrams_cur[g].tokens[v + 1]) { + ngrams_cur[g].active = false; + } + } + } + } + + // print known n-grams starting with token id (debug) + if (0 && v == 0) { + if (ngrams_observed.cnt[id] > 0) { + printf("\n - %d n-grams starting with '%s'\n", ngrams_observed.cnt[id], llama_token_to_piece(ctx, id).c_str()); + } + + for (int i = 0; i < ngrams_observed.cnt[id]; i++) { + printf(" - ngram %2d: ", i); + + const int idx = id*(N - 1)*G + i*(N - 1); + + for (int j = 0; j < N - 1; j++) { + const std::string token_str = llama_token_to_piece(ctx, ngrams_observed.tokens[idx + j]); + + printf("%s", token_str.c_str()); + } + + printf("\n"); + } + } + + // update lookahead tokens + { + for (int i = 0; i < W; i++) { + tokens_j_prev[i] = tokens_j[0][i]; + } + + for (int j = 0; j < N - 2; j++) { + tokens_j[j] = tokens_j[j + 1]; + } + + if (v == 0) { + // sample from the last level + for (int i = 0; i < W; i++) { + tokens_j[N - 2][i] = llama_sampling_sample(ctx_sampling, ctx, NULL, ngrams_cur.size()*(N-1) + W*(N - 2) + i); + } + } else { + for (int i = 0; i < W; i++) { + // there are different ways to init these tokens + if (0) { + // random init + tokens_j[N - 2][i] = all[1 + rand() % (all.size() - 1)]; + } else { + // init from the previous level + tokens_j[N - 2][i] = tokens_j[0][i]; + } + } + } + } + + // update observed ngrams + if (v == 0) { + // the first token of the n-gram is determined by the index in the container so it is not stored + std::vector ngram(N - 1); + + // n-gram generation + // ref: https://github.com/hao-ai-lab/LookaheadDecoding/issues/14#issuecomment-1826198518 + for (int f = 0; f < W; ++f) { + const int ft = tokens_j_prev[f]; // first token of the n-gram + + for (int j = 0; j < N - 1; ++j) { + ngram[j] = tokens_j[j][f]; + } + + // filter-out repeating n-grams + { + bool is_unique = true; + + for (int k = 0; k < ngrams_observed.cnt[ft]; ++k) { + const int idx = ft*(N - 1)*G + k*(N - 1); + + bool is_match = true; + for (int j = 0; j < N - 1; ++j) { + if (ngrams_observed.tokens[idx + j] != ngram[j]) { + is_match = false; + break; + } + } + + if (is_match) { + is_unique = false; + break; + } + } + + if (!is_unique) { + continue; + } + } + + const int head = ngrams_observed.head[ft]; + const int idx = ft*(N - 1)*G + head*(N - 1); + + for (int i = 0; i < N - 1; i++) { + ngrams_observed.tokens[idx + i] = ngram[i]; + } + + ngrams_observed.cnt[ft] = std::min(G, ngrams_observed.cnt[ft] + 1); + ngrams_observed.head[ft] = (head + 1) % G; + + ngrams_observed.n_total++; + } + } + } + + if (n_predict > params.n_predict || has_eos) { + break; + } + + // KV cache management + // if no verification token matched, we simply remove all cells from this batch -> no fragmentation + llama_kv_cache_seq_rm(ctx, -1, n_past, -1); + + if (seq_id_best != 0) { + // if a verification token matched, we keep the best sequence and remove the rest + // this leads to some KV cache fragmentation + llama_kv_cache_seq_keep(ctx, seq_id_best); + llama_kv_cache_seq_cp (ctx, seq_id_best, 0, -1, 
-1); + llama_kv_cache_seq_rm (ctx, seq_id_best, -1, -1); + + for (int s = 1; s < W + G + 1; ++s) { + llama_kv_cache_seq_cp(ctx, 0, s, -1, -1); + } + } + } + + auto t_dec_end = ggml_time_us(); + + LOG_TEE("\n\n"); + + LOG_TEE("encoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_input, (t_enc_end - t_enc_start) / 1e6f, inp.size() / ((t_enc_end - t_enc_start) / 1e6f)); + LOG_TEE("decoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_predict, (t_dec_end - t_dec_start) / 1e6f, n_predict / ((t_dec_end - t_dec_start) / 1e6f)); + + LOG_TEE("\n"); + LOG_TEE("W = %2d\n", W); + LOG_TEE("N = %2d\n", N); + LOG_TEE("G = %2d\n", G); + LOG_TEE("\n"); + LOG_TEE("n_predict = %d\n", n_predict); + LOG_TEE("n_accept = %d\n", n_accept); + + llama_print_timings(ctx); + + llama_kv_cache_view_free(&kvc_view); + llama_sampling_free(ctx_sampling); + + llama_batch_free(batch); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + fprintf(stderr, "\n\n"); + + return 0; +} From 9656026b53236ed7328458269c4c798dd50ac8d1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 26 Nov 2023 20:42:51 +0200 Subject: [PATCH 013/811] readme : update hot topics --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 2f83a71fd..2892132c4 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- Using `llama.cpp` with AWS instances: https://github.com/ggerganov/llama.cpp/discussions/4225 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 - Collecting Apple Silicon performance stats: https://github.com/ggerganov/llama.cpp/discussions/4167 From 3e73d31d9cc0232882ce61c64742aff3ecfec416 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 26 Nov 2023 21:51:46 +0200 Subject: [PATCH 014/811] lookahead : support `-n -1` infinite generation --- examples/lookahead/lookahead.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/lookahead/lookahead.cpp b/examples/lookahead/lookahead.cpp index 4c49a85eb..e55a15a1b 100644 --- a/examples/lookahead/lookahead.cpp +++ b/examples/lookahead/lookahead.cpp @@ -311,7 +311,7 @@ int main(int argc, char ** argv) { ++n_predict; ++n_past; - if (n_predict > params.n_predict || has_eos) { + if ((params.n_predict >= 0 && n_predict > params.n_predict) || has_eos) { break; } @@ -433,7 +433,7 @@ int main(int argc, char ** argv) { } } - if (n_predict > params.n_predict || has_eos) { + if ((params.n_predict >= 0 && n_predict > params.n_predict) || has_eos) { break; } From f3b269813f6147c5b5cda082e6b45cf04a932e0d Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 26 Nov 2023 22:58:43 -0500 Subject: [PATCH 015/811] ggml : fix -Warray-bounds warning with gcc (#4231) --- ggml.c | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ggml.c b/ggml.c index f92292b39..0c7264a36 100644 --- a/ggml.c +++ b/ggml.c @@ -15689,13 +15689,14 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = 1; } break; - case GGML_OP_COUNT: - { - GGML_ASSERT(false); - } break; default: { - printf("%s: op %s not implemented\n", __func__, ggml_op_name(node->op)); + fprintf(stderr, "%s: op not implemented: ", __func__); + if (node->op < GGML_OP_COUNT) { + fprintf(stderr, "%s\n", ggml_op_name(node->op)); + } else { + fprintf(stderr, "%d\n", node->op); + } GGML_ASSERT(false); } break; } From 
bb03290c17540768a16000a2b01ee4f22440aba1 Mon Sep 17 00:00:00 2001 From: Bailey Chittle <39804642+bachittle@users.noreply.github.com> Date: Mon, 27 Nov 2023 09:56:52 -0500 Subject: [PATCH 016/811] examples : iOS example with swift ui (#4159) * copy to llama.cpp as subdir * attempt enabling metal, fails * ggml metal compiles! * Update README.md * initial conversion to new format, utf8 errors? * bug fixes, but now has an invalid memory access :( * added O3, now has insufficient memory access * begin sync with master * update to match latest code, new errors * fixed it! * fix for loop conditionals, increase result size * fix current workflow errors * attempt a llama.swiftui workflow * Update .github/workflows/build.yml Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- .github/workflows/build.yml | 11 + examples/llama.swiftui/.gitignore | 1 + examples/llama.swiftui/README.md | 7 + .../llama.cpp.swift/LibLlama.swift | 176 +++++++ .../llama.cpp.swift/bridging-header.h | 5 + .../llama.swiftui.xcodeproj/project.pbxproj | 481 ++++++++++++++++++ .../contents.xcworkspacedata | 7 + .../xcshareddata/IDEWorkspaceChecks.plist | 8 + .../AccentColor.colorset/Contents.json | 11 + .../AppIcon.appiconset/Contents.json | 13 + .../Assets.xcassets/Contents.json | 6 + .../llama.swiftui/Models/LlamaState.swift | 45 ++ .../Preview Assets.xcassets/Contents.json | 6 + .../llama.swiftui/Resources/models/.gitignore | 0 .../llama.swiftui/UI/ContentView.swift | 42 ++ .../llama.swiftui/llama_swiftuiApp.swift | 10 + 16 files changed, 829 insertions(+) create mode 100644 examples/llama.swiftui/.gitignore create mode 100644 examples/llama.swiftui/README.md create mode 100644 examples/llama.swiftui/llama.cpp.swift/LibLlama.swift create mode 100644 examples/llama.swiftui/llama.cpp.swift/bridging-header.h create mode 100644 examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj create mode 100644 examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/contents.xcworkspacedata create mode 100644 examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist create mode 100644 examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json create mode 100644 examples/llama.swiftui/llama.swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 examples/llama.swiftui/llama.swiftui/Assets.xcassets/Contents.json create mode 100644 examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift create mode 100644 examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json create mode 100644 examples/llama.swiftui/llama.swiftui/Resources/models/.gitignore create mode 100644 examples/llama.swiftui/llama.swiftui/UI/ContentView.swift create mode 100644 examples/llama.swiftui/llama.swiftui/llama_swiftuiApp.swift diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index bc295d52d..22be233e6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -498,6 +498,17 @@ jobs: path: | cudart-llama-bin-win-cu${{ matrix.cuda }}-x64.zip + ios-xcode-build: + runs-on: macos-latest + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Build Xcode project + run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build + + # freeBSD-latest: # runs-on: macos-12 # steps: diff --git 
a/examples/llama.swiftui/.gitignore b/examples/llama.swiftui/.gitignore new file mode 100644 index 000000000..9bce6af39 --- /dev/null +++ b/examples/llama.swiftui/.gitignore @@ -0,0 +1 @@ +xcuserdata diff --git a/examples/llama.swiftui/README.md b/examples/llama.swiftui/README.md new file mode 100644 index 000000000..fa68e6ed8 --- /dev/null +++ b/examples/llama.swiftui/README.md @@ -0,0 +1,7 @@ +# llama.swiftui + +Local inference of llama.cpp on an iPhone. +So far I only tested with starcoder 1B model, but it can most likely handle 7B models as well. + +https://github.com/bachittle/llama.cpp/assets/39804642/e290827a-4edb-4093-9642-2a5e399ec545 + diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift new file mode 100644 index 000000000..aaef09611 --- /dev/null +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -0,0 +1,176 @@ +import Foundation + +// import llama + +enum LlamaError: Error { + case couldNotInitializeContext +} + +actor LlamaContext { + private var model: OpaquePointer + private var context: OpaquePointer + private var batch: llama_batch + private var tokens_list: [llama_token] + + var n_len: Int32 = 512 + var n_cur: Int32 = 0 + var n_decode: Int32 = 0 + + init(model: OpaquePointer, context: OpaquePointer) { + self.model = model + self.context = context + self.tokens_list = [] + self.batch = llama_batch_init(512, 0, 1) + } + + deinit { + llama_free(context) + llama_free_model(model) + llama_backend_free() + } + + static func createContext(path: String) throws -> LlamaContext { + llama_backend_init(false) + let model_params = llama_model_default_params() + + let model = llama_load_model_from_file(path, model_params) + guard let model else { + print("Could not load model at \(path)") + throw LlamaError.couldNotInitializeContext + } + var ctx_params = llama_context_default_params() + ctx_params.seed = 1234 + ctx_params.n_ctx = 2048 + ctx_params.n_threads = 8 + ctx_params.n_threads_batch = 8 + + let context = llama_new_context_with_model(model, ctx_params) + guard let context else { + print("Could not load context!") + throw LlamaError.couldNotInitializeContext + } + + return LlamaContext(model: model, context: context) + } + + func get_n_tokens() -> Int32 { + return batch.n_tokens; + } + + func completion_init(text: String) { + print("attempting to complete \"\(text)\"") + + tokens_list = tokenize(text: text, add_bos: true) + + let n_ctx = llama_n_ctx(context) + let n_kv_req = tokens_list.count + (Int(n_len) - tokens_list.count) + + print("\n n_len = \(n_len), n_ctx = \(n_ctx), n_kv_req = \(n_kv_req)") + + if n_kv_req > n_ctx { + print("error: n_kv_req > n_ctx, the required KV cache size is not big enough") + } + + for id in tokens_list { + print(token_to_piece(token: id)) + } + + // batch = llama_batch_init(512, 0) // done in init() + batch.n_tokens = Int32(tokens_list.count) + + for i1 in 0.. String { + var new_token_id: llama_token = 0 + + let n_vocab = llama_n_vocab(model) + let logits = llama_get_logits_ith(context, batch.n_tokens - 1) + + var candidates = Array() + candidates.reserveCapacity(Int(n_vocab)) + + for token_id in 0.. [llama_token] { + let n_tokens = text.count + (add_bos ? 1 : 0) + let tokens = UnsafeMutablePointer.allocate(capacity: n_tokens) + let tokenCount = llama_tokenize(model, text, Int32(text.count), tokens, Int32(n_tokens), add_bos, false) + + var swiftTokens: [llama_token] = [] + for i in 0.. 
String { + let result = UnsafeMutablePointer.allocate(capacity: 8) + result.initialize(repeating: Int8(0), count: 8) + + let _ = llama_token_to_piece(model, token, result, 8) + + let resultStr = String(cString: result) + + result.deallocate() + + return resultStr + } +} diff --git a/examples/llama.swiftui/llama.cpp.swift/bridging-header.h b/examples/llama.swiftui/llama.cpp.swift/bridging-header.h new file mode 100644 index 000000000..6cd72c979 --- /dev/null +++ b/examples/llama.swiftui/llama.cpp.swift/bridging-header.h @@ -0,0 +1,5 @@ +// +// Use this file to import your target's public headers that you would like to expose to Swift. +// + +#import "llama.h" diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj new file mode 100644 index 000000000..bc1fd15ce --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -0,0 +1,481 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { + +/* Begin PBXBuildFile section */ + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; }; + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; }; + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; }; + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; + 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources 
*/ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; + 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; + 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; + 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; + 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; + 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; + 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; + 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; + 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; + 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = ""; }; + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; + 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "llama-2-7b-chat.Q2_K.gguf"; sourceTree = ""; }; + 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; + 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 8A1C83702AC328BD0096AF73 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { + isa = PBXGroup; + children = ( + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, + 542376092B0D9C40008E6A1C /* ggml-backend.h */, + 542376062B0D9BEA008E6A1C /* ggml-quants.h */, + 542376072B0D9BFB008E6A1C /* ggml-quants.c */, + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, + 549479C62AC9E0F200E0F78B /* ggml-metal.h */, + 549479C52AC9E0F200E0F78B /* ggml-metal.m */, + 542EA09B2AC8723900A8AEE9 /* ggml.c */, + 542EA09C2AC8723900A8AEE9 /* ggml.h */, + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, + 542EA0A12AC8729100A8AEE9 /* llama.cpp */, + 542EA0A22AC8729100A8AEE9 /* llama.h */, + ); + name = llama.cpp; + sourceTree = ""; + }; + 8A1C836A2AC328BD0096AF73 = { + isa = PBXGroup; + children = ( + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, + 8A907F312AC7134E006146EA /* llama.cpp.swift */, + 8A3F84232AC4C891005E2EE8 /* models */, + 8A1C83752AC328BD0096AF73 /* llama.swiftui */, + 8A1C83742AC328BD0096AF73 /* Products */, + 8A39BE082AC7601000BFEB40 /* Frameworks */, + ); + sourceTree = ""; + }; + 8A1C83742AC328BD0096AF73 /* Products */ = { + isa = PBXGroup; + children = ( + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */, + ); + name = Products; + sourceTree = ""; + }; + 8A1C83752AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXGroup; + children = ( + 8A3F84102AC4BD85005E2EE8 /* Resources */, + 8A9F7C4B2AC332DC008AE1EA /* Models */, + 8A9F7C4A2AC332BF008AE1EA /* UI */, + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, + 8A1C837C2AC328BE0096AF73 /* Preview Content */, + ); + path = llama.swiftui; + sourceTree = ""; + }; + 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { + isa = PBXGroup; + children = ( + 
8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 8A39BE082AC7601000BFEB40 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 549479CA2AC9E16000E0F78B /* Metal.framework */, + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 8A3F84102AC4BD85005E2EE8 /* Resources */ = { + isa = PBXGroup; + children = ( + 8A3F84112AC4BD8C005E2EE8 /* models */, + ); + path = Resources; + sourceTree = ""; + }; + 8A3F84112AC4BD8C005E2EE8 /* models */ = { + isa = PBXGroup; + children = ( + 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */, + ); + path = models; + sourceTree = ""; + }; + 8A907F312AC7134E006146EA /* llama.cpp.swift */ = { + isa = PBXGroup; + children = ( + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, + 8A907F322AC7134E006146EA /* LibLlama.swift */, + ); + path = llama.cpp.swift; + sourceTree = ""; + }; + 8A9F7C4A2AC332BF008AE1EA /* UI */ = { + isa = PBXGroup; + children = ( + 8A1C83782AC328BD0096AF73 /* ContentView.swift */, + ); + path = UI; + sourceTree = ""; + }; + 8A9F7C4B2AC332DC008AE1EA /* Models */ = { + isa = PBXGroup; + children = ( + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */, + ); + path = Models; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 8A1C83722AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXNativeTarget; + buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */; + buildPhases = ( + 8A1C836F2AC328BD0096AF73 /* Sources */, + 8A1C83702AC328BD0096AF73 /* Frameworks */, + 8A1C83712AC328BD0096AF73 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = llama.swiftui; + packageProductDependencies = ( + ); + productName = llama.swiftui; + productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 8A1C836B2AC328BD0096AF73 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1500; + LastUpgradeCheck = 1500; + TargetAttributes = { + 8A1C83722AC328BD0096AF73 = { + CreatedOnToolsVersion = 15.0; + LastSwiftMigration = 1500; + }; + }; + }; + buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 8A1C836A2AC328BD0096AF73; + packageReferences = ( + ); + productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 8A1C83722AC328BD0096AF73 /* llama.swiftui */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 8A1C83712AC328BD0096AF73 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, + 8A3F84242AC4C891005E2EE8 /* models in Resources */, + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 8A1C836F2AC328BD0096AF73 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files 
= ( + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 8A1C837F2AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 8A1C83802AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + 
CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 8A1C83822AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 8A1C83832AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + 
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C837F2AC328BE0096AF73 /* Debug */, + 8A1C83802AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C83822AC328BE0096AF73 /* Debug */, + 8A1C83832AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; +} diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 000000000..919434a62 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 000000000..3d4c1e552 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 000000000..eb8789700 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 000000000..13613e3ee --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,13 @@ +{ + "images" : [ + { + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + 
"version" : 1 + } +} diff --git a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/Contents.json b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift new file mode 100644 index 000000000..babc60cdc --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -0,0 +1,45 @@ +import Foundation + +@MainActor +class LlamaState: ObservableObject { + @Published var messageLog = "" + + private var llamaContext: LlamaContext? + private var modelUrl: URL? { + Bundle.main.url(forResource: "q8_0", withExtension: "gguf", subdirectory: "models") + // Bundle.main.url(forResource: "llama-2-7b-chat", withExtension: "Q2_K.gguf", subdirectory: "models") + } + init() { + do { + try loadModel() + } catch { + messageLog += "Error!\n" + } + } + + private func loadModel() throws { + messageLog += "Loading model...\n" + if let modelUrl { + llamaContext = try LlamaContext.createContext(path: modelUrl.path()) + messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" + } else { + messageLog += "Could not locate model\n" + } + } + + func complete(text: String) async { + guard let llamaContext else { + return + } + messageLog += "Attempting to complete text...\n" + await llamaContext.completion_init(text: text) + messageLog += "\(text)" + + while await llamaContext.n_cur <= llamaContext.n_len { + let result = await llamaContext.completion_loop() + messageLog += "\(result)" + } + await llamaContext.clear() + messageLog += "\n\ndone\n" + } +} diff --git a/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json b/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json new file mode 100644 index 000000000..73c00596a --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/llama.swiftui/llama.swiftui/Resources/models/.gitignore b/examples/llama.swiftui/llama.swiftui/Resources/models/.gitignore new file mode 100644 index 000000000..e69de29bb diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift new file mode 100644 index 000000000..0bd16a806 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -0,0 +1,42 @@ +import SwiftUI + +struct ContentView: View { + @StateObject var llamaState = LlamaState() + + @State private var multiLineText = "" + + var body: some View { + VStack { + ScrollView(.vertical) { + Text(llamaState.messageLog) + } + + TextEditor(text: $multiLineText) + .frame(height: 200) + .padding() + .border(Color.gray, width: 0.5) + Button(action: { + sendText() + }) { + Text("Send") + .padding() + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + } + } + .padding() + } + + func sendText() { + Task { + await llamaState.complete(text: multiLineText) + multiLineText = "" + } + } +} +/* +#Preview { + ContentView() +} +*/ diff --git a/examples/llama.swiftui/llama.swiftui/llama_swiftuiApp.swift b/examples/llama.swiftui/llama.swiftui/llama_swiftuiApp.swift new file mode 100644 index 
000000000..cccda8a97 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/llama_swiftuiApp.swift @@ -0,0 +1,10 @@ +import SwiftUI + +@main +struct llama_swiftuiApp: App { + var body: some Scene { + WindowGroup { + ContentView() + } + } +} From 0dab8cd7cca7e1bc3550dcb4797b9062cdbb1ebd Mon Sep 17 00:00:00 2001 From: Kasumi <90275229+kasumi-1@users.noreply.github.com> Date: Tue, 28 Nov 2023 01:39:42 +0800 Subject: [PATCH 017/811] readme : add Amica to UI list (#4230) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 2892132c4..d0d6c9666 100644 --- a/README.md +++ b/README.md @@ -116,6 +116,7 @@ as the main playground for developing new features for the [ggml](https://github - [nat/openplayground](https://github.com/nat/openplayground) - [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) - [withcatai/catai](https://github.com/withcatai/catai) +- [semperai/amica](https://github.com/semperai/amica) --- From b38a16dfcff88d547f78f52d1bea31b84a05aff7 Mon Sep 17 00:00:00 2001 From: bandoti <141645996+bandoti@users.noreply.github.com> Date: Mon, 27 Nov 2023 15:25:42 -0400 Subject: [PATCH 018/811] cmake : fix issue with version info not getting baked into LlamaConfig.cmake (#3970) * Split CPP generation from build-info query * Remove blank lines * Add BUILD_SHARED_LIBS option --- CMakeLists.txt | 4 ++++ common/CMakeLists.txt | 2 +- scripts/build-info.cmake | 22 ---------------------- scripts/gen-build-info-cpp.cmake | 24 ++++++++++++++++++++++++ 4 files changed, 29 insertions(+), 23 deletions(-) create mode 100644 scripts/gen-build-info-cpp.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index f32df5fe5..3e0009415 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -43,6 +43,7 @@ else() endif() # general +option(BUILD_SHARED_LIBS "build shared libraries" OFF) option(LLAMA_STATIC "llama: static link libraries" OFF) option(LLAMA_NATIVE "llama: enable -march=native flag" ON) option(LLAMA_LTO "llama: enable link time optimization" OFF) @@ -100,6 +101,9 @@ option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALO option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_SERVER "llama: build server example" ON) +# Required for relocatable CMake package +include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) + # # Compile flags # diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index 4f930bdc5..71891edc3 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -26,7 +26,7 @@ add_custom_command( COMMENT "Generating build details from Git" COMMAND ${CMAKE_COMMAND} -DMSVC=${MSVC} -DCMAKE_C_COMPILER_VERSION=${CMAKE_C_COMPILER_VERSION} -DCMAKE_C_COMPILER_ID=${CMAKE_C_COMPILER_ID} -DCMAKE_VS_PLATFORM_NAME=${CMAKE_VS_PLATFORM_NAME} - -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} -P "${CMAKE_CURRENT_SOURCE_DIR}/../scripts/build-info.cmake" + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} -P "${CMAKE_CURRENT_SOURCE_DIR}/../scripts/gen-build-info-cpp.cmake" WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/.." 
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/build-info.cpp.in" ${GIT_INDEX} VERBATIM diff --git a/scripts/build-info.cmake b/scripts/build-info.cmake index 73853dfa4..ea3dc55c8 100644 --- a/scripts/build-info.cmake +++ b/scripts/build-info.cmake @@ -1,5 +1,3 @@ -set(TEMPLATE_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp.in") -set(OUTPUT_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp") set(BUILD_NUMBER 0) set(BUILD_COMMIT "unknown") set(BUILD_COMPILER "unknown") @@ -58,23 +56,3 @@ else() ) set(BUILD_TARGET ${OUT}) endif() - -# Only write the build info if it changed -if(EXISTS ${OUTPUT_FILE}) - file(READ ${OUTPUT_FILE} CONTENTS) - string(REGEX MATCH "LLAMA_COMMIT = \"([^\"]*)\";" _ ${CONTENTS}) - set(OLD_COMMIT ${CMAKE_MATCH_1}) - string(REGEX MATCH "LLAMA_COMPILER = \"([^\"]*)\";" _ ${CONTENTS}) - set(OLD_COMPILER ${CMAKE_MATCH_1}) - string(REGEX MATCH "LLAMA_BUILD_TARGET = \"([^\"]*)\";" _ ${CONTENTS}) - set(OLD_TARGET ${CMAKE_MATCH_1}) - if ( - NOT OLD_COMMIT STREQUAL BUILD_COMMIT OR - NOT OLD_COMPILER STREQUAL BUILD_COMPILER OR - NOT OLD_TARGET STREQUAL BUILD_TARGET - ) - configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) - endif() -else() - configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) -endif() diff --git a/scripts/gen-build-info-cpp.cmake b/scripts/gen-build-info-cpp.cmake new file mode 100644 index 000000000..d89338920 --- /dev/null +++ b/scripts/gen-build-info-cpp.cmake @@ -0,0 +1,24 @@ +include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) + +set(TEMPLATE_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp.in") +set(OUTPUT_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp") + +# Only write the build info if it changed +if(EXISTS ${OUTPUT_FILE}) + file(READ ${OUTPUT_FILE} CONTENTS) + string(REGEX MATCH "LLAMA_COMMIT = \"([^\"]*)\";" _ ${CONTENTS}) + set(OLD_COMMIT ${CMAKE_MATCH_1}) + string(REGEX MATCH "LLAMA_COMPILER = \"([^\"]*)\";" _ ${CONTENTS}) + set(OLD_COMPILER ${CMAKE_MATCH_1}) + string(REGEX MATCH "LLAMA_BUILD_TARGET = \"([^\"]*)\";" _ ${CONTENTS}) + set(OLD_TARGET ${CMAKE_MATCH_1}) + if ( + NOT OLD_COMMIT STREQUAL BUILD_COMMIT OR + NOT OLD_COMPILER STREQUAL BUILD_COMPILER OR + NOT OLD_TARGET STREQUAL BUILD_TARGET + ) + configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) + endif() +else() + configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) +endif() From 8406b0924bf323f37d536dee8b8165c1f3d9d11d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 28 Nov 2023 10:32:03 +0200 Subject: [PATCH 019/811] ggml : re-enable BLAS for CPU when src0 != F32 + remove redundant full offload checks in llama.cpp (#4240) * ggml : use blas even if src0 is not F32 * llama : use n_threads_batch only when n_tokens >= 32 ggml-ci * llama : revert n_threads_batch logic ggml-ci --- ggml.c | 2 +- llama.cpp | 12 +----------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/ggml.c b/ggml.c index 0c7264a36..c522a101f 100644 --- a/ggml.c +++ b/ggml.c @@ -9373,7 +9373,7 @@ static bool ggml_compute_forward_mul_mat_use_blas( // TODO: find the optimal values for these if (ggml_is_contiguous(src0) && ggml_is_contiguous(src1) && - src0->type == GGML_TYPE_F32 && + //src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && (ne0 >= 32 && ne1 >= 32 && ne10 >= 32)) { diff --git a/llama.cpp b/llama.cpp index f2b5967d7..cb544228b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5550,18 +5550,8 @@ static int llama_decode_internal( n_threads = std::min(4, n_threads); } - // If all tensors can be run on the GPU then using more than 1 thread is detrimental. 
- const bool full_offload_supported = - model.arch == LLM_ARCH_LLAMA || - model.arch == LLM_ARCH_BAICHUAN || - model.arch == LLM_ARCH_FALCON || - model.arch == LLM_ARCH_REFACT || - model.arch == LLM_ARCH_MPT || - model.arch == LLM_ARCH_STARCODER || - model.arch == LLM_ARCH_STABLELM; - const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 3; - if (ggml_cpu_has_cublas() && full_offload_supported && fully_offloaded) { + if (ggml_cpu_has_cublas() && fully_offloaded) { n_threads = 1; } From 64e64aa2557d97490b2fe1262b313e2f4a1607e3 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 28 Nov 2023 04:51:11 -0500 Subject: [PATCH 020/811] ggml : restore abort() in GGML_ASSERT (#4242) --- ggml.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ggml.h b/ggml.h index f2fce0f22..4d6d4edfd 100644 --- a/ggml.h +++ b/ggml.h @@ -244,11 +244,10 @@ #define GGML_ASSERT(x) \ do { \ if (!(x)) { \ - fprintf(stderr, "GGML_ASSERT: %s:%d: %s\n", __FILE__, __LINE__, #x); \ - fflush(stderr); \ fflush(stdout); \ + fprintf(stderr, "GGML_ASSERT: %s:%d: %s\n", __FILE__, __LINE__, #x); \ ggml_print_backtrace(); \ - exit(1); \ + abort(); \ } \ } while (0) From 4fea3420ee3918d125d74c94d962a6ea82875351 Mon Sep 17 00:00:00 2001 From: Peter Sugihara Date: Tue, 28 Nov 2023 23:16:34 -0800 Subject: [PATCH 021/811] readme : add FreeChat (#4248) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d0d6c9666..44cc94093 100644 --- a/README.md +++ b/README.md @@ -117,6 +117,7 @@ as the main playground for developing new features for the [ggml](https://github - [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) - [withcatai/catai](https://github.com/withcatai/catai) - [semperai/amica](https://github.com/semperai/amica) +- [psugihara/FreeChat](https://github.com/psugihara/FreeChat) --- From 1f5cd83275fabb43f2ae92c30033b384a3eb37b4 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 29 Nov 2023 11:00:17 +0200 Subject: [PATCH 022/811] examples : add readme files --- examples/lookahead/README.md | 7 +++++++ examples/speculative/README.md | 8 ++++++++ 2 files changed, 15 insertions(+) create mode 100644 examples/lookahead/README.md create mode 100644 examples/speculative/README.md diff --git a/examples/lookahead/README.md b/examples/lookahead/README.md new file mode 100644 index 000000000..252a6689e --- /dev/null +++ b/examples/lookahead/README.md @@ -0,0 +1,7 @@ +# llama.cpp/examples/lookahead + +Demonstration of the lookahead decoding technique: + +https://lmsys.org/blog/2023-11-21-lookahead-decoding/ + +More info: https://github.com/ggerganov/llama.cpp/pull/4207 diff --git a/examples/speculative/README.md b/examples/speculative/README.md new file mode 100644 index 000000000..d88fd3790 --- /dev/null +++ b/examples/speculative/README.md @@ -0,0 +1,8 @@ +# llama.cpp/examples/speculative + +Demonstration of speculative decoding and tree-based speculative decoding techniques + +More info: + +- https://github.com/ggerganov/llama.cpp/pull/2926 +- https://github.com/ggerganov/llama.cpp/pull/3624 From e2bd725f4b39bc5c6234858d158e01248f5ab5bd Mon Sep 17 00:00:00 2001 From: rhjdvsgsgks <26178113+rhjdvsgsgks@users.noreply.github.com> Date: Thu, 30 Nov 2023 20:50:40 +0000 Subject: [PATCH 023/811] py : fix oai proxy (#3972) * fix oai proxy fix generation not stopped when the bot stops talking in chat mode fix possible `slot_id` not exist response for cors (and pre flight) * oai proxy: workaround for some clients (such as Chatbox) * use 
stop as separator to replace hardcoded `\n` --- examples/server/api_like_OAI.py | 46 ++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/examples/server/api_like_OAI.py b/examples/server/api_like_OAI.py index 313e1a965..830c056d4 100755 --- a/examples/server/api_like_OAI.py +++ b/examples/server/api_like_OAI.py @@ -11,10 +11,10 @@ app = Flask(__name__) slot_id = -1 parser = argparse.ArgumentParser(description="An example of using server.cpp with a similar API to OAI. It must be used together with server.cpp.") -parser.add_argument("--chat-prompt", type=str, help="the top prompt in chat completions(default: 'A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n')", default='A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n') -parser.add_argument("--user-name", type=str, help="USER name in chat completions(default: '\\nUSER: ')", default="\\nUSER: ") -parser.add_argument("--ai-name", type=str, help="ASSISTANT name in chat completions(default: '\\nASSISTANT: ')", default="\\nASSISTANT: ") -parser.add_argument("--system-name", type=str, help="SYSTEM name in chat completions(default: '\\nASSISTANT's RULE: ')", default="\\nASSISTANT's RULE: ") +parser.add_argument("--chat-prompt", type=str, help="the top prompt in chat completions(default: 'A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.')", default='A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.') +parser.add_argument("--user-name", type=str, help="USER name in chat completions(default: 'USER: ')", default="USER: ") +parser.add_argument("--ai-name", type=str, help="ASSISTANT name in chat completions(default: 'ASSISTANT: ')", default="ASSISTANT: ") +parser.add_argument("--system-name", type=str, help="SYSTEM name in chat completions(default: 'ASSISTANT's RULE: ')", default="ASSISTANT's RULE: ") parser.add_argument("--stop", type=str, help="the end of response in chat completions(default: '')", default="") parser.add_argument("--llama-api", type=str, help="Set the address of server.cpp in llama.cpp(default: http://127.0.0.1:8080)", default='http://127.0.0.1:8080') parser.add_argument("--api-key", type=str, help="Set the api key to allow only few user(default: NULL)", default="") @@ -34,19 +34,19 @@ def is_present(json, key): #convert chat to prompt def convert_chat(messages): - prompt = "" + args.chat_prompt.replace("\\n", "\n") - system_n = args.system_name.replace("\\n", "\n") - user_n = args.user_name.replace("\\n", "\n") - ai_n = args.ai_name.replace("\\n", "\n") - stop = args.stop.replace("\\n", "\n") + system_n = args.system_name + user_n = args.user_name + ai_n = args.ai_name + stop = args.stop + prompt = "" + args.chat_prompt + stop for line in messages: if (line["role"] == "system"): - prompt += f"{system_n}{line['content']}" + prompt += f"{system_n}{line['content']}{stop}" if (line["role"] == "user"): - prompt += f"{user_n}{line['content']}" + prompt += f"{user_n}{line['content']}{stop}" if (line["role"] == "assistant"): prompt += f"{ai_n}{line['content']}{stop}" prompt += ai_n.rstrip() @@ -130,7 +130,7 @@ def make_resData_stream(data, chat=False, time_now = 0, start=False): } ] } - slot_id = data["slot_id"] + slot_id = data.get("slot_id") if (chat): if (start): 
resData["choices"][0]["delta"] = { @@ -150,11 +150,13 @@ def make_resData_stream(data, chat=False, time_now = 0, start=False): return resData -@app.route('/chat/completions', methods=['POST']) -@app.route('/v1/chat/completions', methods=['POST']) +@app.route('/chat/completions', methods=['POST', 'OPTIONS']) +@app.route('/v1/chat/completions', methods=['POST', 'OPTIONS']) def chat_completions(): if (args.api_key != "" and request.headers["Authorization"].split()[1] != args.api_key): return Response(status=403) + if request.method == 'OPTIONS': + return Response(headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) body = request.get_json() stream = False tokenize = False @@ -177,20 +179,22 @@ def chat_completions(): data = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/completion"), data=json.dumps(postData), stream=True) time_now = int(time.time()) resData = make_resData_stream({}, chat=True, time_now=time_now, start=True) - yield 'data: {}\n'.format(json.dumps(resData)) + yield 'data: {}\n\n'.format(json.dumps(resData)) for line in data.iter_lines(): if line: decoded_line = line.decode('utf-8') resData = make_resData_stream(json.loads(decoded_line[6:]), chat=True, time_now=time_now) - yield 'data: {}\n'.format(json.dumps(resData)) - return Response(generate(), mimetype='text/event-stream') + yield 'data: {}\n\n'.format(json.dumps(resData)) + return Response(generate(), mimetype='text/event-stream', headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) -@app.route('/completions', methods=['POST']) -@app.route('/v1/completions', methods=['POST']) +@app.route('/completions', methods=['POST', 'OPTIONS']) +@app.route('/v1/completions', methods=['POST', 'OPTIONS']) def completion(): if (args.api_key != "" and request.headers["Authorization"].split()[1] != args.api_key): return Response(status=403) + if request.method == 'OPTIONS': + return Response(headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) body = request.get_json() stream = False tokenize = False @@ -216,8 +220,8 @@ def completion(): if line: decoded_line = line.decode('utf-8') resData = make_resData_stream(json.loads(decoded_line[6:]), chat=False, time_now=time_now) - yield 'data: {}\n'.format(json.dumps(resData)) - return Response(generate(), mimetype='text/event-stream') + yield 'data: {}\n\n'.format(json.dumps(resData)) + return Response(generate(), mimetype='text/event-stream', headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) if __name__ == '__main__': app.run(args.host, port=args.port) From 954e22858c5cea1dc03e9172d3879402af2b5990 Mon Sep 17 00:00:00 2001 From: tarcey Date: Thu, 30 Nov 2023 22:40:23 +0100 Subject: [PATCH 024/811] llama : fix typical sampling (#4261) Typical sampling was broken because after copying new_candidates into candidates, the "sorted" bool is left at "true", but the new data is no longer sorted according to probability. Patch to set "sorted" to false. Test: Generating with temp=0.0001 (approx. argmax) should generate the same sequence at typical>=1.0 and typical=0.9999 (approx. disabled, but enters the typical sampling codepath). 
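For illustration, a minimal self-contained sketch of the invariant behind this fix, using simplified stand-ins for llama_token_data / llama_token_data_array rather than the actual llama.h declarations: once the candidate array is overwritten with entries ordered by typicality instead of probability, the sorted flag must be cleared, or a later sampler that trusts the flag (for example a top-k step that keeps the first k entries, in the spirit of llama_sample_top_k) will operate on the wrong tokens.

```cpp
// Simplified stand-ins (assumed), not the real llama.h types.
#include <algorithm>
#include <cstdio>
#include <vector>

struct token_data { int id; float p; };

struct token_data_array {
    std::vector<token_data> data;
    bool sorted; // invariant: true only if data is ordered by p, descending
};

// Mimics the typical-sampling step: keep a subset, ordered by typicality.
static void typical_filter(token_data_array & cands, std::vector<token_data> kept) {
    cands.data = std::move(kept); // no longer ordered by p
    cands.sorted = false;         // the fix: without this line, top_k below keeps the wrong token
}

// Mimics a later sampler that trusts the flag before taking the first k entries.
static void top_k(token_data_array & cands, size_t k) {
    if (!cands.sorted) {
        std::sort(cands.data.begin(), cands.data.end(),
                  [](const token_data & a, const token_data & b) { return a.p > b.p; });
        cands.sorted = true;
    }
    if (cands.data.size() > k) {
        cands.data.resize(k);
    }
}

int main() {
    token_data_array cands = { { {1, 0.5f}, {2, 0.3f}, {3, 0.2f} }, /*sorted=*/true };
    typical_filter(cands, { {3, 0.2f}, {1, 0.5f} }); // typicality order, not p order
    top_k(cands, 1);
    // Prints "kept token id = 1" (the highest-probability survivor) only because
    // typical_filter reset the sorted flag; leaving it true would keep id 3 instead.
    std::printf("kept token id = %d\n", cands.data[0].id);
    return 0;
}
```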
--- llama.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/llama.cpp b/llama.cpp index cb544228b..4af450615 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7027,6 +7027,7 @@ void llama_sample_typical(struct llama_context * ctx, llama_token_data_array * c // Replace the data in candidates with the new_candidates data std::copy(new_candidates.begin(), new_candidates.end(), candidates->data); candidates->size = new_candidates.size(); + candidates->sorted = false; if (ctx) { ctx->t_sample_us += ggml_time_us() - t_start_sample_us; From f4d973cecb7368c985720ba9100ae6abba14806d Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 30 Nov 2023 22:42:23 +0100 Subject: [PATCH 025/811] convert.py : fix llama/llama2 conversion due to vocab_size=-1 (#4258) --- convert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert.py b/convert.py index 3ad836ce0..6e95d6cb3 100755 --- a/convert.py +++ b/convert.py @@ -267,7 +267,7 @@ class Params: n_ctx = 2048 return Params( - n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]), + n_vocab = model["tok_embeddings.weight"].shape[0], n_embd = config["dim"], n_layer = config["n_layers"], n_ctx = n_ctx, From b18c66ca6eee4fe0465cff5042daf05005dc9ab2 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 30 Nov 2023 22:43:08 +0100 Subject: [PATCH 026/811] llama : fix alignment of general.name in print meta (#4254) * llama: fix alignment of general.name in print meta This commit fixes the alignment of the general.name field in the llm_load_print_meta function. Currently the output looks like this: ```console llm_load_print_meta: model ftype = mostly Q4_0 llm_load_print_meta: model params = 13.02 B llm_load_print_meta: model size = 6.86 GiB (4.53 BPW) llm_load_print_meta: general.name = LLaMA v2 ``` And with this commit it looks like this: ```console llm_load_print_meta: model ftype = mostly Q4_0 llm_load_print_meta: model params = 13.02 B llm_load_print_meta: model size = 6.86 GiB (4.53 BPW) llm_load_print_meta: general.name = LLaMA v2 ``` Signed-off-by: Daniel Bevenius * llama: fix alignment of special tokens Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- llama.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/llama.cpp b/llama.cpp index 4af450615..26754ef72 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2645,15 +2645,15 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { } // general kv - LLAMA_LOG_INFO("%s: general.name = %s\n", __func__, model.name.c_str()); + LLAMA_LOG_INFO("%s: general.name = %s\n", __func__, model.name.c_str()); // special tokens - if (vocab.special_bos_id != -1) { LLAMA_LOG_INFO( "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].text.c_str() ); } - if (vocab.special_eos_id != -1) { LLAMA_LOG_INFO( "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].text.c_str() ); } - if (vocab.special_unk_id != -1) { LLAMA_LOG_INFO( "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].text.c_str() ); } - if (vocab.special_sep_id != -1) { LLAMA_LOG_INFO( "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].text.c_str() ); } - if (vocab.special_pad_id != -1) { LLAMA_LOG_INFO( "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].text.c_str() ); } - if (vocab.linefeed_id != -1) { 
LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } + if (vocab.special_bos_id != -1) { LLAMA_LOG_INFO( "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].text.c_str() ); } + if (vocab.special_eos_id != -1) { LLAMA_LOG_INFO( "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].text.c_str() ); } + if (vocab.special_unk_id != -1) { LLAMA_LOG_INFO( "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].text.c_str() ); } + if (vocab.special_sep_id != -1) { LLAMA_LOG_INFO( "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].text.c_str() ); } + if (vocab.special_pad_id != -1) { LLAMA_LOG_INFO( "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].text.c_str() ); } + if (vocab.linefeed_id != -1) { LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } } static void llm_load_tensors( From 74daabae6927b99e7333d6126dee35193c418457 Mon Sep 17 00:00:00 2001 From: Dawid Wysocki <62249621+TortillaZHawaii@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:43:32 +0100 Subject: [PATCH 027/811] readme : fix typo (#4253) llama.cpp uses GitHub Actions, not Gitlab Actions. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 44cc94093..b89ba73aa 100644 --- a/README.md +++ b/README.md @@ -896,7 +896,7 @@ Additionally, there the following images, similar to the above: - `ghcr.io/ggerganov/llama.cpp:full-rocm`: Same as `full` but compiled with ROCm support. (platforms: `linux/amd64`, `linux/arm64`) - `ghcr.io/ggerganov/llama.cpp:light-rocm`: Same as `light` but compiled with ROCm support. (platforms: `linux/amd64`, `linux/arm64`) -The GPU enabled images are not currently tested by CI beyond being built. They are not built with any variation from the ones in the Dockerfiles defined in [.devops/](.devops/) and the Gitlab Action defined in [.github/workflows/docker.yml](.github/workflows/docker.yml). If you need different settings (for example, a different CUDA or ROCm library, you'll need to build the images locally for now). +The GPU enabled images are not currently tested by CI beyond being built. They are not built with any variation from the ones in the Dockerfiles defined in [.devops/](.devops/) and the GitHub Action defined in [.github/workflows/docker.yml](.github/workflows/docker.yml). If you need different settings (for example, a different CUDA or ROCm library, you'll need to build the images locally for now). 
#### Usage From f7f9e06212d44530b3200033286049dbdf84b3d3 Mon Sep 17 00:00:00 2001 From: Li Tan Date: Thu, 30 Nov 2023 13:44:11 -0800 Subject: [PATCH 028/811] cmake : fix the metal file foder path (#4217) --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3e0009415..6f35a25d5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -165,7 +165,7 @@ if (LLAMA_METAL) #add_compile_definitions(GGML_METAL_DIR_KERNELS="${CMAKE_CURRENT_SOURCE_DIR}/") # copy ggml-metal.metal to bin directory - configure_file(ggml-metal.metal bin/ggml-metal.metal COPYONLY) + configure_file(ggml-metal.metal ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal COPYONLY) set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${FOUNDATION_LIBRARY} From bde629bb53b85886ee0fe83524c1efe2689bc618 Mon Sep 17 00:00:00 2001 From: Miwa / Ensan <63481257+ensan-hcl@users.noreply.github.com> Date: Fri, 1 Dec 2023 06:45:17 +0900 Subject: [PATCH 029/811] batched.swift : update README.md (#4214) docs: update how to run --- examples/batched.swift/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/batched.swift/README.md b/examples/batched.swift/README.md index 464c9079c..4c2721fe8 100644 --- a/examples/batched.swift/README.md +++ b/examples/batched.swift/README.md @@ -1,4 +1,4 @@ This is a swift clone of `examples/batched`. $ `make` -$ `./swift MODEL_PATH [PROMPT] [PARALLEL]` +$ `./batched_swift MODEL_PATH [PROMPT] [PARALLEL]` From 3bd2c7ce1b752973cf937482a0333e85d1681e2b Mon Sep 17 00:00:00 2001 From: Juraj Bednar Date: Thu, 30 Nov 2023 22:46:01 +0100 Subject: [PATCH 030/811] docker : add finetune option (#4211) --- .devops/tools.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.devops/tools.sh b/.devops/tools.sh index 9d999315f..3a7d274e4 100755 --- a/.devops/tools.sh +++ b/.devops/tools.sh @@ -13,6 +13,8 @@ elif [[ "$arg1" == '--quantize' || "$arg1" == '-q' ]]; then ./quantize "$@" elif [[ "$arg1" == '--run' || "$arg1" == '-r' ]]; then ./main "$@" +elif [[ "$arg1" == '--finetune' || "$arg1" == '-f' ]]; then + ./finetune "$@" elif [[ "$arg1" == '--all-in-one' || "$arg1" == '-a' ]]; then echo "Converting PTH to GGML..." for i in `ls $1/$2/ggml-model-f16.bin*`; do @@ -34,6 +36,8 @@ else echo " ex: --outtype f16 \"/models/7B/\" " echo " --quantize (-q): Optimize with quantization process ggml" echo " ex: \"/models/7B/ggml-model-f16.bin\" \"/models/7B/ggml-model-q4_0.bin\" 2" + echo " --finetune (-f): Run finetune command to create a lora finetune of the model" + echo " See documentation for finetune for command-line parameters" echo " --all-in-one (-a): Execute --convert & --quantize" echo " ex: \"/models/\" 7B" echo " --server (-s): Run a model on the server" From 524907aa768a26cbf83d8e2eb30547e2ee1d1b1a Mon Sep 17 00:00:00 2001 From: vodkaslime <646329483@qq.com> Date: Fri, 1 Dec 2023 05:49:21 +0800 Subject: [PATCH 031/811] readme : fix (#4135) * fix: readme * chore: resolve comments * chore: resolve comments --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b89ba73aa..dac971ae5 100644 --- a/README.md +++ b/README.md @@ -324,7 +324,7 @@ mpirun -hostfile hostfile -n 3 ./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 ### BLAS Build -Building the program with BLAS support may lead to some performance improvements in prompt processing using batch sizes higher than 32 (the default is 512). BLAS doesn't affect the normal generation performance. 
There are currently three different implementations of it: +Building the program with BLAS support may lead to some performance improvements in prompt processing using batch sizes higher than 32 (the default is 512). Support with CPU-only BLAS implementations doesn't affect the normal generation performance. We may see generation performance improvements with GPU-involved BLAS implementations, e.g. cuBLAS, hipBLAS and CLBlast. There are currently several different BLAS implementations available for build and use: - #### Accelerate Framework: From 8efa0f6ebed53c9453e6721da86fb294e5015909 Mon Sep 17 00:00:00 2001 From: Andrew Godfrey Date: Thu, 30 Nov 2023 13:56:19 -0800 Subject: [PATCH 032/811] main : pass LOG_TEE callback to llama.cpp log (#4033) * main : Call llama_log_set to use LOG_TEE * tabs to spaces --- examples/main/main.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 31ec8cade..c5cdfbf21 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -100,6 +100,12 @@ static void sigint_handler(int signo) { } #endif +static void llama_log_callback_logTee(ggml_log_level level, const char * text, void * user_data) { + (void) level; + (void) user_data; + LOG_TEE("%s", text); +} + int main(int argc, char ** argv) { gpt_params params; g_params = ¶ms; @@ -113,6 +119,7 @@ int main(int argc, char ** argv) { log_set_target(log_filename_generator("main", "log")); LOG_TEE("Log start\n"); log_dump_cmdline(argc, argv); + llama_log_set(llama_log_callback_logTee, nullptr); #endif // LOG_DISABLE_LOGS // TODO: Dump params ? From 33c9892af58b7b161f2a532935dcccff8c8048c6 Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Thu, 30 Nov 2023 23:11:14 +0100 Subject: [PATCH 033/811] llava : ShareGPT4V compatibility (vision encoder only loading) (#4172) * ShareGPT4 compatibility (vision encoder only loading) Load only a CLIP vision encoder (as supplied by ShareGPT finetunes) Corrects the argument parsing for --img_mean and --img_std (which were previously not parsed but attempted to access) Defines defaults for img_mean and img_std which are equal to the llava 1.5 CLIP encoder, so you do not have to provide them * Update convert-image-encoder-to-gguf.py --- .../llava/convert-image-encoder-to-gguf.py | 52 +++++++++++++------ 1 file changed, 37 insertions(+), 15 deletions(-) diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index 2f5eef199..729aaef8f 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -5,7 +5,7 @@ import json import torch import numpy as np from gguf import * -from transformers import CLIPModel, CLIPProcessor +from transformers import CLIPModel, CLIPProcessor, CLIPVisionModel TEXT = "clip.text" VISION = "clip.vision" @@ -78,11 +78,19 @@ ap.add_argument("--text-only", action="store_true", required=False, help="Save a text-only model. It can't be used to encode images") ap.add_argument("--vision-only", action="store_true", required=False, help="Save a vision-only model. It can't be used to encode texts") +ap.add_argument("--clip_model_is_vision", action="store_true", required=False, + help="The clip model is a pure vision model (ShareGPT4V vision extract for example)") ap.add_argument("--llava-projector", help="Path to llava.projector file. 
If specified, save an image encoder for LLaVA models.") ap.add_argument("--image-mean", nargs=3, type=float, required=False, help="Override image mean values") ap.add_argument("--image-std", nargs=3, type=float, required=False, help="Override image std values") ap.add_argument("-o", "--output-dir", help="Directory to save GGUF files. Default is the original model directory", default=None) +# Example --image_mean 0.48145466 0.4578275 0.40821073 --image_std 0.26862954 0.26130258 0.27577711 +default_image_mean = [0.48145466, 0.4578275, 0.40821073] +default_image_std = [0.26862954, 0.26130258, 0.27577711] +ap.add_argument('--image_mean', type=float, nargs='+', help='Mean of the images for normalization (overrides processor) ', default=None) +ap.add_argument('--image_std', type=float, nargs='+', help='Standard deviation of the images for normalization (overrides processor)', default=None) +# with proper args = ap.parse_args() @@ -96,15 +104,22 @@ if args.use_f32: # output in the same directory as the model if output_dir is None dir_model = args.model_dir - -with open(dir_model + "/vocab.json", "r", encoding="utf-8") as f: - vocab = json.load(f) - tokens = [key for key in vocab] +if args.clip_model_is_vision: + vocab = None + tokens = None +else: + with open(dir_model + "/vocab.json", "r", encoding="utf-8") as f: + vocab = json.load(f) + tokens = [key for key in vocab] with open(dir_model + "/config.json", "r", encoding="utf-8") as f: config = json.load(f) - v_hparams = config["vision_config"] - t_hparams = config["text_config"] + if args.clip_model_is_vision: + v_hparams = config + t_hparams = None + else: + v_hparams = config["vision_config"] + t_hparams = config["text_config"] # possible data types # ftype == 0 -> float32 @@ -117,9 +132,12 @@ ftype = 1 if args.use_f32: ftype = 0 - -model = CLIPModel.from_pretrained(dir_model) -processor = CLIPProcessor.from_pretrained(dir_model) +if args.clip_model_is_vision: + model = CLIPVisionModel.from_pretrained(dir_model) + processor = None +else: + model = CLIPModel.from_pretrained(dir_model) + processor = CLIPProcessor.from_pretrained(dir_model) fname_middle = None has_text_encoder = True @@ -128,13 +146,13 @@ has_llava_projector = False if args.text_only: fname_middle = "text-" has_vision_encoder = False -elif args.vision_only: - fname_middle = "vision-" - has_text_encoder = False elif args.llava_projector is not None: fname_middle = "mmproj-" has_text_encoder = False has_llava_projector = True +elif args.vision_only: + fname_middle = "vision-" + has_text_encoder = False else: fname_middle = "" @@ -182,8 +200,12 @@ if has_vision_encoder: block_count = v_hparams["num_hidden_layers"] - 1 if has_llava_projector else v_hparams["num_hidden_layers"] fout.add_uint32(k(KEY_BLOCK_COUNT, VISION), block_count) - image_mean = processor.image_processor.image_mean if args.image_mean is None else args.image_mean - image_std = processor.image_processor.image_std if args.image_std is None else args.image_std + if processor is not None: + image_mean = processor.image_processor.image_mean if args.image_mean is None or args.image_mean == default_image_mean else args.image_mean + image_std = processor.image_processor.image_std if args.image_std is None or args.image_std == default_image_std else args.image_std + else: + image_mean = args.image_mean if args.image_mean is not None else default_image_mean + image_std = args.image_std if args.image_std is not None else default_image_std fout.add_array("clip.vision.image_mean", image_mean) 
fout.add_array("clip.vision.image_std", image_std) From 15f5d96037e597523b721aa39c874d69de2acf85 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Thu, 30 Nov 2023 17:23:08 -0500 Subject: [PATCH 034/811] build : fix build info generation and cleanup Makefile (#3920) * cmake : fix joining of REAL_GIT_DIR * fix includes with help from include-what-you-use * make : remove unneeded deps and add test-rope target * fix C includes in C++ source files * Revert "fix includes with help from include-what-you-use" This reverts commit 635e9fadfd516d4604a0fecf4a854bfb25ad17ae. --- .gitignore | 25 +++++++++++++------------ Makefile | 23 +++++++++++++---------- common/CMakeLists.txt | 7 ++++++- ggml-opencl.cpp | 12 +++++------- llama.cpp | 1 - 5 files changed, 37 insertions(+), 31 deletions(-) diff --git a/.gitignore b/.gitignore index 3806e05dd..58c483994 100644 --- a/.gitignore +++ b/.gitignore @@ -88,15 +88,16 @@ poetry.lock poetry.toml # Test binaries -tests/test-grammar-parser -tests/test-llama-grammar -tests/test-double-float -tests/test-grad0 -tests/test-opt -tests/test-quantize-fns -tests/test-quantize-perf -tests/test-sampling -tests/test-tokenizer-0-llama -tests/test-tokenizer-0-falcon -tests/test-tokenizer-1-llama -tests/test-tokenizer-1-bpe +/tests/test-grammar-parser +/tests/test-llama-grammar +/tests/test-double-float +/tests/test-grad0 +/tests/test-opt +/tests/test-quantize-fns +/tests/test-quantize-perf +/tests/test-sampling +/tests/test-tokenizer-0-llama +/tests/test-tokenizer-0-falcon +/tests/test-tokenizer-1-llama +/tests/test-tokenizer-1-bpe +/tests/test-rope diff --git a/Makefile b/Makefile index 95d85236f..22132ae23 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,7 @@ BUILD_TARGETS = \ TEST_TARGETS = \ tests/test-llama-grammar tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt \ tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0-llama \ - tests/test-tokenizer-0-falcon tests/test-tokenizer-1-llama tests/test-tokenizer-1-bpe + tests/test-tokenizer-0-falcon tests/test-tokenizer-1-llama tests/test-tokenizer-1-bpe tests/test-rope # Code coverage output files COV_TARGETS = *.gcno tests/*.gcno *.gcda tests/*.gcda *.gcov tests/*.gcov lcov-report gcovr-report @@ -648,7 +648,7 @@ beam-search: examples/beam-search/beam-search.cpp ggml.o llama.o $(COMMON_DEPS) finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -export-lora: examples/export-lora/export-lora.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +export-lora: examples/export-lora/export-lora.cpp ggml.o common/common.h $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) @@ -701,28 +701,28 @@ vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) q8dot: pocs/vdot/q8dot.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -tests/test-llama-grammar: tests/test-llama-grammar.cpp ggml.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +tests/test-llama-grammar: tests/test-llama-grammar.cpp ggml.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-grammar-parser: tests/test-grammar-parser.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +tests/test-grammar-parser: tests/test-grammar-parser.cpp ggml.o llama.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-double-float: 
tests/test-double-float.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-double-float: tests/test-double-float.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-grad0: tests/test-grad0.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-grad0: tests/test-grad0.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-opt: tests/test-opt.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-opt: tests/test-opt.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-quantize-fns: tests/test-quantize-fns.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-quantize-fns: tests/test-quantize-fns.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) @@ -737,5 +737,8 @@ tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMM tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +tests/test-rope: tests/test-rope.cpp ggml.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + tests/test-c.o: tests/test-c.c llama.h $(CC) $(CFLAGS) -c $(filter-out %.h,$^) -o $@ diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index 71891edc3..b5d5453d2 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -11,7 +11,12 @@ if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../.git") if(NOT IS_DIRECTORY "${GIT_DIR}") file(READ ${GIT_DIR} REAL_GIT_DIR_LINK) string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" REAL_GIT_DIR ${REAL_GIT_DIR_LINK}) - set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../${REAL_GIT_DIR}") + string(FIND "${REAL_GIT_DIR}" "/" SLASH_POS) + if (SLASH_POS EQUAL 0) + set(GIT_DIR "${REAL_GIT_DIR}") + else() + set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../${REAL_GIT_DIR}") + endif() endif() set(GIT_INDEX "${GIT_DIR}/index") diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 202bcb485..496f9cdca 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1,20 +1,18 @@ +#include "ggml.h" #include "ggml-opencl.h" #include #include +#include +#include +#include +#include #include #include -#include #define CL_TARGET_OPENCL_VERSION 110 #include -#include -#include -#include - -#include "ggml.h" - #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data #endif diff --git a/llama.cpp b/llama.cpp index 26754ef72..1e00ea4a9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -46,7 +46,6 @@ #endif #include #include - #include // for _fseeki64 #endif #include From d2809a3ba2780e00fce5a6149a7eda09f1c0e906 Mon Sep 17 00:00:00 2001 From: WillCorticesAI <150854901+WillCorticesAI@users.noreply.github.com> Date: Thu, 30 Nov 2023 17:23:44 -0500 Subject: [PATCH 035/811] make : fix Apple clang determination bug (#4272) Co-authored-by: Will Findley --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 22132ae23..25b113e0a 100644 --- a/Makefile +++ b/Makefile @@ -30,7 +30,7 
@@ ifeq '' '$(findstring clang,$(shell $(CC) --version))' CC_VER := $(shell $(CC) -dumpfullversion -dumpversion | awk -F. '{ printf("%02d%02d%02d", $$1, $$2, $$3) }') else CC_IS_CLANG=1 - ifeq '' '$(findstring Apple LLVM,$(shell $(CC) --version))' + ifeq '' '$(findstring Apple,$(shell $(CC) --version))' CC_IS_LLVM_CLANG=1 else CC_IS_APPLE_CLANG=1 From f43f09366dfd018e4568e23a232aaa8c4f7cfc78 Mon Sep 17 00:00:00 2001 From: Ziad Ben Hadj-Alouane Date: Thu, 30 Nov 2023 17:25:04 -0500 Subject: [PATCH 036/811] server : add single-client multi-prompt support (#4232) * * add multiprompt support * * cleanup * * more cleanup * * remove atomicity of id_gen, and change lock_guard to unique_lock on completion requests * * remove all references to mutex_multitasks * Update examples/server/server.cpp Co-authored-by: Jared Van Bortel * Update examples/server/server.cpp Co-authored-by: Jared Van Bortel * Update examples/server/server.cpp Co-authored-by: Jared Van Bortel * Update examples/server/server.cpp Co-authored-by: Jared Van Bortel * * change to set --------- Co-authored-by: Jared Van Bortel --- examples/server/server.cpp | 139 ++++++++++++++++++++++++++++++++++--- 1 file changed, 128 insertions(+), 11 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 50f124b13..5edb3678e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -155,15 +155,23 @@ struct task_server { json data; bool infill_mode = false; bool embedding_mode = false; + int multitask_id = -1; }; struct task_result { int id; + int multitask_id = -1; bool stop; bool error; json result_json; }; +struct task_multi { + int id; + std::set subtasks_remaining{}; + std::vector results{}; +}; + // TODO: can become bool if we can't find use of more states enum slot_state { @@ -406,6 +414,9 @@ struct llama_client_slot double t_prompt_processing; // ms double t_token_generation; // ms + // multitasks + int multitask_id = -1; + void reset() { num_prompt_tokens = 0; generated_text = ""; @@ -529,7 +540,8 @@ struct llama_server_context std::vector queue_tasks; std::vector queue_results; - std::mutex mutex_tasks; + std::vector queue_multitasks; + std::mutex mutex_tasks; // also guards id_gen, and queue_multitasks std::mutex mutex_results; ~llama_server_context() @@ -1112,17 +1124,40 @@ struct llama_server_context return slot.images.size() > 0; } - void send_error(int id, std::string error) + void send_error(task_server& task, std::string error) { std::lock_guard lock(mutex_results); task_result res; - res.id = id; + res.id = task.id; + res.multitask_id = task.multitask_id; res.stop = false; res.error = true; res.result_json = { { "content", error } }; queue_results.push_back(res); } + void add_multi_task(int id, std::vector& sub_ids) + { + std::lock_guard lock(mutex_tasks); + task_multi multi; + multi.id = id; + std::copy(sub_ids.begin(), sub_ids.end(), std::inserter(multi.subtasks_remaining, multi.subtasks_remaining.end())); + queue_multitasks.push_back(multi); + } + + void update_multi_task(int multitask_id, int subtask_id, task_result& result) + { + std::lock_guard lock(mutex_tasks); + for (auto& multitask : queue_multitasks) + { + if (multitask.id == multitask_id) + { + multitask.subtasks_remaining.erase(subtask_id); + multitask.results.push_back(result); + } + } + } + json get_model_props() { return get_formated_generation(slots[0]); @@ -1167,6 +1202,7 @@ struct llama_server_context std::lock_guard lock(mutex_results); task_result res; res.id = slot.task_id; + res.multitask_id = 
slot.multitask_id; res.error = false; res.stop = false; @@ -1206,6 +1242,7 @@ struct llama_server_context std::lock_guard lock(mutex_results); task_result res; res.id = slot.task_id; + res.multitask_id = slot.multitask_id; res.error = false; res.stop = true; @@ -1251,6 +1288,12 @@ struct llama_server_context res.result_json["model"] = slot.oaicompat_model; } + // parent multitask, if any, needs to be updated + if (slot.multitask_id != -1) + { + update_multi_task(slot.multitask_id, slot.task_id, res); + } + queue_results.push_back(res); } @@ -1259,6 +1302,7 @@ struct llama_server_context std::lock_guard lock(mutex_results); task_result res; res.id = slot.task_id; + res.multitask_id = slot.multitask_id; res.error = false; res.stop = true; @@ -1285,9 +1329,9 @@ struct llama_server_context queue_results.push_back(res); } - int request_completion(json data, bool infill, bool embedding) + int request_completion(json data, bool infill, bool embedding, int multitask_id) { - std::lock_guard lock(mutex_tasks); + std::unique_lock lock(mutex_tasks); task_server task; task.id = id_gen++; task.target_id = 0; @@ -1295,6 +1339,16 @@ struct llama_server_context task.infill_mode = infill; task.embedding_mode = embedding; task.type = COMPLETION_TASK; + task.multitask_id = multitask_id; + + // when a completion task's prompt array is not a singleton, we split it into multiple requests + if (task.data.at("prompt").size() > 1) + { + lock.unlock(); // entering new func scope + return split_multiprompt_task(task); + } + + // otherwise, it's a single-prompt task, we actually queue it queue_tasks.push_back(task); return task.id; } @@ -1313,8 +1367,17 @@ struct llama_server_context for (int i = 0; i < (int) queue_results.size(); i++) { + // for now, tasks that have associated parent multitasks just get erased once multitask picks up the result + if (queue_results[i].multitask_id == task_id) + { + update_multi_task(task_id, queue_results[i].id, queue_results[i]); + queue_results.erase(queue_results.begin() + i); + continue; + } + if (queue_results[i].id == task_id) { + assert(queue_results[i].multitask_id == -1); task_result res = queue_results[i]; queue_results.erase(queue_results.begin() + i); return res; @@ -1404,6 +1467,27 @@ struct llama_server_context queue_tasks.push_back(task); } + int split_multiprompt_task(task_server& multiprompt_task) + { + auto prompt_count = multiprompt_task.data.at("prompt").size(); + assert(prompt_count > 1); + + int multitask_id = id_gen++; + std::vector subtask_ids(prompt_count); + for (int i = 0; i < prompt_count; i++) + { + json subtask_data = multiprompt_task.data; + subtask_data["prompt"] = subtask_data["prompt"][i]; + + // subtasks inherit everything else (infill mode, embedding mode, etc.) 
+ subtask_ids[i] = request_completion(subtask_data, multiprompt_task.infill_mode, multiprompt_task.embedding_mode, multitask_id); + } + + // queue up the multitask so we can track its subtask progression + add_multi_task(multitask_id, subtask_ids); + return multitask_id; + } + void process_tasks() { std::lock_guard lock(mutex_tasks); @@ -1419,7 +1503,7 @@ struct llama_server_context { LOG_TEE("slot unavailable\n"); // send error result - send_error(task.id, "slot unavailable"); + send_error(task, "slot unavailable"); return; } @@ -1433,11 +1517,12 @@ struct llama_server_context slot->infill = task.infill_mode; slot->embedding = task.embedding_mode; slot->task_id = task.id; + slot->multitask_id = task.multitask_id; if (!launch_slot_with_data(slot, task.data)) { // send error result - send_error(task.id, "internal_error"); + send_error(task, "internal_error"); break; } } break; @@ -1453,6 +1538,38 @@ struct llama_server_context } break; } } + + // remove finished multitasks from the queue of multitasks, and add the corresponding result to the result queue + auto queue_iterator = queue_multitasks.begin(); + while (queue_iterator != queue_multitasks.end()) + { + if (queue_iterator->subtasks_remaining.empty()) + { + // all subtasks done == multitask is done + task_result aggregate_result; + aggregate_result.id = queue_iterator->id; + aggregate_result.stop = true; + aggregate_result.error = false; + + // collect json results into one json result + std::vector result_jsons; + for (auto& subres : queue_iterator->results) + { + result_jsons.push_back(subres.result_json); + aggregate_result.error = aggregate_result.error && subres.error; + } + aggregate_result.result_json = json{ "results", result_jsons }; + + std::lock_guard lock(mutex_results); + queue_results.push_back(aggregate_result); + + queue_iterator = queue_multitasks.erase(queue_iterator); + } + else + { + ++queue_iterator; + } + } } bool update_slots() { @@ -2596,7 +2713,7 @@ int main(int argc, char **argv) svr.Post("/completion", [&llama](const httplib::Request &req, httplib::Response &res) { json data = json::parse(req.body); - const int task_id = llama.request_completion(data, false, false); + const int task_id = llama.request_completion(data, false, false, -1); if (!json_value(data, "stream", false)) { std::string completion_text; task_result result = llama.next_result(task_id); @@ -2685,7 +2802,7 @@ int main(int argc, char **argv) { json data = oaicompat_completion_params_parse(json::parse(req.body)); - const int task_id = llama.request_completion(data, false, false); + const int task_id = llama.request_completion(data, false, false, -1); if (!json_value(data, "stream", false)) { std::string completion_text; @@ -2754,7 +2871,7 @@ int main(int argc, char **argv) svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res) { json data = json::parse(req.body); - const int task_id = llama.request_completion(data, true, false); + const int task_id = llama.request_completion(data, true, false, -1); if (!json_value(data, "stream", false)) { std::string completion_text; task_result result = llama.next_result(task_id); @@ -2858,7 +2975,7 @@ int main(int argc, char **argv) { prompt = ""; } - const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true); + const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true, -1); task_result result = llama.next_result(task_id); return res.set_content(result.result_json.dump(), "application/json"); }); 
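The multi-prompt support added in the patch above is driven entirely by the shape of the `prompt` field in a `/completion` request: a JSON array with more than one element is split into subtasks, and the aggregated result is returned once every subtask finishes. The sketch below is an illustrative client only, not part of the patch — it assumes a llama.cpp server running with default settings on `localhost:8080`, the third-party `requests` package, and the response layout produced by the `aggregate_result` construction above, any of which may differ in a given build.

```python
# Minimal sketch of a multi-prompt request against the /completion endpoint.
# Assumptions: a llama.cpp server listening on localhost:8080 (default settings),
# `pip install requests`, and a response layout that follows the aggregate_result
# construction in the patch above; these may change in later revisions.
import requests

payload = {
    # A prompt array with more than one element triggers the multitask split;
    # every other field is copied verbatim into each subtask.
    "prompt": ["Hello, my name is", "The capital of France is"],
    "n_predict": 32,   # tokens to generate per prompt
    "stream": False,   # ask for one aggregated, non-streamed response
}

resp = requests.post("http://localhost:8080/completion", json=payload, timeout=600)
resp.raise_for_status()

# The aggregated JSON carries one entry per prompt (grouped as "results" above).
print(resp.json())
```

Because each subtask copies the parent request's data unchanged, per-prompt parameter overrides (a different `n_predict`, sampling settings, and so on) still require separate requests.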
From 1d144112c0fbbb4ecc07dbcf4f05a380148bd6de Mon Sep 17 00:00:00 2001 From: Ziad Ben Hadj-Alouane Date: Thu, 30 Nov 2023 17:25:49 -0500 Subject: [PATCH 037/811] server : add --log-disable to disable logging to file (#4260) * * add --log-disable to disable logging to file in the server example * * typo fix --- examples/server/server.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 5edb3678e..a65344b92 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1961,6 +1961,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -spf FNAME, --system-prompt-file FNAME\n"); printf(" Set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); + printf(" --log-disable disables logging to a file.\n"); printf("\n"); } @@ -2315,6 +2316,11 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.mmproj = argv[i]; } + else if (arg == "--log-disable") + { + log_set_target(stdout); + LOG_INFO("logging to file is disabled.", {}); + } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); From ef47ec18da469423c276b683dd9b5741cee7023e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Dec 2023 10:51:24 +0200 Subject: [PATCH 038/811] ggml : add ggml_soft_max_ext (#4256) * metal : implement soft_max_ext * cuda : implement soft_max_ext * ggml : implement soft_max_ext (CPU) * batched-bench : print threads ggml-ci * metal : simplify soft_max encoding ggml-ci * cuda : use 512 threads for soft_max instead of 32 * ggml : update soft max cpu * cuda : do warp-based block reduce * cuda : increase max block size to 1024 * cuda : fix warp reduction initialization of shared mem * metal : warp-based reduction for soft max kernel * metal : warp-based reduce for rms_norm * metal : simplify soft max kernel ggml-ci * alloc : fix build with debug --- examples/batched-bench/batched-bench.cpp | 2 +- ggml-alloc.c | 2 +- ggml-cuda.cu | 130 +++++++++----- ggml-metal.m | 43 +++-- ggml-metal.metal | 210 +++++++++++------------ ggml.c | 77 +++++++-- ggml.h | 8 + llama.cpp | 35 ++-- 8 files changed, 311 insertions(+), 196 deletions(-) diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 533c55c17..57596ed98 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -155,7 +155,7 @@ int main(int argc, char ** argv) { } LOG_TEE("\n"); - LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq); + LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %d, n_threads_batch = %d\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); LOG_TEE("\n"); LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); diff --git a/ggml-alloc.c b/ggml-alloc.c index cdfe4caf6..0d4e12ae9 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -137,7 +137,7 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { #ifdef GGML_ALLOCATOR_DEBUG add_allocated_tensor(alloc, tensor); - size_t cur_max = (char*)addr - (char*)alloc->data + size; + size_t cur_max = (char*)addr - (char*)alloc->base + 
size; if (cur_max > alloc->max_size) { printf("max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); for (int i = 0; i < 1024; i++) { diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 5b80e4ae3..9019a849f 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -443,6 +443,7 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define CUDA_SCALE_BLOCK_SIZE 256 #define CUDA_CLAMP_BLOCK_SIZE 256 #define CUDA_ROPE_BLOCK_SIZE 256 +#define CUDA_SOFT_MAX_BLOCK_SIZE 1024 #define CUDA_ALIBI_BLOCK_SIZE 32 #define CUDA_DIAG_MASK_INF_BLOCK_SIZE 32 #define CUDA_QUANTIZE_BLOCK_SIZE 256 @@ -501,6 +502,31 @@ static size_t g_scratch_offset = 0; static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr}; +static __device__ __forceinline__ float warp_reduce_sum(float x) { +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + x += __shfl_xor_sync(0xffffffff, x, mask, 32); + } + return x; +} + +static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + a.x += __shfl_xor_sync(0xffffffff, a.x, mask, 32); + a.y += __shfl_xor_sync(0xffffffff, a.y, mask, 32); + } + return a; +} + +static __device__ __forceinline__ float warp_reduce_max(float x) { +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + x = fmaxf(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); + } + return x; +} + static __global__ void add_f32(const float * x, const float * y, float * dst, const int kx, const int ky) { const int i = blockDim.x*blockIdx.x + threadIdx.x; @@ -577,15 +603,6 @@ static __global__ void sqr_f32(const float * x, float * dst, const int k) { dst[i] = x[i] * x[i]; } -static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - a.x += __shfl_xor_sync(0xffffffff, a.x, mask, 32); - a.y += __shfl_xor_sync(0xffffffff, a.y, mask, 32); - } - return a; -} - template static __global__ void norm_f32(const float * x, float * dst, const int ncols) { const int row = blockIdx.x*blockDim.y + threadIdx.y; @@ -624,14 +641,6 @@ static __global__ void norm_f32(const float * x, float * dst, const int ncols) { } } -static __device__ __forceinline__ float warp_reduce_sum(float x) { -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - x += __shfl_xor_sync(0xffffffff, x, mask, 32); - } - return x; -} - template static __global__ void rms_norm_f32(const float * x, float * dst, const int ncols, const float eps) { const int row = blockIdx.x*blockDim.y + threadIdx.y; @@ -4717,45 +4726,74 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int dst[i] = x[i] - (col > n_past + row % rows_per_channel) * INT_MAX; // equivalent within rounding error but slightly faster on GPU } -// the CUDA soft max implementation differs from the CPU implementation -// instead of doubles floats are used -static __global__ void soft_max_f32(const float * x, float * dst, const int ncols) { - const int row = blockDim.x*blockIdx.x + threadIdx.x; - const int block_size = blockDim.y; - const int tid = threadIdx.y; +static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols, const int nrows_y, const float scale) { + const int tid = threadIdx.x; + const int rowx = blockIdx.x; + const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension + + const int block_size = blockDim.x; + + const int warp_id = threadIdx.x / WARP_SIZE; + const int lane_id = threadIdx.x % WARP_SIZE; + + __shared__ 
float buf[CUDA_SOFT_MAX_BLOCK_SIZE/WARP_SIZE]; float max_val = -INFINITY; for (int col = tid; col < ncols; col += block_size) { - const int i = row*ncols + col; - max_val = max(max_val, x[i]); + const int ix = rowx*ncols + col; + const int iy = rowy*ncols + col; + max_val = max(max_val, x[ix]*scale + (y ? y[iy] : 0.0f)); } // find the max value in the block -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - max_val = max(max_val, __shfl_xor_sync(0xffffffff, max_val, mask, 32)); + max_val = warp_reduce_max(max_val); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf[lane_id] = -INFINITY; + } + __syncthreads(); + + if (lane_id == 0) { + buf[warp_id] = max_val; + } + __syncthreads(); + + max_val = buf[lane_id]; + max_val = warp_reduce_max(max_val); } float tmp = 0.f; for (int col = tid; col < ncols; col += block_size) { - const int i = row*ncols + col; - const float val = expf(x[i] - max_val); + const int ix = rowx*ncols + col; + const int iy = rowy*ncols + col; + const float val = expf((x[ix]*scale + (y ? y[iy] : 0.0f)) - max_val); tmp += val; - dst[i] = val; + dst[ix] = val; } - // sum up partial sums -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); + // find the sum of exps in the block + tmp = warp_reduce_sum(tmp); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf[lane_id] = 0.f; + } + __syncthreads(); + + if (lane_id == 0) { + buf[warp_id] = tmp; + } + __syncthreads(); + + tmp = buf[lane_id]; + tmp = warp_reduce_sum(tmp); } const float inv_tmp = 1.f / tmp; for (int col = tid; col < ncols; col += block_size) { - const int i = row*ncols + col; + const int i = rowx*ncols + col; dst[i] *= inv_tmp; } } @@ -5792,10 +5830,12 @@ static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols diag_mask_inf_f32<<>>(x, dst, ncols_x, rows_per_channel, n_past); } -static void soft_max_f32_cuda(const float * x, float * dst, const int ncols_x, const int nrows_x, cudaStream_t stream) { - const dim3 block_dims(1, WARP_SIZE, 1); +static void soft_max_f32_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { + int nth = WARP_SIZE; + while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; + const dim3 block_dims(nth, 1, 1); const dim3 block_nums(nrows_x, 1, 1); - soft_max_f32<<>>(x, dst, ncols_x); + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); } static void im2col_f32_f16_cuda(const float * x, half * dst, @@ -6846,14 +6886,18 @@ inline void ggml_cuda_op_soft_max( GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); + GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional + const int64_t ne00 = src0->ne[0]; - const int64_t nrows = ggml_nrows(src0); + const int64_t nrows_x = ggml_nrows(src0); + const int64_t nrows_y = src1 ? ggml_nrows(src1) : 1; - soft_max_f32_cuda(src0_dd, dst_dd, ne00, nrows, main_stream); + float scale = 1.0f; + memcpy(&scale, dst->op_params, sizeof(float)); + + soft_max_f32_cuda(src0_dd, src1 ? 
src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); - (void) src1; (void) dst; - (void) src1_dd; } inline void ggml_cuda_op_scale( diff --git a/ggml-metal.m b/ggml-metal.m index d52a1c3c4..6cfacf64f 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1028,20 +1028,27 @@ void ggml_metal_graph_compute( int nth = 32; // SIMD width if (ne00%4 == 0) { + while (nth < ne00/4 && nth < 256) { + nth *= 2; + } [encoder setComputePipelineState:ctx->pipeline_soft_max_4]; } else { - do { + while (nth < ne00 && nth < 1024) { nth *= 2; - } while (nth <= ne00 && nth <= 1024); - nth /= 2; + } [encoder setComputePipelineState:ctx->pipeline_soft_max]; } - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setThreadgroupMemoryLength:GGML_PAD(nth/32*sizeof(float), 16) atIndex:0]; + + const float scale = ((float *) dst->op_params)[0]; + + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1351,15 +1358,19 @@ void ggml_metal_graph_compute( float eps; memcpy(&eps, dst->op_params, sizeof(float)); - const int nth = MIN(512, ne00); + int nth = 32; // SIMD width + + while (nth < ne00/4 && nth < 1024) { + nth *= 2; + } [encoder setComputePipelineState:ctx->pipeline_rms_norm]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; - [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:GGML_PAD(nth/32*sizeof(float), 16) atIndex:0]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; const int64_t nrows = ggml_nrows(src0); diff --git a/ggml-metal.metal b/ggml-metal.metal index 5d1357cd7..9a79f815f 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -39,6 +39,8 @@ typedef struct { int8_t qs[QK8_0]; // quants } block_q8_0; +#define N_SIMDWIDTH 32 // assuming SIMD group size is 32 + // general-purpose kernel for addition of two tensors // pros: works for non-contiguous tensors, supports broadcast across dims 1, 2 and 3 // cons: not very efficient @@ -180,10 +182,12 @@ kernel void kernel_gelu( kernel void kernel_soft_max( device const float * src0, + device const float * src1, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, + constant float & scale, threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint 
tpitg[[thread_position_in_threadgroup]], @@ -194,73 +198,77 @@ kernel void kernel_soft_max( const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); - device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; - device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + device const float * pmask = src1 ? src1 + i01*ne00 : nullptr; + device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; // parallel max - float lmax = tpitg < ne00 ? psrc0[tpitg] : -INFINITY; + float lmax = -INFINITY; - for (int i00 = tpitg + ntg; i00 < ne00; i00 += ntg) { - lmax = MAX(lmax, psrc0[i00]); + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)); } - float max = simd_max(lmax); - if (tiisg == 0) { - buf[sgitg] = max; + // find the max value in the block + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); } - threadgroup_barrier(mem_flags::mem_threadgroup); - - // broadcast, simd group number is ntg / 32 - for (uint i = ntg / 32 / 2; i > 0; i /= 2) { - if (tpitg < i) { - buf[tpitg] = MAX(buf[tpitg], buf[tpitg + i]); - } - } - - threadgroup_barrier(mem_flags::mem_threadgroup); - - max = buf[0]; - // parallel sum float lsum = 0.0f; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - const float exp_psrc0 = exp(psrc0[i00] - max); + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)) - max_val); lsum += exp_psrc0; - // Remember the result of exp here. exp is expensive, so we really do not - // wish to compute it twice. 
pdst[i00] = exp_psrc0; } float sum = simd_sum(lsum); - if (tiisg == 0) { - buf[sgitg] = sum; + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); } - threadgroup_barrier(mem_flags::mem_threadgroup); - - // broadcast, simd group number is ntg / 32 - for (uint i = ntg / 32 / 2; i > 0; i /= 2) { - if (tpitg < i) { - buf[tpitg] += buf[tpitg + i]; - } - } - - threadgroup_barrier(mem_flags::mem_threadgroup); - - sum = buf[0]; + const float inv_sum = 1.0f/sum; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - pdst[i00] /= sum; + pdst[i00] *= inv_sum; } } kernel void kernel_soft_max_4( device const float * src0, + device const float * src1, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, + constant float & scale, threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], @@ -271,64 +279,68 @@ kernel void kernel_soft_max_4( const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); - device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); - device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const float4 * pmask = src1 ? (device const float4 *)(src1 + i01*ne00) : nullptr; + device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); // parallel max - float4 lmax4 = tpitg < ne00/4 ? psrc4[tpitg] : -INFINITY; + float4 lmax4 = -INFINITY; - for (int i00 = tpitg + ntg; i00 < ne00/4; i00 += ntg) { - lmax4 = fmax(lmax4, psrc4[i00]); + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f)); } const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); - float max = simd_max(lmax); - if (tiisg == 0) { - buf[sgitg] = max; + + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); } - threadgroup_barrier(mem_flags::mem_threadgroup); - - // broadcast, simd group number is ntg / 32 - for (uint i = ntg / 32 / 2; i > 0; i /= 2) { - if (tpitg < i) { - buf[tpitg] = MAX(buf[tpitg], buf[tpitg + i]); - } - } - - threadgroup_barrier(mem_flags::mem_threadgroup); - - max = buf[0]; - // parallel sum float4 lsum4 = 0.0f; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - const float4 exp_psrc4 = exp(psrc4[i00] - max); + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? 
pmask[i00] : 0.0f)) - max_val); lsum4 += exp_psrc4; pdst4[i00] = exp_psrc4; } const float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; float sum = simd_sum(lsum); - if (tiisg == 0) { - buf[sgitg] = sum; + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); } - threadgroup_barrier(mem_flags::mem_threadgroup); - - // broadcast, simd group number is ntg / 32 - for (uint i = ntg / 32 / 2; i > 0; i /= 2) { - if (tpitg < i) { - buf[tpitg] += buf[tpitg + i]; - } - } - - threadgroup_barrier(mem_flags::mem_threadgroup); - - sum = buf[0]; + const float inv_sum = 1.0f/sum; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - pdst4[i00] /= sum; + pdst4[i00] *= inv_sum; } } @@ -435,14 +447,13 @@ kernel void kernel_rms_norm( constant int64_t & ne00, constant uint64_t & nb01, constant float & eps, - threadgroup float * sum [[threadgroup(0)]], + threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], uint sgitg[[simdgroup_index_in_threadgroup]], uint tiisg[[thread_index_in_simdgroup]], uint ntg[[threads_per_threadgroup]]) { - device const float4 * x = (device const float4 *) ((device const char *) src0 + tgpig*nb01); - device const float * x_scalar = (device const float *) x; + device const float4 * x = (device const float4 *) ((device const char *) src0 + tgpig*nb01); float4 sumf = 0; float all_sum = 0; @@ -453,40 +464,30 @@ kernel void kernel_rms_norm( } all_sum = sumf[0] + sumf[1] + sumf[2] + sumf[3]; all_sum = simd_sum(all_sum); - if (tiisg == 0) { - sum[sgitg] = all_sum; - } - - threadgroup_barrier(mem_flags::mem_threadgroup); - - // broadcast, simd group number is ntg / 32 - for (uint i = ntg / 32 / 2; i > 0; i /= 2) { - if (tpitg < i) { - sum[tpitg] += sum[tpitg + i]; - } - } - if (tpitg == 0) { - for (int i = 4 * (ne00 / 4); i < ne00; i++) { - sum[0] += x_scalar[i]; + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; } - sum[0] /= ne00; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = all_sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + all_sum = buf[tiisg]; + all_sum = simd_sum(all_sum); } - threadgroup_barrier(mem_flags::mem_threadgroup); - - const float mean = sum[0]; + const float mean = all_sum/ne00; const float scale = 1.0f/sqrt(mean + eps); device float4 * y = (device float4 *) (dst + tgpig*ne00); - device float * y_scalar = (device float *) y; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { y[i00] = x[i00] * scale; } - if (tpitg == 0) { - for (int i00 = 4 * (ne00 / 4); i00 < ne00; i00++) { - y_scalar[i00] = x_scalar[i00] * scale; - } - } } // function for calculate inner product between half a q4_0 block and 16 floats (yl), sumy is SUM(yl[i]) @@ -576,7 +577,6 @@ inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thre // putting them in the kernel cause a significant performance penalty #define N_DST 4 // each SIMD group works on 4 rows #define N_SIMDGROUP 2 // number of SIMD groups in a thread group -#define N_SIMDWIDTH 32 // assuming SIMD group size is 32 //Note: This is a template, but strictly speaking it only applies to // quantizations where the block size is 32. 
It also does not // giard against the number of rows not being divisible by diff --git a/ggml.c b/ggml.c index c522a101f..e2687ef4f 100644 --- a/ggml.c +++ b/ggml.c @@ -4826,7 +4826,17 @@ struct ggml_tensor * ggml_diag_mask_zero_inplace( static struct ggml_tensor * ggml_soft_max_impl( struct ggml_context * ctx, struct ggml_tensor * a, + struct ggml_tensor * mask, + float scale, bool inplace) { + GGML_ASSERT(ggml_is_contiguous(a)); + if (mask) { + GGML_ASSERT(ggml_is_contiguous(mask)); + GGML_ASSERT(mask->ne[2] == 1); + GGML_ASSERT(mask->ne[3] == 1); + GGML_ASSERT(ggml_can_repeat_rows(mask, a)); + } + bool is_node = false; if (a->grad) { @@ -4835,9 +4845,13 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + float params[] = { scale }; + ggml_set_op_params(result, params, sizeof(params)); + result->op = GGML_OP_SOFT_MAX; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; + result->src[1] = mask; return result; } @@ -4845,13 +4859,21 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * ggml_soft_max( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, false); + return ggml_soft_max_impl(ctx, a, NULL, 1.0f, false); } struct ggml_tensor * ggml_soft_max_inplace( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, true); + return ggml_soft_max_impl(ctx, a, NULL, 1.0f, true); +} + +struct ggml_tensor * ggml_soft_max_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * mask, + float scale) { + return ggml_soft_max_impl(ctx, a, mask, scale, false); } // ggml_soft_max_back @@ -10551,20 +10573,25 @@ static void ggml_compute_forward_diag_mask_zero( static void ggml_compute_forward_soft_max_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - struct ggml_tensor * dst) { - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(dst)); - GGML_ASSERT(ggml_are_same_shape(src0, dst)); + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + assert(ggml_is_contiguous(dst)); + assert(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } + float scale = 1.0f; + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + // TODO: handle transposed/permuted matrices const int ith = params->ith; const int nth = params->nth; + const int64_t ne11 = src1 ? src1->ne[1] : 1; + const int nc = src0->ne[0]; const int nr = ggml_nrows(src0); @@ -10575,29 +10602,40 @@ static void ggml_compute_forward_soft_max_f32( const int ir0 = dr*ith; const int ir1 = MIN(ir0 + dr, nr); + float * wp = (float *) params->wdata + (nc + CACHE_LINE_SIZE_F32) * ith; + for (int i1 = ir0; i1 < ir1; i1++) { - float *sp = (float *)((char *) src0->data + i1*src0->nb[1]); - float *dp = (float *)((char *) dst->data + i1*dst->nb[1]); + float * sp = (float *)((char *) src0->data + i1*src0->nb[1]); + float * dp = (float *)((char *) dst->data + i1*dst->nb[1]); + + // broadcast the mask across rows + float * mp = src1 ? 
(float *)((char *) src1->data + (i1%ne11)*src1->nb[1]) : NULL; + + ggml_vec_cpy_f32 (nc, wp, sp); + ggml_vec_scale_f32(nc, wp, scale); + if (mp) { + ggml_vec_acc_f32(nc, wp, mp); + } #ifndef NDEBUG for (int i = 0; i < nc; ++i) { //printf("p[%d] = %f\n", i, p[i]); - assert(!isnan(sp[i])); + assert(!isnan(wp[i])); } #endif float max = -INFINITY; - ggml_vec_max_f32(nc, &max, sp); + ggml_vec_max_f32(nc, &max, wp); ggml_float sum = 0.0; uint16_t scvt; for (int i = 0; i < nc; i++) { - if (sp[i] == -INFINITY) { + if (wp[i] == -INFINITY) { dp[i] = 0.0f; } else { - // const float val = (sp[i] == -INFINITY) ? 0.0 : exp(sp[i] - max); - ggml_fp16_t s = GGML_FP32_TO_FP16(sp[i] - max); + // const float val = (wp[i] == -INFINITY) ? 0.0 : exp(wp[i] - max); + ggml_fp16_t s = GGML_FP32_TO_FP16(wp[i] - max); memcpy(&scvt, &s, sizeof(scvt)); const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt]); sum += (ggml_float)val; @@ -10622,11 +10660,12 @@ static void ggml_compute_forward_soft_max_f32( static void ggml_compute_forward_soft_max( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - struct ggml_tensor * dst) { + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_f32(params, src0, dst); + ggml_compute_forward_soft_max_f32(params, src0, src1, dst); } break; default: { @@ -13863,7 +13902,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_SOFT_MAX: { - ggml_compute_forward_soft_max(params, tensor->src[0], tensor); + ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor); } break; case GGML_OP_SOFT_MAX_BACK: { @@ -15899,6 +15938,12 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } } break; + case GGML_OP_SOFT_MAX: + { + n_tasks = MIN(MIN(4, n_threads), ggml_nrows(node->src[0])); + + cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; + } break; case GGML_OP_CONV_TRANSPOSE_1D: { GGML_ASSERT(node->src[0]->ne[3] == 1); diff --git a/ggml.h b/ggml.h index 4d6d4edfd..2f6787d4e 100644 --- a/ggml.h +++ b/ggml.h @@ -1282,6 +1282,14 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); + // fused soft_max(a*scale + mask) + // mask is optional + GGML_API struct ggml_tensor * ggml_soft_max_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * mask, + float scale); + GGML_API struct ggml_tensor * ggml_soft_max_back( struct ggml_context * ctx, struct ggml_tensor * a, diff --git a/llama.cpp b/llama.cpp index 1e00ea4a9..e74fd7234 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3704,23 +3704,29 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); cb(kq, "kq", il); - kq = ggml_scale(ctx, kq, kq_scale); - cb(kq, "kq_scaled", il); - if (max_alibi_bias > 0.0f) { - // TODO: n_head or n_head_kv - // TODO: K-shift is likely not working - // TODO: change to ggml_add - kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias); - cb(kq, "kq_scaled_alibi", il); + // temporary branch until we figure out how to handle ggml_alibi through ggml_add + kq = ggml_scale(ctx, kq, kq_scale); + cb(kq, "kq_scaled", il); + + if (max_alibi_bias > 0.0f) { + // TODO: n_head or n_head_kv + // TODO: K-shift is likely not working + // TODO: change to ggml_add + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias); + cb(kq, "kq_scaled_alibi", il); + } + + 
kq = ggml_add(ctx, kq, kq_mask); + cb(kq, "kq_masked", il); + + kq = ggml_soft_max(ctx, kq); + cb(kq, "kq_soft_max", il); + } else { + kq = ggml_soft_max_ext(ctx, kq, kq_mask, 1.0f/sqrtf(float(n_embd_head))); + cb(kq, "kq_soft_max_ext", il); } - kq = ggml_add(ctx, kq, kq_mask); - cb(kq, "kq_masked", il); - - kq = ggml_soft_max(ctx, kq); - cb(kq, "kq_soft_max", il); - // split cached v into n_head heads struct ggml_tensor * v = ggml_view_3d(ctx, kv.v, @@ -5041,6 +5047,7 @@ static const std::unordered_map k_offload_map { "kq_scaled_alibi", OFFLOAD_FUNC_KQ }, { "kq_masked", OFFLOAD_FUNC_KQ }, { "kq_soft_max", OFFLOAD_FUNC_V }, + { "kq_soft_max_ext", OFFLOAD_FUNC_V }, { "v", OFFLOAD_FUNC_V }, { "kqv", OFFLOAD_FUNC_V }, { "kqv_merged", OFFLOAD_FUNC_V }, From 8d6d9f033b8101f929e445cf45b39e1557ca7934 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 1 Dec 2023 10:41:56 +0100 Subject: [PATCH 039/811] py : add requirements file for convert-hf-to-gguf.py (#4277) This commit adds a requirements file for the convert-hf-to-gguf.py script, and also add the torch and transformers packages to it. The motivation for this is that currently running convert-hf-to-gguf.py will produce the following error: ```console $ python3 -m venv venv $ source venv/bin/activate (venv) $ pip install -r requirements.txt Collecting numpy==1.24.4 Collecting sentencepiece==0.1.98 Collecting gguf>=0.1.0 Installing collected packages: sentencepiece, numpy, gguf Successfully installed gguf-0.5.1 numpy-1.24.4 sentencepiece-0.1.98 (venv) $ python convert-hf-to-gguf.py --help Traceback (most recent call last): File "llama.cpp/convert-hf-to-gguf.py", line 16, in import torch ModuleNotFoundError: No module named 'torch' ``` With this commit, and using requirements-hf-to-gguf.txt instead of requirements.txt, the script can be run and shows the help output. Signed-off-by: Daniel Bevenius --- requirements-hf-to-gguf.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 requirements-hf-to-gguf.txt diff --git a/requirements-hf-to-gguf.txt b/requirements-hf-to-gguf.txt new file mode 100644 index 000000000..f4600539e --- /dev/null +++ b/requirements-hf-to-gguf.txt @@ -0,0 +1,3 @@ +-r requirements.txt +torch==2.1.1 +transformers==4.35.2 From 880f57973b8e0091d0f9f50eb5ab4cd4e31582ca Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Dec 2023 18:42:11 +0200 Subject: [PATCH 040/811] llama : fix integer overflow during quantization (#4284) happens with multi-threaded quantization of Qwen-72B ggml-ci --- llama.cpp | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/llama.cpp b/llama.cpp index e74fd7234..6fbfeca54 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7655,18 +7655,21 @@ static void llama_convert_tensor_internal( return; } - auto block_size = tensor->type == GGML_TYPE_F16 ? 1 : (size_t)ggml_blck_size(tensor->type); - auto block_size_bytes = ggml_type_size(tensor->type); + size_t block_size = tensor->type == GGML_TYPE_F16 ? 
1 : (size_t)ggml_blck_size(tensor->type); + size_t block_size_bytes = ggml_type_size(tensor->type); GGML_ASSERT(nelements % block_size == 0); - auto nblocks = nelements / block_size; - auto blocks_per_thread = nblocks / nthread; - auto spare_blocks = nblocks - (blocks_per_thread * nthread); // if blocks aren't divisible by thread count + size_t nblocks = nelements / block_size; + size_t blocks_per_thread = nblocks / nthread; + size_t spare_blocks = nblocks - (blocks_per_thread * nthread); // if blocks aren't divisible by thread count - for (auto tnum = 0, in_buff_offs = 0, out_buff_offs = 0; tnum < nthread; tnum++) { - auto thr_blocks = blocks_per_thread + (tnum == nthread - 1 ? spare_blocks : 0); // num blocks for this thread - auto thr_elems = thr_blocks * block_size; // number of elements for this thread - auto thr_block_bytes = thr_blocks * block_size_bytes; // number of input bytes for this thread + size_t in_buff_offs = 0; + size_t out_buff_offs = 0; + + for (int tnum = 0; tnum < nthread; tnum++) { + size_t thr_blocks = blocks_per_thread + (tnum == nthread - 1 ? spare_blocks : 0); // num blocks for this thread + size_t thr_elems = thr_blocks * block_size; // number of elements for this thread + size_t thr_block_bytes = thr_blocks * block_size_bytes; // number of input bytes for this thread auto compute = [qtype] (ggml_type typ, uint8_t * inbuf, float * outbuf, int nels) { if (typ == GGML_TYPE_F16) { From 37c746d687d877bc11803e96b4dc5f378b83c0a0 Mon Sep 17 00:00:00 2001 From: Shijie <821898965@qq.com> Date: Sat, 2 Dec 2023 02:16:31 +0800 Subject: [PATCH 041/811] llama : add Qwen support (#4281) * enable qwen to llama.cpp * llama : do not GPU split bias tensors --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 131 +++++++++++++++++++- gguf-py/gguf/constants.py | 20 ++++ gguf-py/gguf/tensor_mapping.py | 18 +-- llama.cpp | 211 +++++++++++++++++++++++++++++++++ prompts/chat-with-qwen.txt | 1 + 5 files changed, 372 insertions(+), 9 deletions(-) create mode 100644 prompts/chat-with-qwen.txt diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 53ce76c70..bced1f561 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -10,7 +10,7 @@ import re import sys from enum import IntEnum from pathlib import Path -from typing import TYPE_CHECKING, Any, ContextManager, Iterator, cast +from typing import TYPE_CHECKING, Any, ContextManager, Iterator, cast, Optional import numpy as np import torch @@ -168,6 +168,8 @@ class Model: return PersimmonModel if model_architecture in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return StableLMModel + if model_architecture == "QWenLMHeadModel": + return QwenModel return Model def _is_model_safetensors(self) -> bool: @@ -203,6 +205,8 @@ class Model: return gguf.MODEL_ARCH.PERSIMMON if arch in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return gguf.MODEL_ARCH.STABLELM + if arch == "QWenLMHeadModel": + return gguf.MODEL_ARCH.QWEN raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -832,6 +836,131 @@ class StableLMModel(Model): self.gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) self.gguf_writer.add_layer_norm_eps(1e-5) + +class QwenModel(Model): + @staticmethod + def token_bytes_to_string(b): + from transformers.models.gpt2.tokenization_gpt2 import bytes_to_unicode + byte_encoder = bytes_to_unicode() + return ''.join([byte_encoder[ord(char)] for char in b.decode('latin-1')]) + + @staticmethod + def 
bpe(mergeable_ranks: dict[bytes, int], token: bytes, max_rank: Optional[int] = None) -> list[bytes]: + parts = [bytes([b]) for b in token] + while True: + min_idx = None + min_rank = None + for i, pair in enumerate(zip(parts[:-1], parts[1:])): + rank = mergeable_ranks.get(pair[0] + pair[1]) + if rank is not None and (min_rank is None or rank < min_rank): + min_idx = i + min_rank = rank + if min_rank is None or (max_rank is not None and min_rank >= max_rank): + break + assert min_idx is not None + parts = parts[:min_idx] + [parts[min_idx] + parts[min_idx + 1]] + parts[min_idx + 2:] + return parts + + def set_vocab(self): + dir_model = self.dir_model + hparams = self.hparams + tokens: list[bytearray] = [] + toktypes: list[int] = [] + + from transformers import AutoTokenizer # type: ignore[attr-defined] + tokenizer = AutoTokenizer.from_pretrained(dir_model, trust_remote_code=True) + vocab_size = hparams["vocab_size"] + assert max(tokenizer.get_vocab().values()) < vocab_size + + merges = [] + vocab = {} + mergeable_ranks = tokenizer.mergeable_ranks + for token, rank in mergeable_ranks.items(): + vocab[self.token_bytes_to_string(token)] = rank + if len(token) == 1: + continue + merged = QwenModel.bpe(mergeable_ranks, token, max_rank=rank) + assert len(merged) == 2 + merges.append(' '.join(map(self.token_bytes_to_string, merged))) + + reverse_vocab = {id_ : encoded_tok for encoded_tok, id_ in vocab.items()} + added_vocab = tokenizer.special_tokens + + for i in range(vocab_size): + if i not in reverse_vocab: + pad_token = f"[PAD{i}]".encode("utf-8") + tokens.append(bytearray(pad_token)) + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.CONTROL) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) + + self.gguf_writer.add_tokenizer_model("gpt2") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(dir_model, load_merges=False) + special_vocab.merges = merges + special_vocab._set_special_token("bos", tokenizer.special_tokens["<|endoftext|>"]) + special_vocab._set_special_token("eos", tokenizer.special_tokens["<|endoftext|>"]) + special_vocab._set_special_token("unk", tokenizer.special_tokens["<|endoftext|>"]) + special_vocab.add_to_gguf(self.gguf_writer) + + def set_gguf_parameters(self): + self.gguf_writer.add_name("Qwen") + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_block_count(self.hparams["num_hidden_layers"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_freq_base(self.hparams["rotary_emb_base"]) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["layer_norm_epsilon"]) + + def write_tensors(self): + block_count = self.hparams["num_hidden_layers"] + model_kv = dict(self.get_tensors()) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + for name, data_torch in model_kv.items(): + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + 
data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor(new_name, data) + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 8bd82daca..685c88f1a 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -92,6 +92,7 @@ class MODEL_ARCH(IntEnum): BERT = auto() BLOOM = auto() STABLELM = auto() + QWEN = auto() class MODEL_TENSOR(IntEnum): @@ -132,6 +133,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.BERT: "bert", MODEL_ARCH.BLOOM: "bloom", MODEL_ARCH.STABLELM: "stablelm", + MODEL_ARCH.QWEN: "qwen", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -317,6 +319,20 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.QWEN: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], MODEL_ARCH.GPT2: [ # TODO ], @@ -336,6 +352,10 @@ MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_ARCH.PERSIMMON: [ MODEL_TENSOR.ROPE_FREQS, ], + MODEL_ARCH.QWEN: [ + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_ROT_EMBD, + ], } # diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 22ad8b8fc..cc6236014 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -10,7 +10,7 @@ class TensorNameMap: # Token embeddings MODEL_TENSOR.TOKEN_EMBD: ( "gpt_neox.embed_in", # gptneox - "transformer.wte", # gpt2 gpt-j mpt refact + "transformer.wte", # gpt2 gpt-j mpt refact qwen "transformer.word_embeddings", # falcon "word_embeddings", # bloom "model.embed_tokens", # llama-hf @@ -38,7 +38,7 @@ class TensorNameMap: # Output MODEL_TENSOR.OUTPUT: ( "embed_out", # gptneox - "lm_head", # gpt2 mpt falcon llama-hf baichuan + "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen "output", # llama-pth bloom "word_embeddings_for_head", # persimmon ), @@ -51,7 +51,7 @@ class TensorNameMap: "norm", # llama-pth "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt - "ln_f", # refact bloom + "ln_f", # refact bloom qwen "language_model.encoder.final_layernorm", # persimmon ), @@ -65,7 +65,7 @@ class TensorNameMap: # Attention norm MODEL_TENSOR.ATTN_NORM: ( "gpt_neox.layers.{bid}.input_layernorm", # gptneox - "transformer.h.{bid}.ln_1", # gpt2 gpt-j refact + "transformer.h.{bid}.ln_1", # gpt2 gpt-j refact qwen "transformer.blocks.{bid}.norm_1", # mpt "transformer.h.{bid}.input_layernorm", # 
falcon7b "h.{bid}.input_layernorm", # bloom @@ -85,7 +85,7 @@ class TensorNameMap: # Attention query-key-value MODEL_TENSOR.ATTN_QKV: ( "gpt_neox.layers.{bid}.attention.query_key_value", # gptneox - "transformer.h.{bid}.attn.c_attn", # gpt2 + "transformer.h.{bid}.attn.c_attn", # gpt2 qwen "transformer.blocks.{bid}.attn.Wqkv", # mpt "transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom @@ -119,7 +119,7 @@ class TensorNameMap: # Attention output MODEL_TENSOR.ATTN_OUT: ( "gpt_neox.layers.{bid}.attention.dense", # gptneox - "transformer.h.{bid}.attn.c_proj", # gpt2 refact + "transformer.h.{bid}.attn.c_proj", # gpt2 refact qwen "transformer.blocks.{bid}.attn.out_proj", # mpt "transformer.h.{bid}.self_attention.dense", # falcon "h.{bid}.self_attention.dense", # bloom @@ -139,7 +139,7 @@ class TensorNameMap: # Feed-forward norm MODEL_TENSOR.FFN_NORM: ( "gpt_neox.layers.{bid}.post_attention_layernorm", # gptneox - "transformer.h.{bid}.ln_2", # gpt2 refact + "transformer.h.{bid}.ln_2", # gpt2 refact qwen "h.{bid}.post_attention_layernorm", # bloom "transformer.blocks.{bid}.norm_2", # mpt "model.layers.{bid}.post_attention_layernorm", # llama-hf @@ -161,18 +161,20 @@ class TensorNameMap: "encoder.layer.{bid}.intermediate.dense", # bert "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon + "transformer.h.{bid}.mlp.w1", # qwen ), # Feed-forward gate MODEL_TENSOR.FFN_GATE: ( "model.layers.{bid}.mlp.gate_proj", # llama-hf refact "layers.{bid}.feed_forward.w1", # llama-pth + "transformer.h.{bid}.mlp.w2", # qwen ), # Feed-forward down MODEL_TENSOR.FFN_DOWN: ( "gpt_neox.layers.{bid}.mlp.dense_4h_to_h", # gptneox - "transformer.h.{bid}.mlp.c_proj", # gpt2 refact + "transformer.h.{bid}.mlp.c_proj", # gpt2 refact qwen "transformer.blocks.{bid}.ffn.down_proj", # mpt "transformer.h.{bid}.mlp.dense_4h_to_h", # falcon "h.{bid}.mlp.dense_4h_to_h", # bloom diff --git a/llama.cpp b/llama.cpp index 6fbfeca54..ca21cffab 100644 --- a/llama.cpp +++ b/llama.cpp @@ -192,6 +192,7 @@ enum llm_arch { LLM_ARCH_REFACT, LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, + LLM_ARCH_QWEN, LLM_ARCH_UNKNOWN, }; @@ -208,6 +209,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_REFACT, "refact" }, { LLM_ARCH_BLOOM, "bloom" }, { LLM_ARCH_STABLELM, "stablelm" }, + { LLM_ARCH_QWEN, "qwen" }, }; enum llm_kv { @@ -518,6 +520,22 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_QWEN, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, @@ -2347,6 +2365,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_QWEN: + { + GGUF_GET_KEY(ctx, hparams.f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_7B; break; + case 40: model.type = e_model::MODEL_13B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3310,6 +3337,71 @@ 
static void llm_load_tensors( } } } break; + case LLM_ARCH_QWEN: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + // norm is not performance relevant on its own but keeping it in VRAM reduces data copying + // on Windows however this is detrimental unless everything is on the GPU +#ifndef _WIN32 + backend_norm = llama_backend_offload; +#else + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; +#endif // _WIN32 + + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + + if (backend_norm == GGML_BACKEND_GPU) { + vram_weights += ggml_nbytes(model.output_norm); + } + if (backend_output == GGML_BACKEND_GPU_SPLIT) { + vram_weights += ggml_nbytes(model.output); + } + } + + const uint32_t n_ff = hparams.n_ff / 2; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd * 3}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd * 3}, backend); + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + + if (backend == GGML_BACKEND_GPU) { + vram_weights += + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_gate) + + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); + } + } + } break; default: throw std::runtime_error("unknown architecture"); @@ -4908,6 +5000,121 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_qwen() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it wil be broadcasted to all 
heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 2*sizeof(float)*(n_embd))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + + // using mode = 2 for neox mode + Qcur = ggml_rope_custom( + ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward forward + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; // @@ -5382,6 +5589,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_stablelm(); } break; + case LLM_ARCH_QWEN: + { + result = llm.build_qwen(); + } break; default: GGML_ASSERT(false); } diff --git a/prompts/chat-with-qwen.txt b/prompts/chat-with-qwen.txt new file mode 100644 index 000000000..ac39ad925 --- /dev/null +++ b/prompts/chat-with-qwen.txt @@ -0,0 +1 @@ +You are a helpful assistant. 
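(Aside on the vocab conversion in this patch: the merge list that GGUF needs is re-derived from tiktoken's rank table by re-running the BPE merge loop per token, as in QwenModel.bpe above. A minimal standalone sketch of that idea, using a toy rank table rather than the real Qwen vocabulary:

def bpe(mergeable_ranks, token, max_rank=None):
    # greedily merge the lowest-ranked adjacent pair, same loop as in the converter
    parts = [bytes([b]) for b in token]
    while True:
        min_idx = None
        min_rank = None
        for i, pair in enumerate(zip(parts[:-1], parts[1:])):
            rank = mergeable_ranks.get(pair[0] + pair[1])
            if rank is not None and (min_rank is None or rank < min_rank):
                min_idx = i
                min_rank = rank
        if min_rank is None or (max_rank is not None and min_rank >= max_rank):
            break
        parts = parts[:min_idx] + [parts[min_idx] + parts[min_idx + 1]] + parts[min_idx + 2:]
    return parts

# toy rank table: single bytes first, then learned merges in rank order
ranks = {b"l": 0, b"o": 1, b"w": 2, b"lo": 3, b"low": 4}

# stopping just below the token's own rank recovers the two pieces whose merge produced it
print(bpe(ranks, b"low", max_rank=4))   # [b'lo', b'w']

For every multi-byte token the converter calls this with max_rank set to the token's own rank, so the loop stops one step early and yields exactly the two pieces recorded as that token's merge, which is why the patch asserts len(merged) == 2.)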
\ No newline at end of file From 03562f3a86d6706eea9f4fc09b532946c191b34e Mon Sep 17 00:00:00 2001 From: CausalLM <148736309+CausalLM@users.noreply.github.com> Date: Sat, 2 Dec 2023 02:17:06 +0800 Subject: [PATCH 042/811] llama : support attention bias on LLaMA architecture (#4283) * Support attention_bias on LLaMA architecture QKVO bias, should fix InternLM (https://github.com/ggerganov/llama.cpp/issues/3133) and works for LLaMAfied Qwen models (https://github.com/ggerganov/llama.cpp/pull/3743#issuecomment-1825923608). * check existence of qkvo bias while loading llama models Tested on LLaMA2, CUDA and CPU. * Update llama.cpp --- llama.cpp | 52 ++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 48 insertions(+), 4 deletions(-) diff --git a/llama.cpp b/llama.cpp index ca21cffab..15e52ad36 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1266,6 +1266,9 @@ struct llama_layer { struct ggml_tensor * wqkv; // attention bias + struct ggml_tensor * bq; + struct ggml_tensor * bk; + struct ggml_tensor * bv; struct ggml_tensor * bo; struct ggml_tensor * bqkv; @@ -2809,6 +2812,30 @@ static void llm_load_tensors( layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + try { + layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend); + } catch (const std::runtime_error& e) { + if (std::string(e.what()).find("not found") != std::string::npos) layer.bq = NULL; else throw; + } + + try { + layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend); + } catch (const std::runtime_error& e) { + if (std::string(e.what()).find("not found") != std::string::npos) layer.bk = NULL; else throw; + } + + try { + layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend); + } catch (const std::runtime_error& e) { + if (std::string(e.what()).find("not found") != std::string::npos) layer.bv = NULL; else throw; + } + + try { + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + } catch (const std::runtime_error& e) { + if (std::string(e.what()).find("not found") != std::string::npos) layer.bo = NULL; else throw; + } + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); @@ -2817,9 +2844,14 @@ static void llm_load_tensors( if (backend == GGML_BACKEND_GPU) { vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + + ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + + (layer.bq ? ggml_nbytes(layer.bq) : 0) + + (layer.bk ? ggml_nbytes(layer.bk) : 0) + + (layer.bv ? ggml_nbytes(layer.bv) : 0) + + (layer.bo ? 
ggml_nbytes(layer.bo) : 0) + + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_gate) + + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); } } } break; @@ -3983,12 +4015,24 @@ struct llm_build_context { // compute Q and K and RoPE them struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, @@ -4007,7 +4051,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); cur = llm_build_kqv(ctx0, hparams, kv_self, - model.layers[il].wo, NULL, + model.layers[il].wo, model.layers[il].bo, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); cb(cur, "kqv_out", il); } From 511f52c334e37033f9c9de07b98fca4abc9470bd Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Fri, 1 Dec 2023 13:18:35 -0500 Subject: [PATCH 043/811] build : enable libstdc++ assertions for debug builds (#4275) --- CMakeLists.txt | 5 +++++ Makefile | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6f35a25d5..0639518de 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -116,6 +116,11 @@ set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) +# enable libstdc++ assertions for debug builds +if (CMAKE_SYSTEM_NAME MATCHES "Linux") + add_compile_definitions($<$:_GLIBCXX_ASSERTIONS>) +endif() + if (NOT MSVC) if (LLAMA_SANITIZE_THREAD) add_compile_options(-fsanitize=thread) diff --git a/Makefile b/Makefile index 25b113e0a..3cc932a2e 100644 --- a/Makefile +++ b/Makefile @@ -174,6 +174,10 @@ ifdef LLAMA_DEBUG MK_CFLAGS += -O0 -g MK_CXXFLAGS += -O0 -g MK_LDFLAGS += -g + + ifeq ($(UNAME_S),Linux) + MK_CXXFLAGS += -Wp,-D_GLIBCXX_ASSERTIONS + endif else MK_CPPFLAGS += -DNDEBUG endif From b220222a64ce760bfbec9c770f11db3ec6a6abb6 Mon Sep 17 00:00:00 2001 From: Miwa / Ensan <63481257+ensan-hcl@users.noreply.github.com> Date: Sat, 2 Dec 2023 03:19:45 +0900 Subject: [PATCH 044/811] swift : fix token_to_piece implementation (#4278) * Fix token_to_piece implementation in Swift * Fix errors --- examples/batched.swift/Sources/main.swift | 10 +++------ .../llama.cpp.swift/LibLlama.swift | 22 +++++++++++++------ 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index ba15197ae..ce9d80d9b 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -230,18 +230,15 @@ private func token_to_piece(token: llama_token, buffer: inout [CChar]) -> String var result = [CChar](repeating: 0, count: 8) let nTokens = llama_token_to_piece(model, token, &result, Int32(result.count)) if nTokens < 0 { - if result.count >= -Int(nTokens) { - result.removeLast(-Int(nTokens)) - } else { - result.removeAll() - } + let actualTokensCount = -Int(nTokens) + result = .init(repeating: 0, count: actualTokensCount) let check 
= llama_token_to_piece( model, token, &result, Int32(result.count) ) - assert(check == nTokens) + assert(check == actualTokensCount) } else { result.removeLast(result.count - Int(nTokens)) } @@ -259,5 +256,4 @@ private func token_to_piece(token: llama_token, buffer: inout [CChar]) -> String buffer = [] return bufferString } - return nil } diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index aaef09611..09b36d9e6 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -164,13 +164,21 @@ actor LlamaContext { private func token_to_piece(token: llama_token) -> String { let result = UnsafeMutablePointer.allocate(capacity: 8) result.initialize(repeating: Int8(0), count: 8) + defer { + result.deallocate() + } + let nTokens = llama_token_to_piece(model, token, result, 8) - let _ = llama_token_to_piece(model, token, result, 8) - - let resultStr = String(cString: result) - - result.deallocate() - - return resultStr + if nTokens < 0 { + let newResult = UnsafeMutablePointer.allocate(capacity: Int(-nTokens)) + newResult.initialize(repeating: Int8(0), count: Int(-nTokens)) + defer { + newResult.deallocate() + } + _ = llama_token_to_piece(model, token, newResult, -nTokens) + return String(cString: newResult) + } else { + return String(cString: result) + } } } From d5a1cbde60531d02ac74da27ea355182e3a4d516 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Dec 2023 20:35:03 +0200 Subject: [PATCH 045/811] llama : support optional tensors (#4283) --- examples/server/server.cpp | 2 +- llama.cpp | 33 +++++++++------------------------ 2 files changed, 10 insertions(+), 25 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a65344b92..0fd42dcba 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1469,7 +1469,7 @@ struct llama_server_context int split_multiprompt_task(task_server& multiprompt_task) { - auto prompt_count = multiprompt_task.data.at("prompt").size(); + int prompt_count = multiprompt_task.data.at("prompt").size(); assert(prompt_count > 1); int multitask_id = id_gen++; diff --git a/llama.cpp b/llama.cpp index 15e52ad36..99964ec00 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1991,10 +1991,13 @@ struct llama_model_loader { return tensor; } - struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend) { + struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend, bool optional = false) { struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, name.c_str()); if (cur == NULL) { + if (optional) { + return NULL; + } throw std::runtime_error(format("%s: tensor '%s' not found", __func__, name.c_str())); } @@ -2812,29 +2815,11 @@ static void llm_load_tensors( layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - try { - layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend); - } catch (const std::runtime_error& e) { - if (std::string(e.what()).find("not found") != std::string::npos) layer.bq = NULL; else throw; - } - - try { - layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend); - } catch (const std::runtime_error& e) { 
- if (std::string(e.what()).find("not found") != std::string::npos) layer.bk = NULL; else throw; - } - - try { - layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend); - } catch (const std::runtime_error& e) { - if (std::string(e.what()).find("not found") != std::string::npos) layer.bv = NULL; else throw; - } - - try { - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); - } catch (const std::runtime_error& e) { - if (std::string(e.what()).find("not found") != std::string::npos) layer.bo = NULL; else throw; - } + // optional bias tensors + layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend, true); + layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend, true); + layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend, true); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend, true); layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); From 5a7d3125e7c24f223659b7f0b7aa7736986e92c0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Dec 2023 20:39:12 +0200 Subject: [PATCH 046/811] llama : avoid using "optional" keyword (#4283) --- llama.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/llama.cpp b/llama.cpp index 99964ec00..3f5d663cf 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1991,11 +1991,11 @@ struct llama_model_loader { return tensor; } - struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend, bool optional = false) { + struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend, bool required = true) { struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, name.c_str()); if (cur == NULL) { - if (optional) { + if (!required) { return NULL; } throw std::runtime_error(format("%s: tensor '%s' not found", __func__, name.c_str())); @@ -2816,10 +2816,10 @@ static void llm_load_tensors( layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); // optional bias tensors - layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend, true); - layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend, true); - layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend, true); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend, true); + layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend, false); + layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend, false); + layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend, false); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend, false); layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); From d7b800b8bc490a221acbd83c575206a907f2f6e2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 3 Dec 2023 10:58:16 +0200 Subject: [PATCH 047/811] llama : pad KV cache size (#4280) * llama : pad KV cache size to 32 * metal : try to improve batched decoding --- ggml-metal.m | 2 +- llama.cpp | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git 
a/ggml-metal.m b/ggml-metal.m index 6cfacf64f..3343bc8a3 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1083,7 +1083,7 @@ void ggml_metal_graph_compute( // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = 1; + int ne11_mm_min = src0t == GGML_TYPE_F16 ? 1 : 16; #if 0 // the numbers below are measured on M2 Ultra for 7B and 13B models diff --git a/llama.cpp b/llama.cpp index 3f5d663cf..fd905ade7 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5744,8 +5744,7 @@ static int llama_decode_internal( // a heuristic, to avoid attending the full cache if it is not yet utilized // after enough generations, the benefit from this heuristic disappears // if we start defragmenting the cache, the benefit from this will be more important - //kv_self.n = std::max(32, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32)); // TODO: this might be better for CUDA? - kv_self.n = std::min((int32_t) cparams.n_ctx, std::max(32, llama_kv_cache_cell_max(kv_self))); + kv_self.n = std::min((int32_t) cparams.n_ctx, std::max(32, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); From 6949b50df56ee58a2d76d45487942cb211c08629 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rickard=20Ed=C3=A9n?= Date: Sun, 3 Dec 2023 10:03:25 +0100 Subject: [PATCH 048/811] py : add grammar to oai like api (#4294) --- examples/server/api_like_OAI.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/api_like_OAI.py b/examples/server/api_like_OAI.py index 830c056d4..607fe49d3 100755 --- a/examples/server/api_like_OAI.py +++ b/examples/server/api_like_OAI.py @@ -70,6 +70,7 @@ def make_postData(body, chat=False, stream=False): if(is_present(body, "mirostat_tau")): postData["mirostat_tau"] = body["mirostat_tau"] if(is_present(body, "mirostat_eta")): postData["mirostat_eta"] = body["mirostat_eta"] if(is_present(body, "seed")): postData["seed"] = body["seed"] + if(is_present(body, "grammar")): postData["grammar"] = body["grammar"] if(is_present(body, "logit_bias")): postData["logit_bias"] = [[int(token), body["logit_bias"][token]] for token in body["logit_bias"].keys()] if (args.stop != ""): postData["stop"] = [args.stop] From 33e171d1e9fc4903f9314b490d77fb8d58331b63 Mon Sep 17 00:00:00 2001 From: Ed Lee Date: Sun, 3 Dec 2023 01:10:43 -0800 Subject: [PATCH 049/811] server : fix OpenAI API `stop` field to be optional (#4299) (cherry picked from commit Mozilla-Ocho/llamafile@e8c92bcb84ae3bcbf0d617b7ee6a5413bcbd58af) --- examples/server/server.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0fd42dcba..911f7bbe1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2410,9 +2410,7 @@ json oaicompat_completion_params_parse( } // Handle 'stop' field - if (body["stop"].is_null()) { - llama_params["stop"] = json::array({}); - } else if (body["stop"].is_string()) { + if (body.contains("stop") && body["stop"].is_string()) { llama_params["stop"] = json::array({body["stop"].get()}); } else { llama_params["stop"] = json_value(body, "stop", json::array()); From adf3de4f69ff7e44131222f05f9c7447ac0be3cb Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 3 Dec 2023 15:56:22 +0200 Subject: [PATCH 050/811] ggml : fix soft max out-of-bounds access (#4307) ggml-ci --- ggml.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c 
index e2687ef4f..cecb12700 100644 --- a/ggml.c +++ b/ggml.c @@ -15629,7 +15629,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } break; case GGML_OP_DIAG_MASK_ZERO: case GGML_OP_DIAG_MASK_INF: - case GGML_OP_SOFT_MAX: case GGML_OP_SOFT_MAX_BACK: case GGML_OP_ROPE: case GGML_OP_ROPE_BACK: @@ -15645,6 +15644,10 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = 1; //TODO } break; + case GGML_OP_SOFT_MAX: + { + n_tasks = MIN(MIN(4, n_threads), ggml_nrows(node->src[0])); + } break; case GGML_OP_CONV_TRANSPOSE_1D: { n_tasks = n_threads; From fbbc42827b2949b95bcde23ce47bb47d006c895d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 3 Dec 2023 15:56:35 +0200 Subject: [PATCH 051/811] ggml : reuse ggml_get_n_tasks() in ggml_graph_plan() (#4308) * ggml : fix soft max out-of-bounds access ggml-ci * ggml : reuse ggml_get_n_tasks() in ggml_graph_plan() ggml-ci --- ggml.c | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/ggml.c b/ggml.c index cecb12700..f743df1f3 100644 --- a/ggml.c +++ b/ggml.c @@ -15879,18 +15879,16 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { // thread scheduling for the different operations + work buffer size estimation for (int i = 0; i < cgraph->n_nodes; i++) { - int n_tasks = 1; - struct ggml_tensor * node = cgraph->nodes[i]; + const int n_tasks = ggml_get_n_tasks(node, n_threads); + size_t cur = 0; switch (node->op) { case GGML_OP_CPY: case GGML_OP_DUP: { - n_tasks = n_threads; - if (ggml_is_quantized(node->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; } @@ -15898,16 +15896,12 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { case GGML_OP_ADD: case GGML_OP_ADD1: { - n_tasks = n_threads; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } } break; case GGML_OP_ACC: { - n_tasks = n_threads; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[1]->ne[0] * n_tasks; } @@ -15935,16 +15929,12 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_OUT_PROD: { - n_tasks = n_threads; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } } break; case GGML_OP_SOFT_MAX: { - n_tasks = MIN(MIN(4, n_threads), ggml_nrows(node->src[0])); - cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; } break; case GGML_OP_CONV_TRANSPOSE_1D: @@ -15974,7 +15964,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_IM2COL: { - n_tasks = n_threads; } break; case GGML_OP_CONV_TRANSPOSE_2D: { @@ -15992,8 +15981,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_FLASH_ATTN: { - n_tasks = n_threads; - const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); if (node->src[1]->type == GGML_TYPE_F32) { @@ -16006,8 +15993,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_FLASH_FF: { - n_tasks = n_threads; - if (node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(float)*node->src[1]->ne[1]*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*node->src[1]->ne[1]*n_tasks; // this is overestimated by x2 @@ -16018,8 +16003,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int 
n_threads) { } break; case GGML_OP_FLASH_ATTN_BACK: { - n_tasks = n_threads; - const int64_t D = node->src[0]->ne[0]; const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); const int64_t mxDn = MAX(D, ne11) * 2; // *2 because of S and SM in ggml_compute_forward_flash_attn_back @@ -16034,8 +16017,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { case GGML_OP_CROSS_ENTROPY_LOSS: { - n_tasks = n_threads; - cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); } break; case GGML_OP_COUNT: From 4fa44e84adb4c78e1885694cc3513982d4af2b08 Mon Sep 17 00:00:00 2001 From: Ikko Eltociear Ashimine Date: Mon, 4 Dec 2023 16:57:35 +0900 Subject: [PATCH 052/811] grammar-parser : fix typo (#4318) preceeding -> preceding --- common/grammar-parser.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/grammar-parser.cpp b/common/grammar-parser.cpp index ff51cc803..bf89a96f3 100644 --- a/common/grammar-parser.cpp +++ b/common/grammar-parser.cpp @@ -190,7 +190,7 @@ namespace grammar_parser { pos = parse_space(pos + 1, is_nested); } else if (*pos == '*' || *pos == '+' || *pos == '?') { // repetition operator if (last_sym_start == out_elements.size()) { - throw std::runtime_error(std::string("expecting preceeding item to */+/? at ") + pos); + throw std::runtime_error(std::string("expecting preceding item to */+/? at ") + pos); } // apply transformation to previous symbol (last_sym_start to end) according to From 5c9f90cba1cc6b0a2a7d19ee5dcb73cad6331d30 Mon Sep 17 00:00:00 2001 From: Miwa / Ensan <63481257+ensan-hcl@users.noreply.github.com> Date: Mon, 4 Dec 2023 22:43:45 +0900 Subject: [PATCH 053/811] swift : fix prompt tokenization logic (#4321) --- examples/batched.swift/Sources/main.swift | 5 +++-- examples/llama.swiftui/llama.cpp.swift/LibLlama.swift | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index ce9d80d9b..4d0005349 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -215,9 +215,10 @@ print("decoded \(n_decode) tokens in \(String(format: "%.2f", Double(t_main_end llama_print_timings(context) private func tokenize(text: String, add_bos: Bool) -> [llama_token] { - let n_tokens = text.count + (add_bos ? 1 : 0) + let utf8Count = text.utf8.count + let n_tokens = utf8Count + (add_bos ? 1 : 0) let tokens = UnsafeMutablePointer.allocate(capacity: n_tokens) - let tokenCount = llama_tokenize(model, text, Int32(text.count), tokens, Int32(n_tokens), add_bos, /*special tokens*/ false) + let tokenCount = llama_tokenize(model, text, Int32(utf8Count), tokens, Int32(n_tokens), add_bos, /*special tokens*/ false) var swiftTokens: [llama_token] = [] for i in 0 ..< tokenCount { swiftTokens.append(tokens[Int(i)]) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 09b36d9e6..f828106fb 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -147,9 +147,10 @@ actor LlamaContext { } private func tokenize(text: String, add_bos: Bool) -> [llama_token] { - let n_tokens = text.count + (add_bos ? 1 : 0) + let utf8Count = text.utf8.count + let n_tokens = utf8Count + (add_bos ? 
1 : 0) let tokens = UnsafeMutablePointer.allocate(capacity: n_tokens) - let tokenCount = llama_tokenize(model, text, Int32(text.count), tokens, Int32(n_tokens), add_bos, false) + let tokenCount = llama_tokenize(model, text, Int32(utf8Count), tokens, Int32(n_tokens), add_bos, false) var swiftTokens: [llama_token] = [] for i in 0.. Date: Tue, 5 Dec 2023 01:03:49 +0900 Subject: [PATCH 054/811] swift : fix concatenation method to avoid invalid UTF8 stringfication (#4325) --- .../llama.cpp.swift/LibLlama.swift | 37 +++++++++++++++---- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index f828106fb..3754f0551 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -11,6 +11,8 @@ actor LlamaContext { private var context: OpaquePointer private var batch: llama_batch private var tokens_list: [llama_token] + /// This variable is used to store temporarily invalid cchars + private var temporary_invalid_cchars: [CChar] var n_len: Int32 = 512 var n_cur: Int32 = 0 @@ -21,6 +23,7 @@ actor LlamaContext { self.context = context self.tokens_list = [] self.batch = llama_batch_init(512, 0, 1) + self.temporary_invalid_cchars = [] } deinit { @@ -61,6 +64,7 @@ actor LlamaContext { print("attempting to complete \"\(text)\"") tokens_list = tokenize(text: text, add_bos: true) + temporary_invalid_cchars = [] let n_ctx = llama_n_ctx(context) let n_kv_req = tokens_list.count + (Int(n_len) - tokens_list.count) @@ -72,7 +76,7 @@ actor LlamaContext { } for id in tokens_list { - print(token_to_piece(token: id)) + print(String(cString: token_to_piece(token: id) + [0])) } // batch = llama_batch_init(512, 0) // done in init() @@ -115,10 +119,25 @@ actor LlamaContext { if new_token_id == llama_token_eos(context) || n_cur == n_len { print("\n") - return "" + let new_token_str = String(cString: temporary_invalid_cchars + [0]) + temporary_invalid_cchars.removeAll() + return new_token_str } - let new_token_str = token_to_piece(token: new_token_id) + let new_token_cchars = token_to_piece(token: new_token_id) + temporary_invalid_cchars.append(contentsOf: new_token_cchars) + let new_token_str: String + if let string = String(validatingUTF8: temporary_invalid_cchars + [0]) { + temporary_invalid_cchars.removeAll() + new_token_str = string + } else if (0 ..< temporary_invalid_cchars.count).contains(where: {$0 != 0 && String(validatingUTF8: Array(temporary_invalid_cchars.suffix($0)) + [0]) != nil}) { + // in this case, at least the suffix of the temporary_invalid_cchars can be interpreted as UTF8 string + let string = String(cString: temporary_invalid_cchars + [0]) + temporary_invalid_cchars.removeAll() + new_token_str = string + } else { + new_token_str = "" + } print(new_token_str) // tokens_list.append(new_token_id) @@ -144,6 +163,7 @@ actor LlamaContext { func clear() { tokens_list.removeAll() + temporary_invalid_cchars.removeAll() } private func tokenize(text: String, add_bos: Bool) -> [llama_token] { @@ -162,7 +182,8 @@ actor LlamaContext { return swiftTokens } - private func token_to_piece(token: llama_token) -> String { + /// - note: The result does not contain null-terminator + private func token_to_piece(token: llama_token) -> [CChar] { let result = UnsafeMutablePointer.allocate(capacity: 8) result.initialize(repeating: Int8(0), count: 8) defer { @@ -176,10 +197,12 @@ actor LlamaContext { defer { newResult.deallocate() } - _ = 
llama_token_to_piece(model, token, newResult, -nTokens) - return String(cString: newResult) + let nNewTokens = llama_token_to_piece(model, token, newResult, -nTokens) + let bufferPointer = UnsafeBufferPointer(start: newResult, count: Int(nNewTokens)) + return Array(bufferPointer) } else { - return String(cString: result) + let bufferPointer = UnsafeBufferPointer(start: result, count: Int(nTokens)) + return Array(bufferPointer) } } } From 23b5e12eb5a76489b4c3ee22213a081da68b1809 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Mon, 4 Dec 2023 17:04:21 +0100 Subject: [PATCH 055/811] simple : update error message for KV cache check (#4324) This commit updates the error message that is printed when the KV cache is not big enough to hold all the prompt and generated tokens. Specifically it removes the reference to n_parallel and replaces it with n_len. Signed-off-by: Daniel Bevenius --- examples/simple/simple.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp index 374aef6f1..9cfde8308 100644 --- a/examples/simple/simple.cpp +++ b/examples/simple/simple.cpp @@ -75,7 +75,7 @@ int main(int argc, char ** argv) { // make sure the KV cache is big enough to hold all the prompt and generated tokens if (n_kv_req > n_ctx) { LOG_TEE("%s: error: n_kv_req > n_ctx, the required KV cache size is not big enough\n", __func__); - LOG_TEE("%s: either reduce n_parallel or increase n_ctx\n", __func__); + LOG_TEE("%s: either reduce n_len or increase n_ctx\n", __func__); return 1; } From e4b76bbe316ee50fb17d9ac29e654c0edf830eba Mon Sep 17 00:00:00 2001 From: kchro3 <62481661+kchro3@users.noreply.github.com> Date: Mon, 4 Dec 2023 23:29:46 -0800 Subject: [PATCH 056/811] swift : revert compiler checks for swift package (#4332) --- Package.swift | 46 ++++++++++++++++------------------------------ 1 file changed, 16 insertions(+), 30 deletions(-) diff --git a/Package.swift b/Package.swift index 5b3bd72ca..18d610d69 100644 --- a/Package.swift +++ b/Package.swift @@ -2,33 +2,14 @@ import PackageDescription -#if arch(arm) || arch(arm64) -let platforms: [SupportedPlatform]? = [ - .macOS(.v12), - .iOS(.v14), - .watchOS(.v4), - .tvOS(.v14) -] -let exclude: [String] = [] -let resources: [Resource] = [ - .process("ggml-metal.metal") -] -let additionalSources: [String] = ["ggml-metal.m"] -let additionalSettings: [CSetting] = [ - .unsafeFlags(["-fno-objc-arc"]), - .define("GGML_USE_METAL") -] -#else -let platforms: [SupportedPlatform]? 
= nil -let exclude: [String] = ["ggml-metal.metal"] -let resources: [Resource] = [] -let additionalSources: [String] = [] -let additionalSettings: [CSetting] = [] -#endif - let package = Package( name: "llama", - platforms: platforms, + platforms: [ + .macOS(.v12), + .iOS(.v14), + .watchOS(.v4), + .tvOS(.v14) + ], products: [ .library(name: "llama", targets: ["llama"]), ], @@ -36,25 +17,30 @@ let package = Package( .target( name: "llama", path: ".", - exclude: exclude, + exclude: [], sources: [ "ggml.c", "llama.cpp", "ggml-alloc.c", "ggml-backend.c", "ggml-quants.c", - ] + additionalSources, - resources: resources, + "ggml-metal.m", + ], + resources: [ + .process("ggml-metal.metal") + ], publicHeadersPath: "spm-headers", cSettings: [ .unsafeFlags(["-Wno-shorten-64-to-32", "-O3", "-DNDEBUG"]), - .define("GGML_USE_ACCELERATE") + .define("GGML_USE_ACCELERATE"), + .unsafeFlags(["-fno-objc-arc"]), + .define("GGML_USE_METAL"), // NOTE: NEW_LAPACK will required iOS version 16.4+ // We should consider add this in the future when we drop support for iOS 14 // (ref: ref: https://developer.apple.com/documentation/accelerate/1513264-cblas_sgemm?language=objc) // .define("ACCELERATE_NEW_LAPACK"), // .define("ACCELERATE_LAPACK_ILP64") - ] + additionalSettings, + ], linkerSettings: [ .linkedFramework("Accelerate") ] From 52c8bc3cf312e1caf02d37bfb9d9d865cbe33594 Mon Sep 17 00:00:00 2001 From: MaggotHATE Date: Tue, 5 Dec 2023 15:05:51 +0500 Subject: [PATCH 057/811] sampling : custom samplers order (#4285) * Samplers sequence order w parameter * Cleaned commented code * Fixed formatting * Rewrote with unordered_map * Revert and rewrite, too many problems and safeguards would be needed * Fixed code style * Code style fixes according to review * More readable samplers input string, fixed help * Style fix in sampler_queue * Formatting fixes * Fixing whitespaces --- common/common.cpp | 56 +++++++++++++++++++++++++++++++++++++++ common/common.h | 6 +++++ common/sampling.cpp | 60 ++++++++++++++++++++++++++++++++++-------- common/sampling.h | 36 ++++++++++++++----------- examples/main/main.cpp | 1 + 5 files changed, 132 insertions(+), 27 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 1dcc235ea..8e6d74d0d 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -280,6 +280,18 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.yarn_beta_slow = std::stof(argv[i]); } else if (arg == "--memory-f32") { params.memory_f16 = false; + } else if (arg == "--samplers") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.samplers_sequence = parse_samplers_input(argv[i]); + } else if (arg == "--sampling-seq") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.samplers_sequence = argv[i]; } else if (arg == "--top-p") { if (++i >= argc) { invalid_param = true; @@ -761,6 +773,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until context filled)\n", params.n_predict); printf(" -c N, --ctx-size N size of the prompt context (default: %d, 0 = loaded from model)\n", params.n_ctx); printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); + printf(" --samplers samplers that will be used for generation in the order, separated by \';\', for example: \"top_k;tfs;typical;top_p;min_p;temp\"\n"); + printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", 
sparams.samplers_sequence.c_str()); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); printf(" --min-p N min-p sampling (default: %.1f, 0.0 = disabled)\n", (double)sparams.min_p); @@ -886,6 +900,48 @@ std::string gpt_random_prompt(std::mt19937 & rng) { GGML_UNREACHABLE(); } +// +// String parsing +// + +std::string parse_samplers_input(std::string input) { + std::string output = ""; + // since samplers names are written multiple ways + // make it ready for both system names and input names + std::unordered_map samplers_symbols { + {"top_k", 'k'}, + {"top-k", 'k'}, + {"top_p", 'p'}, + {"top-p", 'p'}, + {"nucleus", 'p'}, + {"typical_p", 'y'}, + {"typical-p", 'y'}, + {"typical", 'y'}, + {"min_p", 'm'}, + {"min-p", 'm'}, + {"tfs_z", 'f'}, + {"tfs-z", 'f'}, + {"tfs", 'f'}, + {"temp", 't'}, + {"temperature",'t'} + }; + // expected format example: "temp;top_k;tfs_z;typical_p;top_p;min_p" + size_t separator = input.find(';'); + while (separator != input.npos) { + std::string name = input.substr(0,separator); + input = input.substr(separator+1); + separator = input.find(';'); + + if (samplers_symbols.find(name) != samplers_symbols.end()) { + output += samplers_symbols[name]; + } + } + if (samplers_symbols.find(input) != samplers_symbols.end()) { + output += samplers_symbols[input]; + } + return output; +} + // // Model utils // diff --git a/common/common.h b/common/common.h index 2f6fe48ab..534f7b132 100644 --- a/common/common.h +++ b/common/common.h @@ -141,6 +141,12 @@ std::string gpt_random_prompt(std::mt19937 & rng); void process_escapes(std::string& input); +// +// String parsing +// + +std::string parse_samplers_input(std::string input); + // // Model utils // diff --git a/common/sampling.cpp b/common/sampling.cpp index 1317024c2..b6bb886c6 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -99,6 +99,54 @@ std::string llama_sampling_print(const llama_sampling_params & params) { return std::string(result); } +std::string llama_sampling_order_print(const llama_sampling_params & params) { + std::string result = "CFG -> Penalties "; + if (params.mirostat == 0) { + for (auto s : params.samplers_sequence) { + switch (s) { + case 'k': result += "-> top_k "; break; + case 'f': result += "-> tfs_z "; break; + case 'y': result += "-> typical_p "; break; + case 'p': result += "-> top_p "; break; + case 'm': result += "-> min_p "; break; + case 't': result += "-> temp "; break; + default : break; + } + } + } else result += "-> mirostat "; + + return result; +} + +// no reasons to expose this function in header +void sampler_queue( + struct llama_context * ctx_main, + const llama_sampling_params & params, + llama_token_data_array & cur_p, + size_t & min_keep) { + const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); + + const float temp = params.temp; + const int32_t top_k = params.top_k <= 0 ? 
n_vocab : params.top_k; + const float top_p = params.top_p; + const float min_p = params.min_p; + const float tfs_z = params.tfs_z; + const float typical_p = params.typical_p; + const std::string & samplers_sequence = params.samplers_sequence; + + for (auto s : samplers_sequence) { + switch (s){ + case 'k': llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); break; + case 'f': llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); break; + case 'y': llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; + case 'p': llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; + case 'm': llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; + case 't': llama_sample_temp (ctx_main, &cur_p, temp); break; + default : break; + } + } +} + llama_token llama_sampling_sample( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, @@ -109,11 +157,6 @@ llama_token llama_sampling_sample( const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); const float temp = params.temp; - const int32_t top_k = params.top_k <= 0 ? n_vocab : params.top_k; - const float top_p = params.top_p; - const float min_p = params.min_p; - const float tfs_z = params.tfs_z; - const float typical_p = params.typical_p; const int32_t penalty_last_n = params.penalty_last_n < 0 ? params.n_prev : params.penalty_last_n; const float penalty_repeat = params.penalty_repeat; const float penalty_freq = params.penalty_freq; @@ -188,12 +231,7 @@ llama_token llama_sampling_sample( // temperature sampling size_t min_keep = std::max(1, params.n_probs); - llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); - llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); - llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); - llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); - llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); - llama_sample_temp (ctx_main, &cur_p, temp); + sampler_queue(ctx_main, params, cur_p, min_keep); id = llama_sample_token(ctx_main, &cur_p); diff --git a/common/sampling.h b/common/sampling.h index 7c9b8dcf2..fdfa9eed1 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -10,22 +10,23 @@ // sampling parameters typedef struct llama_sampling_params { - int32_t n_prev = 64; // number of previous tokens to remember - int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. - int32_t top_k = 40; // <= 0 to use vocab size - float top_p = 0.95f; // 1.0 = disabled - float min_p = 0.05f; // 0.0 = disabled - float tfs_z = 1.00f; // 1.0 = disabled - float typical_p = 1.00f; // 1.0 = disabled - float temp = 0.80f; // 1.0 = disabled - int32_t penalty_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) - float penalty_repeat = 1.10f; // 1.0 = disabled - float penalty_freq = 0.00f; // 0.0 = disabled - float penalty_present = 0.00f; // 0.0 = disabled - int32_t mirostat = 0; // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0 - float mirostat_tau = 5.00f; // target entropy - float mirostat_eta = 0.10f; // learning rate - bool penalize_nl = true; // consider newlines as a repeatable token + int32_t n_prev = 64; // number of previous tokens to remember + int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. 
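(For reference, the name-to-character mapping used by parse_samplers_input() above condenses a --samplers list into the short --sampling-seq form consumed by sampler_queue(); a rough Python equivalent, not part of the patch:

NAME_TO_CHAR = {
    "top_k": "k", "top-k": "k",
    "top_p": "p", "top-p": "p", "nucleus": "p",
    "typical_p": "y", "typical-p": "y", "typical": "y",
    "min_p": "m", "min-p": "m",
    "tfs_z": "f", "tfs-z": "f", "tfs": "f",
    "temp": "t", "temperature": "t",
}

def to_sampling_seq(samplers: str) -> str:
    # unknown names are simply skipped, as in the C++ helper
    return "".join(NAME_TO_CHAR.get(name, "") for name in samplers.split(";"))

print(to_sampling_seq("top_k;tfs_z;typical_p;top_p;min_p;temp"))  # kfypmt

So the default sequence "kfypmt" corresponds to top_k, tail_free, typical_p, top_p, min_p, temp, applied in that order before token selection.)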
+ int32_t top_k = 40; // <= 0 to use vocab size + float top_p = 0.95f; // 1.0 = disabled + float min_p = 0.05f; // 0.0 = disabled + float tfs_z = 1.00f; // 1.0 = disabled + float typical_p = 1.00f; // 1.0 = disabled + float temp = 0.80f; // 1.0 = disabled + int32_t penalty_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) + float penalty_repeat = 1.10f; // 1.0 = disabled + float penalty_freq = 0.00f; // 0.0 = disabled + float penalty_present = 0.00f; // 0.0 = disabled + int32_t mirostat = 0; // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0 + float mirostat_tau = 5.00f; // target entropy + float mirostat_eta = 0.10f; // learning rate + bool penalize_nl = true; // consider newlines as a repeatable token + std::string samplers_sequence = "kfypmt"; // top_k, tail_free, typical_p, top_p, min_p, temp std::string grammar; // optional BNF-like grammar to constrain sampling @@ -80,6 +81,9 @@ std::string llama_sampling_prev_str(llama_sampling_context * ctx_sampling, llama // Print sampling parameters into a string std::string llama_sampling_print(const llama_sampling_params & params); +// Print sampling order into a string +std::string llama_sampling_order_print(const llama_sampling_params & params); + // this is a common sampling function used across the examples for convenience // it can serve as a starting point for implementing your own sampling function // Note: When using multiple sequences, it is the caller's responsibility to call diff --git a/examples/main/main.cpp b/examples/main/main.cpp index c5cdfbf21..c096f110b 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -437,6 +437,7 @@ int main(int argc, char ** argv) { } } LOG_TEE("sampling: \n%s\n", llama_sampling_print(sparams).c_str()); + LOG_TEE("sampling order: \n%s\n", llama_sampling_order_print(sparams).c_str()); LOG_TEE("generate: n_ctx = %d, n_batch = %d, n_predict = %d, n_keep = %d\n", n_ctx, params.n_batch, params.n_predict, params.n_keep); LOG_TEE("\n\n"); From 5aa365d88fdb8fdd430ef3fc141c7a5fd37c3502 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Tue, 5 Dec 2023 10:19:18 -0700 Subject: [PATCH 058/811] llama : allow overriding GGUF metadata when loading model (#4092) * feat: Allow overriding GGUF metadata when loading model * Fix the one time GCC is stricter than clang about something * Step1 * Refactor... basically everything! 
* Nuke obsolete GetArrayLen struct * simplify std::string specialization * Various cleanups Add informational output when overrides are applied Warn user when an override with the wrong type is specified * Fix broken logic for parsing bool KV overrides Fix issue where overrides didn't apply when key missing in GGUF metadata Resolve merge changes * llama : rearrange model params * Update new GET_KEY call Add note that metadata KV overrides aren't reflected in initial metadata KV info dump --------- Co-authored-by: cebtenzzre Co-authored-by: Georgi Gerganov --- common/common.cpp | 55 +++++++ common/common.h | 2 + llama.cpp | 370 +++++++++++++++++++++++++++++++++++----------- llama.h | 20 +++ 4 files changed, 361 insertions(+), 86 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 8e6d74d0d..4e823c526 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -690,6 +690,47 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { std::istreambuf_iterator(), std::back_inserter(sparams.grammar) ); + } else if (arg == "--override-kv") { + if (++i >= argc) { + invalid_param = true; + break; + } + char * sep = strchr(argv[i], '='); + if (sep == nullptr || sep - argv[i] >= 128) { + fprintf(stderr, "error: Malformed KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + struct llama_model_kv_override kvo; + std::strncpy(kvo.key, argv[i], sep - argv[i]); + kvo.key[sep - argv[i]] = 0; + sep++; + if (strncmp(sep, "int:", 4) == 0) { + sep += 4; + kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.int_value = std::atol(sep); + } else if (strncmp(sep, "float:", 6) == 0) { + sep += 6; + kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.float_value = std::atof(sep); + } else if (strncmp(sep, "bool:", 5) == 0) { + sep += 5; + kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + if (std::strcmp(sep, "true") == 0) { + kvo.bool_value = true; + } else if (std::strcmp(sep, "false") == 0) { + kvo.bool_value = false; + } else { + fprintf(stderr, "error: Invalid boolean value for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + } else { + fprintf(stderr, "error: Invalid type for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + params.kv_overrides.push_back(kvo); #ifndef LOG_DISABLE_LOGS // Parse args for logging parameters } else if ( log_param_single_parse( argv[i] ) ) { @@ -733,6 +774,11 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } } + if (!params.kv_overrides.empty()) { + params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.back().key[0] = 0; + } + return true; } @@ -864,6 +910,9 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" draft model for speculative decoding (default: %s)\n", params.model.c_str()); printf(" -ld LOGDIR, --logdir LOGDIR\n"); printf(" path under which to save YAML logs (no logging if unset)\n"); + printf(" --override-kv KEY=TYPE:VALUE\n"); + printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); + printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); @@ -956,6 +1005,12 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & mparams.tensor_split = params.tensor_split; mparams.use_mmap = params.use_mmap; mparams.use_mlock = params.use_mlock; + if (params.kv_overrides.empty()) { + mparams.kv_overrides = NULL; + } else { + GGML_ASSERT(params.kv_overrides.back().key[0] == 0 && "KV overrides not terminated with empty key"); + mparams.kv_overrides = params.kv_overrides.data(); + } return mparams; } diff --git a/common/common.h b/common/common.h index 534f7b132..024679380 100644 --- a/common/common.h +++ b/common/common.h @@ -86,6 +86,8 @@ struct gpt_params { std::vector antiprompt; // string upon seeing which more user input is prompted std::string logdir = ""; // directory in which to save YAML log files + std::vector kv_overrides; + // TODO: avoid tuple, use struct std::vector> lora_adapter; // lora adapter path with user defined scale std::string lora_base = ""; // base model path for the lora adapter diff --git a/llama.cpp b/llama.cpp index fd905ade7..b77020e10 100644 --- a/llama.cpp +++ b/llama.cpp @@ -74,6 +74,7 @@ #include #include #include +#include #include #if defined(_MSC_VER) @@ -590,21 +591,6 @@ struct LLM_TN { // gguf helpers // -#define GGUF_GET_KEY(ctx, dst, func, type, req, key) \ -do { \ - const std::string skey(key); \ - const int kid = gguf_find_key(ctx, skey.c_str()); \ - if (kid >= 0) { \ - enum gguf_type ktype = gguf_get_kv_type(ctx, kid); \ - if (ktype != (type)) { \ - throw std::runtime_error(format("key %s has wrong type: %s", skey.c_str(), gguf_type_name(ktype))); \ - } \ - (dst) = func(ctx, kid); \ - } else if (req) { \ - throw std::runtime_error(format("key not found in model: %s", skey.c_str())); \ - } \ -} while (0) - static std::map LLAMA_ROPE_SCALING_TYPES = { { LLAMA_ROPE_SCALING_NONE, "none" }, { LLAMA_ROPE_SCALING_LINEAR, "linear" }, @@ -638,7 +624,7 @@ static std::string gguf_data_to_str(enum gguf_type type, const void * data, int } } -static std::string gguf_kv_to_str(struct gguf_context * ctx_gguf, int i) { +static std::string gguf_kv_to_str(const struct gguf_context * ctx_gguf, int i) { const enum gguf_type type = gguf_get_kv_type(ctx_gguf, i); switch (type) { @@ -1797,6 +1783,169 @@ static std::string llama_format_tensor_shape(const struct ggml_tensor * t) { return buf; } +namespace GGUFMeta { + template + struct GKV_Base_Type { + static constexpr gguf_type gt = gt_; + + static T getter(const gguf_context * ctx, const int kid) { + return gfun(ctx, kid); + } + }; + + template struct GKV_Base; + + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + template<> struct GKV_Base: GKV_Base_Type {}; + + template<> struct GKV_Base { + static constexpr gguf_type gt = GGUF_TYPE_STRING; + + static std::string getter(const gguf_context * ctx, const int kid) { + return gguf_get_val_str(ctx, kid); + } + }; + + struct ArrayInfo{ + const gguf_type gt; + const size_t length; + const void * data; + }; + 
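// For reference, a concrete specialization in this scheme looks like the
// following sketch (the getter names are assumptions based on the existing
// gguf API, e.g. gguf_get_val_u32 / gguf_get_val_bool used elsewhere in this
// file):
//
//   template<> struct GKV_Base<uint32_t>: GKV_Base_Type<uint32_t, GGUF_TYPE_UINT32, gguf_get_val_u32> {};
//   template<> struct GKV_Base<bool>:     GKV_Base_Type<bool,     GGUF_TYPE_BOOL,   gguf_get_val_bool> {};
//
// GKV<uint32_t>::get_kv(ctx, kid) can then check that the key's stored type is
// GGUF_TYPE_UINT32 before invoking the getter.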
+ template<> struct GKV_Base { + public: + static constexpr gguf_type gt = GGUF_TYPE_ARRAY; + static ArrayInfo getter(const gguf_context *ctx, const int k) { + return ArrayInfo { + gguf_get_arr_type(ctx, k), + size_t(gguf_get_arr_n(ctx, k)), + gguf_get_arr_data(ctx, k), + }; + } + }; + + template + class GKV: public GKV_Base { + GKV() = delete; + + public: + static T get_kv(const gguf_context * ctx, const int k) { + const enum gguf_type kt = gguf_get_kv_type(ctx, k); + + if (kt != GKV::gt) { + throw std::runtime_error(format("key %s has wrong type %s but expected type %s", + gguf_get_key(ctx, k), gguf_type_name(kt), gguf_type_name(GKV::gt))); + } + return GKV::getter(ctx, k); + } + + static const char * override_type_to_str(const llama_model_kv_override_type ty) { + switch (ty) { + case LLAMA_KV_OVERRIDE_BOOL: return "bool"; + case LLAMA_KV_OVERRIDE_INT: return "int"; + case LLAMA_KV_OVERRIDE_FLOAT: return "float"; + } + return "unknown"; + } + + static bool validate_override(const llama_model_kv_override_type expected_type, const struct llama_model_kv_override *override) { + if (!override) { return false; } + if (override->tag == expected_type) { + LLAMA_LOG_INFO("%s: Using metadata override (%5s) '%s' = ", + __func__, override_type_to_str(override->tag), override->key); + switch (override->tag) { + case LLAMA_KV_OVERRIDE_BOOL: { + printf("%s\n", override->bool_value ? "true" : "false"); + } break; + case LLAMA_KV_OVERRIDE_INT: { + printf("%" PRId64 "\n", override->int_value); + } break; + case LLAMA_KV_OVERRIDE_FLOAT: { + printf("%.6f\n", override->float_value); + } break; + default: + // Shouldn't be possible to end up here, but just in case... + throw std::runtime_error( + format("Unsupported attempt to override %s type for metadata key %s\n", + override_type_to_str(override->tag), override->key)); + } + return true; + } + LLAMA_LOG_WARN("%s: Warning: Bad metadata override type for key '%s', expected %s but got %s\n", + __func__, override->key, override_type_to_str(expected_type), override_type_to_str(override->tag)); + return false; + } + + template + static typename std::enable_if::value, bool>::type + try_override(OT & target, const struct llama_model_kv_override *override) { + if (validate_override(LLAMA_KV_OVERRIDE_BOOL, override)) { + target = override->bool_value; + return true; + } + return true; + } + + template + static typename std::enable_if::value && std::is_integral::value, bool>::type + try_override(OT & target, const struct llama_model_kv_override *override) { + if (validate_override(LLAMA_KV_OVERRIDE_INT, override)) { + target = override->int_value; + return true; + } + return false; + } + + template + static typename std::enable_if::value, bool>::type + try_override(T & target, const struct llama_model_kv_override *override) { + if (validate_override(LLAMA_KV_OVERRIDE_FLOAT, override)) { + target = override->float_value; + return true; + } + return false; + } + + template + static typename std::enable_if::value, bool>::type + try_override(T & target, const struct llama_model_kv_override *override) { + (void)target; + (void)override; + if (!override) { return false; } + // Currently, we should never end up here so it would be a bug if we do. + throw std::runtime_error(format("Unsupported attempt to override string type for metadata key %s\n", + override ? 
override->key : "NULL")); + } + + static bool set(const gguf_context * ctx, const int k, T & target, const struct llama_model_kv_override *override = nullptr) { + if (try_override(target, override)) { + return true; + } + if (k < 0) { return false; } + target = get_kv(ctx, k); + return true; + } + + static bool set(const gguf_context * ctx, const char * key, T & target, const struct llama_model_kv_override *override = nullptr) { + return set(ctx, gguf_find_key(ctx, key), target, override); + } + + static bool set(const gguf_context * ctx, const std::string & key, T & target, const struct llama_model_kv_override *override = nullptr) { + return set(ctx, key.c_str(), target, override); + } + }; +} + struct llama_model_loader { int n_kv = 0; int n_tensors = 0; @@ -1812,21 +1961,34 @@ struct llama_model_loader { llama_fver fver; std::unique_ptr mapping; + std::unordered_map kv_overrides; struct gguf_context * ctx_gguf = NULL; struct ggml_context * ctx_meta = NULL; - llama_model_loader(const std::string & fname, bool use_mmap) : file(fname.c_str(), "rb") { + std::string arch_name; + LLM_KV llm_kv = LLM_KV(LLM_ARCH_UNKNOWN); + + llama_model_loader(const std::string & fname, bool use_mmap, const struct llama_model_kv_override * param_overrides_p) : file(fname.c_str(), "rb") { struct gguf_init_params params = { /*.no_alloc = */ true, /*.ctx = */ &ctx_meta, }; + if (param_overrides_p != nullptr) { + for (const struct llama_model_kv_override *p = param_overrides_p; p->key[0] != 0; p++) { + kv_overrides.insert({std::string(p->key), *p}); + } + } + ctx_gguf = gguf_init_from_file(fname.c_str(), params); if (!ctx_gguf) { throw std::runtime_error(format("%s: failed to load model from %s\n", __func__, fname.c_str())); } + get_key(llm_kv(LLM_KV_GENERAL_ARCHITECTURE), arch_name, false); + llm_kv = LLM_KV(llm_arch_from_string(arch_name)); + n_kv = gguf_get_n_kv(ctx_gguf); n_tensors = gguf_get_n_tensors(ctx_gguf); @@ -1894,6 +2056,7 @@ struct llama_model_loader { } } + LLAMA_LOG_INFO("%s: Dumping metadata keys/values. Note: KV overrides do not apply in this output.\n", __func__); for (int i = 0; i < n_kv; i++) { const char * name = gguf_get_key(ctx_gguf, i); const enum gguf_type type = gguf_get_kv_type(ctx_gguf, i); @@ -1939,19 +2102,59 @@ struct llama_model_loader { } } + template + typename std::enable_if::value, bool>::type + get_arr_n(const std::string & key, T & result, const bool required = true) { + const int kid = gguf_find_key(ctx_gguf, key.c_str()); + + if (kid < 0) { + if (required) { + throw std::runtime_error(format("key not found in model: %s", key.c_str())); + } + return false; + } + + struct GGUFMeta::ArrayInfo arr_info = + GGUFMeta::GKV::get_kv(ctx_gguf, kid); + + + result = arr_info.length; + return true; + } + + template + typename std::enable_if::value, bool>::type + get_arr_n(const enum llm_kv kid, T & result, const bool required = true) { + return get_arr_n(llm_kv(kid), result, required); + } + + template + bool get_key(const std::string & key, T & result, const bool required = true) { + auto it = kv_overrides.find(key); + + const struct llama_model_kv_override * override = + it != kv_overrides.end() ? 
&it->second : nullptr; + + const bool found = GGUFMeta::GKV::set(ctx_gguf, key, result, override); + + if (required && !found) { + throw std::runtime_error(format("key not found in model: %s", key.c_str())); + } + + return found; + } + + template + bool get_key(const enum llm_kv kid, T & result, const bool required = true) { + return get_key(llm_kv(kid), result, required); + } + std::string get_arch_name() const { - const auto kv = LLM_KV(LLM_ARCH_UNKNOWN); - - std::string arch_name; - GGUF_GET_KEY(ctx_gguf, arch_name, gguf_get_val_str, GGUF_TYPE_STRING, false, kv(LLM_KV_GENERAL_ARCHITECTURE)); - return arch_name; } enum llm_arch get_arch() const { - const std::string arch_name = get_arch_name(); - - return llm_arch_from_string(arch_name); + return llm_kv.arch; } const char * get_tensor_name(int i) const { @@ -2201,11 +2404,8 @@ static void llm_load_arch(llama_model_loader & ml, llama_model & model) { static void llm_load_hparams( llama_model_loader & ml, llama_model & model) { - struct gguf_context * ctx = ml.ctx_gguf; - - const auto kv = LLM_KV(model.arch); - auto & hparams = model.hparams; + const gguf_context * ctx = ml.ctx_gguf; // get metadata as string for (int i = 0; i < gguf_get_n_kv(ctx); i++) { @@ -2219,42 +2419,41 @@ static void llm_load_hparams( } // get general kv - GGUF_GET_KEY(ctx, model.name, gguf_get_val_str, GGUF_TYPE_STRING, false, kv(LLM_KV_GENERAL_NAME)); + ml.get_key(LLM_KV_GENERAL_NAME, model.name, false); // get hparams kv - GGUF_GET_KEY(ctx, hparams.n_vocab, gguf_get_arr_n, GGUF_TYPE_ARRAY, true, kv(LLM_KV_TOKENIZER_LIST)); - GGUF_GET_KEY(ctx, hparams.n_ctx_train, gguf_get_val_u32, GGUF_TYPE_UINT32, true, kv(LLM_KV_CONTEXT_LENGTH)); - GGUF_GET_KEY(ctx, hparams.n_embd, gguf_get_val_u32, GGUF_TYPE_UINT32, true, kv(LLM_KV_EMBEDDING_LENGTH)); - GGUF_GET_KEY(ctx, hparams.n_ff, gguf_get_val_u32, GGUF_TYPE_UINT32, true, kv(LLM_KV_FEED_FORWARD_LENGTH)); - GGUF_GET_KEY(ctx, hparams.n_head, gguf_get_val_u32, GGUF_TYPE_UINT32, true, kv(LLM_KV_ATTENTION_HEAD_COUNT)); - GGUF_GET_KEY(ctx, hparams.n_layer, gguf_get_val_u32, GGUF_TYPE_UINT32, true, kv(LLM_KV_BLOCK_COUNT)); + ml.get_arr_n(LLM_KV_TOKENIZER_LIST, hparams.n_vocab); + ml.get_key (LLM_KV_CONTEXT_LENGTH, hparams.n_ctx_train); + ml.get_key (LLM_KV_EMBEDDING_LENGTH, hparams.n_embd); + ml.get_key (LLM_KV_FEED_FORWARD_LENGTH, hparams.n_ff); + ml.get_key (LLM_KV_ATTENTION_HEAD_COUNT, hparams.n_head); + ml.get_key (LLM_KV_BLOCK_COUNT, hparams.n_layer); // n_head_kv is optional, default to n_head hparams.n_head_kv = hparams.n_head; - GGUF_GET_KEY(ctx, hparams.n_head_kv, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_ATTENTION_HEAD_COUNT_KV)); + ml.get_key(LLM_KV_ATTENTION_HEAD_COUNT_KV, hparams.n_head_kv, false); - hparams.rope_finetuned = false; - GGUF_GET_KEY(ctx, hparams.rope_finetuned, gguf_get_val_bool, GGUF_TYPE_BOOL, false, - kv(LLM_KV_ROPE_SCALING_FINETUNED)); + bool rope_finetuned = false; + ml.get_key(LLM_KV_ROPE_SCALING_FINETUNED, rope_finetuned, false); + hparams.rope_finetuned = rope_finetuned; hparams.n_yarn_orig_ctx = hparams.n_ctx_train; - GGUF_GET_KEY(ctx, hparams.n_yarn_orig_ctx, gguf_get_val_u32, GGUF_TYPE_UINT32, false, - kv(LLM_KV_ROPE_SCALING_ORIG_CTX_LEN)); + ml.get_key(LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, hparams.n_yarn_orig_ctx, false); // rope_freq_base (optional) hparams.rope_freq_base_train = 10000.0f; - GGUF_GET_KEY(ctx, hparams.rope_freq_base_train, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_FREQ_BASE)); + ml.get_key(LLM_KV_ROPE_FREQ_BASE, hparams.rope_freq_base_train, false); 
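// To make the override path concrete, a hedged example (the key string is an
// assumption, following the usual "<arch>.rope.freq_base" naming of these
// LLM_KV entries):
//
//   ./main -m model.gguf --override-kv llama.rope.freq_base=float:1000000.0
//
// routes the value through llama_model_loader::kv_overrides, so the
// ml.get_key(LLM_KV_ROPE_FREQ_BASE, hparams.rope_freq_base_train, false) call
// above uses 1000000.0 instead of whatever is stored in the GGUF file.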
std::string rope_scaling("linear"); - GGUF_GET_KEY(ctx, rope_scaling, gguf_get_val_str, GGUF_TYPE_STRING, false, kv(LLM_KV_ROPE_SCALING_TYPE)); + ml.get_key(LLM_KV_ROPE_SCALING_TYPE, rope_scaling, false); hparams.rope_scaling_type_train = llama_rope_scaling_type_from_string(rope_scaling); GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_UNSPECIFIED); // rope_freq_scale (inverse of the kv) is optional float ropescale = 0.0f; - GGUF_GET_KEY(ctx, ropescale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALING_FACTOR)); - if (ropescale == 0.0f) { // try the old key name - GGUF_GET_KEY(ctx, ropescale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); + if (!ml.get_key(LLM_KV_ROPE_SCALING_FACTOR, ropescale, false)) { + // try the old key name + ml.get_key(LLM_KV_ROPE_SCALE_LINEAR, ropescale, false); } hparams.rope_freq_scale_train = ropescale == 0.0f ? 1.0f : 1.0f/ropescale; @@ -2262,7 +2461,7 @@ static void llm_load_hparams( { hparams.n_rot = hparams.n_embd / hparams.n_head; - GGUF_GET_KEY(ctx, hparams.n_rot, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_ROPE_DIMENSION_COUNT)); + ml.get_key(LLM_KV_ROPE_DIMENSION_COUNT, hparams.n_rot, false); if (model.arch == LLM_ARCH_LLAMA || model.arch == LLM_ARCH_FALCON) { if (hparams.n_rot != hparams.n_embd / hparams.n_head) { @@ -2277,7 +2476,7 @@ static void llm_load_hparams( switch (model.arch) { case LLM_ARCH_LLAMA: { - GGUF_GET_KEY(ctx, hparams.f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); switch (hparams.n_layer) { case 26: model.type = e_model::MODEL_3B; break; @@ -2291,7 +2490,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_FALCON: { - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_7B; break; @@ -2301,7 +2500,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_BAICHUAN: { - GGUF_GET_KEY(ctx, hparams.f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_7B; break; case 40: model.type = e_model::MODEL_13B; break; @@ -2310,7 +2509,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_STARCODER: { - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { case 24: model.type = e_model::MODEL_1B; break; case 36: model.type = e_model::MODEL_3B; break; @@ -2321,7 +2520,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_PERSIMMON: { - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { case 36: model.type = e_model::MODEL_8B; break; default: model.type = e_model::MODEL_UNKNOWN; @@ -2329,7 +2528,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_REFACT: { - GGUF_GET_KEY(ctx, hparams.f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); 
switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_1B; break; default: model.type = e_model::MODEL_UNKNOWN; @@ -2337,7 +2536,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_BLOOM: { - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { case 24: model.type = e_model::MODEL_1B; break; @@ -2352,9 +2551,9 @@ static void llm_load_hparams( { hparams.f_clamp_kqv = 0.0f; - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); - GGUF_GET_KEY(ctx, hparams.f_clamp_kqv, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ATTENTION_CLAMP_KQV)); - GGUF_GET_KEY(ctx, hparams.f_max_alibi_bias, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_MAX_ALIBI_BIAS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CLAMP_KQV, hparams.f_clamp_kqv, false); + ml.get_key(LLM_KV_ATTENTION_MAX_ALIBI_BIAS, hparams.f_max_alibi_bias); switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_7B; break; @@ -2364,7 +2563,7 @@ static void llm_load_hparams( } break; case LLM_ARCH_STABLELM: { - GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_3B; break; @@ -2373,7 +2572,8 @@ static void llm_load_hparams( } break; case LLM_ARCH_QWEN: { - GGUF_GET_KEY(ctx, hparams.f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { case 32: model.type = e_model::MODEL_7B; break; case 40: model.type = e_model::MODEL_13B; break; @@ -2421,7 +2621,7 @@ static void llm_load_vocab( { std::string tokenizer_name; - GGUF_GET_KEY(ctx, tokenizer_name, gguf_get_val_str, GGUF_TYPE_STRING, true, kv(LLM_KV_TOKENIZER_MODEL)); + ml.get_key(LLM_KV_TOKENIZER_MODEL, tokenizer_name); if (tokenizer_name == "llama") { vocab.type = LLAMA_VOCAB_TYPE_SPM; @@ -2511,34 +2711,31 @@ static void llm_load_vocab( }; for (const auto & it : special_token_types) { const std::string & key = kv(std::get<0>(it)); - int32_t & id = std::get<1>(it), old_id = id; + int32_t & id = std::get<1>(it); - GGUF_GET_KEY(ctx, id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, key); - // Must be >= -1 and < vocab size. Since the key is unsigned, -1 - // can only come from the default value, so there's no point in - // validating that. - if (size_t(id + 1) > vocab.id_to_token.size()) { - LLAMA_LOG_WARN("%s: bad special token: '%s' = %d, using default id %d\n", - __func__, key.c_str(), id, old_id); - id = old_id; + uint32_t new_id; + if (!ml.get_key(std::get<0>(it), new_id, false)) { + continue; + } + if (new_id >= vocab.id_to_token.size()) { + LLAMA_LOG_WARN("%s: bad special token: '%s' = %ud, using default id %d\n", + __func__, key.c_str(), new_id, id); + } else { + id = new_id; } } // Handle add_bos_token and add_eos_token - std::string key = kv(LLM_KV_TOKENIZER_ADD_BOS); - int kid = gguf_find_key(ctx, key.c_str()); - enum gguf_type ktype = kid < 0 ? GGUF_TYPE_COUNT : gguf_get_kv_type(ctx, kid); - vocab.special_add_bos = ktype == GGUF_TYPE_BOOL ? 
gguf_get_val_bool(ctx, kid) : -1; - if (ktype != GGUF_TYPE_BOOL && ktype != GGUF_TYPE_COUNT) { - LLAMA_LOG_WARN("%s: bad field type %d for '%s' - ignoring\n", __func__, ktype, key.c_str()); - } - key = kv(LLM_KV_TOKENIZER_ADD_EOS); - kid = gguf_find_key(ctx, key.c_str()); - ktype = kid < 0 ? GGUF_TYPE_COUNT : gguf_get_kv_type(ctx, kid); - vocab.special_add_eos = ktype == GGUF_TYPE_BOOL ? gguf_get_val_bool(ctx, kid) : -1; - if (ktype != GGUF_TYPE_BOOL && ktype != GGUF_TYPE_COUNT) { - LLAMA_LOG_WARN("%s: bad field type %d for '%s' - ignoring\n", __func__, ktype, key.c_str()); + { + bool temp = true; + + if (ml.get_key(LLM_KV_TOKENIZER_ADD_BOS, temp, false)) { + vocab.special_add_bos = int(temp); + } + if (ml.get_key(LLM_KV_TOKENIZER_ADD_EOS, temp, false)) { + vocab.special_add_eos = int(temp); + } } } @@ -3487,7 +3684,7 @@ static void llm_load_tensors( static bool llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { try { - llama_model_loader ml(fname, params.use_mmap); + llama_model_loader ml(fname, params.use_mmap, params.kv_overrides); model.hparams.vocab_only = params.vocab_only; @@ -8078,7 +8275,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s constexpr bool use_mmap = false; #endif - llama_model_loader ml(fname_inp, use_mmap); + llama_model_loader ml(fname_inp, use_mmap, NULL); if (ml.use_mmap) { ml.mapping.reset(new llama_mmap(&ml.file, /* prefetch */ 0, ggml_is_numa())); } @@ -8374,7 +8571,7 @@ static int llama_apply_lora_from_file_internal( std::vector base_buf; if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); - ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true)); + ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ NULL)); size_t ctx_size; size_t mmapped_size; @@ -8602,6 +8799,7 @@ struct llama_model_params llama_model_default_params() { /*.tensor_split =*/ nullptr, /*.progress_callback =*/ nullptr, /*.progress_callback_user_data =*/ nullptr, + /*.kv_overrides =*/ nullptr, /*.vocab_only =*/ false, /*.use_mmap =*/ true, /*.use_mlock =*/ false, diff --git a/llama.h b/llama.h index 89cb6198e..517245a35 100644 --- a/llama.h +++ b/llama.h @@ -158,6 +158,22 @@ extern "C" { llama_seq_id all_seq_id; // used if seq_id == NULL } llama_batch; + enum llama_model_kv_override_type { + LLAMA_KV_OVERRIDE_INT, + LLAMA_KV_OVERRIDE_FLOAT, + LLAMA_KV_OVERRIDE_BOOL, + }; + + struct llama_model_kv_override { + char key[128]; + enum llama_model_kv_override_type tag; + union { + int64_t int_value; + double float_value; + bool bool_value; + }; + }; + struct llama_model_params { int32_t n_gpu_layers; // number of layers to store in VRAM int32_t main_gpu; // the GPU that is used for scratch and small tensors @@ -165,9 +181,13 @@ extern "C" { // called with a progress value between 0 and 1, pass NULL to disable llama_progress_callback progress_callback; + // context pointer passed to the progress callback void * progress_callback_user_data; + // override key-value pairs of the model meta data + const struct llama_model_kv_override * kv_overrides; + // Keep the booleans together to avoid misalignment during copy-by-value. 
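// A short usage sketch for the new kv_overrides field (illustrative only; it
// mirrors what common.cpp does for --override-kv and reuses the example key
// from the help text above):
//
//   llama_model_kv_override overrides[2] = {}; // zero-init: the empty key of overrides[1] terminates the list
//   std::strncpy(overrides[0].key, "tokenizer.ggml.add_bos_token", sizeof(overrides[0].key) - 1);
//   overrides[0].tag        = LLAMA_KV_OVERRIDE_BOOL;
//   overrides[0].bool_value = false;
//
//   llama_model_params mparams = llama_model_default_params();
//   mparams.kv_overrides      = overrides; // consumed by llama_model_loader at load time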
bool vocab_only; // only load the vocabulary, no weights bool use_mmap; // use mmap if possible From 5f6e0c0dff1e7a89331e6b25eca9a9fd71324069 Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Tue, 5 Dec 2023 10:55:12 -1000 Subject: [PATCH 059/811] grammar : pre-computed pieces + reserve mem + less string copies (#4330) * reserve space for codepoints * improvement for the appended 0 * used precomputed token text for grammar sample * reserve canidates_decoded * reserve canidates_grammar * remove candidates_decoded * Revert "remove candidates_decoded" This reverts commit 3773328080e6a139ee83198329a13cf4ff61d707. * changed decode_utf8 to take src by ref --- llama.cpp | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/llama.cpp b/llama.cpp index b77020e10..14e5d312e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6851,14 +6851,13 @@ struct llama_grammar_candidate { // Decodes a UTF-8 string which may end in an incomplete sequence. Adds a terminating 0 for use as // pointer. If an invalid sequence is encountered, returns `llama_partial_utf8.n_remain == -1`. static std::pair, llama_partial_utf8> decode_utf8( - const char * src, - size_t n_src, + const std::string & src, llama_partial_utf8 partial_start) { static const int lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 2, 2, 3, 4 }; - const char * pos = src; + const char * pos = src.c_str(); std::vector code_points; // common english strings have the same number of codepoints and bytes. `+ 1` for the terminating 0. - code_points.reserve(n_src + 1); + code_points.reserve(src.size() + 1); uint32_t value = partial_start.value; int n_remain = partial_start.n_remain; @@ -6909,13 +6908,6 @@ static std::pair, llama_partial_utf8> decode_utf8( return std::make_pair(std::move(code_points), llama_partial_utf8{ value, n_remain }); } -static std::pair, llama_partial_utf8> decode_utf8( - std::string src, - llama_partial_utf8 partial_start -) { - return decode_utf8(src.c_str(), src.size(), partial_start); -} - // returns true iff pos points to the end of one of the definitions of a rule static bool llama_grammar_is_end_of_sequence(const llama_grammar_element * pos) { switch (pos->type) { @@ -7554,11 +7546,13 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c const llama_token eos = llama_token_eos(&ctx->model); std::vector, llama_partial_utf8>> candidates_decoded; + candidates_decoded.reserve(candidates->size); std::vector candidates_grammar; + candidates_grammar.reserve(candidates->size); for (size_t i = 0; i < candidates->size; ++i) { const llama_token id = candidates->data[i].id; - const std::string piece = llama_token_to_piece(ctx, id); + const std::string & piece = ctx->model.vocab.id_to_token[id].text; if (id == eos) { if (!allow_eos) { candidates->data[i].logit = -INFINITY; @@ -7770,7 +7764,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar GGML_ASSERT(false); } - const std::string piece = llama_token_to_piece(ctx, token); + const std::string & piece = ctx->model.vocab.id_to_token[token].text; // Note terminating 0 in decoded string const auto decoded = decode_utf8(piece, grammar->partial_utf8); From da5eaef1f34d0a1f584cd4a092e7691ea46a9d91 Mon Sep 17 00:00:00 2001 From: stduhpf Date: Wed, 6 Dec 2023 09:08:17 +0100 Subject: [PATCH 060/811] speculative : support `--color` (#4343) * speculative: add some colors * minor : add braces --------- Co-authored-by: Georgi Gerganov --- 
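A hedged reading of the color logic added below (the escape sequences are standard ANSI SGR codes; they are not documented in the patch itself):

    // printf("\u001b[%dm%s\u001b[37m", (36 - s_keep % 6), token_str.c_str());
    //
    // 36 - s_keep % 6 evaluates to 31..36, i.e. the ANSI foreground colors
    // red(31), green(32), yellow(33), blue(34), magenta(35), cyan(36), so each
    // draft sequence keeps a stable color; \u001b[37m then switches the
    // foreground back to white for subsequent plain output.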
examples/speculative/speculative.cpp | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index ace755c51..dca3f84a5 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -203,8 +203,9 @@ int main(int argc, char ** argv) { const std::string token_str = llama_token_to_piece(ctx_tgt, id); - printf("%s", token_str.c_str()); - fflush(stdout); + if (!params.use_color) { + printf("%s", token_str.c_str()); + } if (id == llama_token_eos(model_tgt)) { has_eos = true; @@ -236,10 +237,18 @@ int main(int argc, char ** argv) { ++n_past_tgt; ++n_past_dft; ++i_dft; - + if (params.use_color) { + // Color token according to its origin sequence + printf("\u001b[%dm%s\u001b[37m", (36 - s_keep % 6), token_str.c_str()); + fflush(stdout); + } continue; } } + if (params.use_color) { + printf("%s", token_str.c_str()); + } + fflush(stdout); LOG("the sampled target token (%d, '%s') did not match, or we ran out of drafted tokens\n", id, token_str.c_str()); From caa9249217c5fd524b900add5ddcbeaa20cbcb12 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 6 Dec 2023 10:41:03 +0200 Subject: [PATCH 061/811] common : fix compile warning --- common/sampling.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index b6bb886c6..f4e76df31 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -113,13 +113,15 @@ std::string llama_sampling_order_print(const llama_sampling_params & params) { default : break; } } - } else result += "-> mirostat "; + } else { + result += "-> mirostat "; + } return result; } // no reasons to expose this function in header -void sampler_queue( +static void sampler_queue( struct llama_context * ctx_main, const llama_sampling_params & params, llama_token_data_array & cur_p, From 05cd6e5036d72d0930de4d8f6be7bce09e8dda24 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 6 Dec 2023 20:21:59 +0200 Subject: [PATCH 062/811] server : recognize cache_prompt parameter in OAI API (#4347) --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 911f7bbe1..369f81a84 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2387,6 +2387,7 @@ json oaicompat_completion_params_parse( // Map OpenAI parameters to llama.cpp parameters llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' + llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.8); llama_params["top_k"] = json_value(body, "top_k", 40); llama_params["top_p"] = json_value(body, "top_p", 0.95); From 81bc9214a389362010f7a57f4cbc30e5f83a2d28 Mon Sep 17 00:00:00 2001 From: Hongyu Ouyang <96765450+casavaca@users.noreply.github.com> Date: Thu, 7 Dec 2023 02:25:22 -0800 Subject: [PATCH 063/811] train : fix #4227 (double free in examples/train-text-from-scratch/train-text-from-scratch.cpp) (#4351) On commit b1108 (44c117f4) xaedes added ggml_allocr * alloc = NULL; ... (many lines in between) if (alloc) { ggml_allocr_free(alloc); } Which is correct, but it's easy to lose context after many lines in between. On commit b1287 (0e76a899) xaedes made a big change. From here on, alloc is freed eagerly. alloc = ggml_allocr_new(...) ... 
(short lines of code) ggml_allocr_free(alloc) This happens a few times, but alloc is never set to NULL, and many lines below, we still have if (alloc) { ggml_allocr_free(alloc); } which causes a double-free. --- examples/train-text-from-scratch/train-text-from-scratch.cpp | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index f049a3923..f7ed63365 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -1295,10 +1295,6 @@ int main(int argc, char ** argv) { opt_cb_data.last_save_iter = opt->iter; } - if (alloc) { - ggml_allocr_free(alloc); - } - ggml_free(opt->ctx); free_train_state(train); ggml_free(model.ctx); From bcc0eb4591bec5ec02fad3f2bdcb1b265052ea56 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 7 Dec 2023 13:03:17 +0200 Subject: [PATCH 064/811] llama : per-layer KV cache + quantum K cache (#4309) * per-layer KV * remove unnecessary copies * less code duplication, offload k and v separately * llama : offload KV cache per-layer * llama : offload K shift tensors * llama : offload for rest of the model arches * llama : enable offload debug temporarily * llama : keep the KV related layers on the device * llama : remove mirrors, perform Device -> Host when partial offload * common : add command-line arg to disable KV cache offloading * llama : update session save/load * llama : support quantum K cache (#4312) * llama : support quantum K cache (wip) * metal : add F32 -> Q8_0 copy kernel * cuda : add F32 -> Q8_0 copy kernel ggml-ci * cuda : use mmv kernel for quantum cache ops * llama : pass KV cache type through API * llama : fix build ggml-ci * metal : add F32 -> Q4_0 copy kernel * metal : add F32 -> Q4_1 copy kernel * cuda : wip * cuda : add F32 -> Q4_0 and F32 -> Q4_1 copy kernels * llama-bench : support type_k/type_v * metal : use mm kernel only for quantum KV cache * cuda : add comment * llama : remove memory_f16 and kv_f16 flags --------- Co-authored-by: slaren * readme : add API change notice --------- Co-authored-by: slaren --- README.md | 1 + common/common.cpp | 45 ++- common/common.h | 7 +- examples/llama-bench/llama-bench.cpp | 113 +++++- examples/quantize-stats/quantize-stats.cpp | 1 - examples/server/server.cpp | 4 - ggml-cuda.cu | 186 ++++++++- ggml-metal.m | 32 +- ggml-metal.metal | 192 +++++++++ llama.cpp | 440 ++++++++++----------- llama.h | 13 +- 11 files changed, 747 insertions(+), 287 deletions(-) diff --git a/README.md b/README.md index dac971ae5..ce026b8d1 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- **llama.h API change for handling KV cache offloading and data type: https://github.com/ggerganov/llama.cpp/pull/4309** - Using `llama.cpp` with AWS instances: https://github.com/ggerganov/llama.cpp/discussions/4225 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 - Collecting Apple Silicon performance stats: https://github.com/ggerganov/llama.cpp/discussions/4167 diff --git a/common/common.cpp b/common/common.cpp index 4e823c526..4a61ae593 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -278,8 +278,6 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.yarn_beta_slow = std::stof(argv[i]); - } else if (arg == "--memory-f32") 
{ - params.memory_f16 = false; } else if (arg == "--samplers") { if (++i >= argc) { invalid_param = true; @@ -510,6 +508,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.infill = true; } else if (arg == "-dkvc" || arg == "--dump-kv-cache") { params.dump_kv_cache = true; + } else if (arg == "-nkvo" || arg == "--no-kv-offload") { + params.no_kv_offload = true; + } else if (arg == "-ctk" || arg == "--cache-type-k") { + params.cache_type_k = argv[++i]; + } else if (arg == "-ctv" || arg == "--cache-type-v") { + params.cache_type_v = argv[++i]; } else if (arg == "--multiline-input") { params.multiline_input = true; } else if (arg == "--simple-io") { @@ -858,8 +862,6 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); printf(" --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); printf(" --no-penalize-nl do not penalize newline token\n"); - printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); - printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); printf(" --temp N temperature (default: %.1f)\n", (double)sparams.temp); printf(" --logits-all return logits for all tokens in the batch (default: disabled)\n"); printf(" --hellaswag compute HellaSwag score over random tasks from datafile supplied with -f\n"); @@ -900,6 +902,12 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); printf(" verbose print of the KV cache\n"); + printf(" -nkvo, --no-kv-offload\n"); + printf(" disable KV offload\n"); + printf(" -ctk TYPE, --cache-type-k TYPE\n"); + printf(" KV cache data type for K (default: %s)\n", params.cache_type_k.c_str()); + printf(" -ctv TYPE, --cache-type-v TYPE\n"); + printf(" KV cache data type for V (default: %s)\n", params.cache_type_v.c_str()); printf(" --simple-io use basic IO for better compatibility in subprocesses and limited consoles\n"); printf(" --lora FNAME apply LoRA adapter (implies --no-mmap)\n"); printf(" --lora-scaled FNAME S apply LoRA adapter with user defined scaling S (implies --no-mmap)\n"); @@ -1015,6 +1023,29 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & return mparams; } +static ggml_type kv_cache_type_from_str(const std::string & s) { + if (s == "f16") { + return GGML_TYPE_F16; + } + if (s == "q8_0") { + return GGML_TYPE_Q8_0; + } + if (s == "q4_0") { + return GGML_TYPE_Q4_0; + } + if (s == "q4_1") { + return GGML_TYPE_Q4_1; + } + if (s == "q5_0") { + return GGML_TYPE_Q5_0; + } + if (s == "q5_1") { + return GGML_TYPE_Q5_1; + } + + throw std::runtime_error("Invalid cache type: " + s); +} + struct llama_context_params llama_context_params_from_gpt_params(const gpt_params & params) { auto cparams = llama_context_default_params(); @@ -1024,7 +1055,6 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; cparams.mul_mat_q = params.mul_mat_q; cparams.seed = params.seed; - cparams.f16_kv = params.memory_f16; cparams.logits_all = params.logits_all; cparams.embedding = params.embedding; cparams.rope_scaling_type = params.rope_scaling_type; @@ -1035,6 +1065,10 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.yarn_orig_ctx = params.yarn_orig_ctx; + cparams.offload_kqv = !params.no_kv_offload; + + cparams.type_k = kv_cache_type_from_str(params.cache_type_k); + cparams.type_v = kv_cache_type_from_str(params.cache_type_v); return cparams; } @@ -1447,7 +1481,6 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l } fprintf(stream, "lora_base: %s\n", params.lora_base.c_str()); fprintf(stream, "main_gpu: %d # default: 0\n", params.main_gpu); - fprintf(stream, "memory_f32: %s # default: false\n", !params.memory_f16 ? "true" : "false"); fprintf(stream, "mirostat: %d # default: 0 (disabled)\n", sparams.mirostat); fprintf(stream, "mirostat_ent: %f # default: 5.0\n", sparams.mirostat_tau); fprintf(stream, "mirostat_lr: %f # default: 0.1\n", sparams.mirostat_eta); diff --git a/common/common.h b/common/common.h index 024679380..e87ce1133 100644 --- a/common/common.h +++ b/common/common.h @@ -100,7 +100,6 @@ struct gpt_params { size_t hellaswag_tasks = 400; // number of tasks to use when computing the HellaSwag score bool mul_mat_q = true; // if true, use mul_mat_q kernels instead of cuBLAS - bool memory_f16 = true; // use f16 instead of f32 for memory kv bool random_prompt = false; // do not randomize prompt if none provided bool use_color = false; // use color to distinguish generations and inputs bool interactive = false; // interactive mode @@ -125,10 +124,14 @@ struct gpt_params { bool verbose_prompt = false; // print prompt tokens before generation bool infill = false; // use infill mode bool dump_kv_cache = false; // dump the KV cache contents for debugging purposes + bool no_kv_offload = false; // disable KV offloading + + std::string cache_type_k = "f16"; // KV cache data type for the K + std::string cache_type_v = "f16"; // KV cache data type for the V // multimodal models (see examples/llava) std::string mmproj = ""; // path to multimodal projector - std::string image = ""; // path to an image file + std::string image = ""; // path to an image file }; bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 9bd82d565..6617c050d 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -53,6 +53,13 @@ static std::vector split(const std::string & str, char delim) { return values; } +template +static std::vector transform_to_str(const std::vector & values, F f) { + std::vector str_values; + std::transform(values.begin(), values.end(), std::back_inserter(str_values), f); + return str_values; +} + template static T avg(const std::vector & v) { if (v.empty()) { @@ -126,7 +133,8 @@ struct cmd_params { std::vector n_prompt; std::vector n_gen; std::vector n_batch; - std::vector f32_kv; + std::vector type_k; + std::vector type_v; std::vector n_threads; std::vector n_gpu_layers; std::vector main_gpu; @@ -142,7 +150,8 @@ static const cmd_params cmd_params_defaults = { /* n_prompt */ {512}, /* n_gen */ {128}, /* n_batch */ {512}, - /* f32_kv */ {false}, + /* 
type_k */ {GGML_TYPE_F16}, + /* type_v */ {GGML_TYPE_F16}, /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, /* main_gpu */ {0}, @@ -162,7 +171,8 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); printf(" -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); printf(" -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); - printf(" --memory-f32 <0|1> (default: %s)\n", join(cmd_params_defaults.f32_kv, ",").c_str()); + printf(" -ctk , --cache-type-k (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_k, ggml_type_name), ",").c_str()); + printf(" -ctv , --cache-type-v (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_v, ggml_type_name), ",").c_str()); printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); @@ -173,9 +183,32 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); printf("\n"); printf("Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times.\n"); - } +static ggml_type ggml_type_from_name(const std::string & s) { + if (s == "f16") { + return GGML_TYPE_F16; + } + if (s == "q8_0") { + return GGML_TYPE_Q8_0; + } + if (s == "q4_0") { + return GGML_TYPE_Q4_0; + } + if (s == "q4_1") { + return GGML_TYPE_Q4_1; + } + if (s == "q5_0") { + return GGML_TYPE_Q5_0; + } + if (s == "q5_1") { + return GGML_TYPE_Q5_1; + } + + return GGML_TYPE_COUNT; +} + + static cmd_params parse_cmd_params(int argc, char ** argv) { cmd_params params; std::string arg; @@ -224,13 +257,38 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.n_batch.insert(params.n_batch.end(), p.begin(), p.end()); - } else if (arg == "--memory-f32") { + } else if (arg == "-ctk" || arg == "--cache-type-k") { if (++i >= argc) { invalid_param = true; break; } - auto p = split(argv[i], split_delim); - params.f32_kv.insert(params.f32_kv.end(), p.begin(), p.end()); + auto p = split(argv[i], split_delim); + std::vector types; + for (const auto & t : p) { + ggml_type gt = ggml_type_from_name(t); + if (gt == GGML_TYPE_COUNT) { + invalid_param = true; + break; + } + types.push_back(gt); + } + params.type_k.insert(params.type_k.end(), types.begin(), types.end()); + } else if (arg == "-ctv" || arg == "--cache-type-v") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + std::vector types; + for (const auto & t : p) { + ggml_type gt = ggml_type_from_name(t); + if (gt == GGML_TYPE_COUNT) { + invalid_param = true; + break; + } + types.push_back(gt); + } + params.type_v.insert(params.type_v.end(), types.begin(), types.end()); } else if (arg == "-t" || arg == "--threads") { if (++i >= argc) { invalid_param = true; @@ -321,7 +379,8 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.n_prompt.empty()) { params.n_prompt = cmd_params_defaults.n_prompt; } if (params.n_gen.empty()) { params.n_gen = cmd_params_defaults.n_gen; } if (params.n_batch.empty()) { params.n_batch = cmd_params_defaults.n_batch; } - if (params.f32_kv.empty()) { params.f32_kv = 
cmd_params_defaults.f32_kv; } + if (params.type_k.empty()) { params.type_k = cmd_params_defaults.type_k; } + if (params.type_v.empty()) { params.type_v = cmd_params_defaults.type_v; } if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } @@ -336,7 +395,8 @@ struct cmd_params_instance { int n_prompt; int n_gen; int n_batch; - bool f32_kv; + ggml_type type_k; + ggml_type type_v; int n_threads; int n_gpu_layers; int main_gpu; @@ -365,7 +425,8 @@ struct cmd_params_instance { cparams.n_ctx = n_prompt + n_gen; cparams.n_batch = n_batch; - cparams.f16_kv = !f32_kv; + cparams.type_k = type_k; + cparams.type_v = type_v; cparams.mul_mat_q = mul_mat_q; return cparams; @@ -380,7 +441,8 @@ static std::vector get_cmd_params_instances_int(const cmd_p for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) for (const auto & nb : params.n_batch) - for (const auto & fk : params.f32_kv) + for (const auto & tk : params.type_k) + for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) for (const auto & nt : params.n_threads) { cmd_params_instance instance = { @@ -388,7 +450,8 @@ static std::vector get_cmd_params_instances_int(const cmd_p /* .n_prompt = */ n_prompt, /* .n_gen = */ n_gen, /* .n_batch = */ nb, - /* .f32_kv = */ fk, + /* .type_k = */ tk, + /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, @@ -410,7 +473,8 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) for (const auto & nb : params.n_batch) - for (const auto & fk : params.f32_kv) + for (const auto & tk : params.type_k) + for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) for (const auto & nt : params.n_threads) { for (const auto & n_prompt : params.n_prompt) { @@ -422,7 +486,8 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_prompt = */ n_prompt, /* .n_gen = */ 0, /* .n_batch = */ nb, - /* .f32_kv = */ fk, + /* .type_k = */ tk, + /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, @@ -441,7 +506,8 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_prompt = */ 0, /* .n_gen = */ n_gen, /* .n_batch = */ nb, - /* .f32_kv = */ fk, + /* .type_k = */ tk, + /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, @@ -489,7 +555,8 @@ struct test { uint64_t model_n_params; int n_batch; int n_threads; - bool f32_kv; + ggml_type type_k; + ggml_type type_v; int n_gpu_layers; int main_gpu; bool mul_mat_q; @@ -508,7 +575,8 @@ struct test { model_n_params = llama_model_n_params(lmodel); n_batch = inst.n_batch; n_threads = inst.n_threads; - f32_kv = inst.f32_kv; + type_k = inst.type_k; + type_v = inst.type_v; n_gpu_layers = inst.n_gpu_layers; main_gpu = inst.main_gpu; mul_mat_q = inst.mul_mat_q; @@ -571,7 +639,7 @@ struct test { "cuda", "opencl", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", - "n_batch", "n_threads", "f16_kv", + "n_batch", "n_threads", "type_k", "type_v", "n_gpu_layers", "main_gpu", "mul_mat_q", "tensor_split", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", @@ -621,7 +689,7 @@ struct test { std::to_string(cuda), std::to_string(opencl), 
std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), - std::to_string(n_batch), std::to_string(n_threads), std::to_string(!f32_kv), + std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(mul_mat_q), tensor_split_str, std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), @@ -805,8 +873,11 @@ struct markdown_printer : public printer { if (params.n_batch.size() > 1 || params.n_batch != cmd_params_defaults.n_batch) { fields.push_back("n_batch"); } - if (params.f32_kv.size() > 1 || params.f32_kv != cmd_params_defaults.f32_kv) { - fields.push_back("f16_kv"); + if (params.type_k.size() > 1 || params.type_k != cmd_params_defaults.type_k) { + fields.push_back("type_k"); + } + if (params.type_v.size() > 1 || params.type_v != cmd_params_defaults.type_v) { + fields.push_back("type_v"); } if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { fields.push_back("main_gpu"); diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index 271282477..773024160 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -321,7 +321,6 @@ int main(int argc, char ** argv) { auto cparams = llama_context_default_params(); cparams.n_ctx = 256; cparams.seed = 1; - cparams.f16_kv = false; ctx = llama_new_context_with_model(model, cparams); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 369f81a84..895f751c9 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2108,10 +2108,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.yarn_beta_slow = std::stof(argv[i]); } - else if (arg == "--memory-f32" || arg == "--memory_f32") - { - params.memory_f16 = false; - } else if (arg == "--threads" || arg == "-t") { if (++i >= argc) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9019a849f..1200d1c88 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7,6 +7,7 @@ #include #include #include +#include #if defined(GGML_USE_HIPBLAS) #include @@ -4559,6 +4560,116 @@ static __global__ void cpy_f32_f16(const char * cx, char * cdst, const int ne, cpy_1(cx + x_offset, cdst + dst_offset); } +static __device__ void cpy_blck_f32_q8_0(const char * cxi, char * cdsti) { + const float * xi = (const float *) cxi; + block_q8_0 * dsti = (block_q8_0 *) cdsti; + + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_0; j++) { + const float v = xi[j]; + amax = fmaxf(amax, fabsf(v)); + } + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + dsti->d = d; + + for (int j = 0; j < QK8_0; ++j) { + const float x0 = xi[j]*id; + + dsti->qs[j] = roundf(x0); + } +} + +static __device__ void cpy_blck_f32_q4_0(const char * cxi, char * cdsti) { + const float * xi = (const float *) cxi; + block_q4_0 * dsti = (block_q4_0 *) cdsti; + + float amax = 0.0f; + float vmax = 0.0f; + + for (int j = 0; j < QK4_0; ++j) { + const float v = xi[j]; + if (amax < fabsf(v)) { + amax = fabsf(v); + vmax = v; + } + } + + const float d = vmax / -8; + const float id = d ? 
1.0f/d : 0.0f; + + dsti->d = d; + + for (int j = 0; j < QK4_0/2; ++j) { + const float x0 = xi[0 + j]*id; + const float x1 = xi[QK4_0/2 + j]*id; + + const uint8_t xi0 = min(15, (int8_t)(x0 + 8.5f)); + const uint8_t xi1 = min(15, (int8_t)(x1 + 8.5f)); + + dsti->qs[j] = xi0; + dsti->qs[j] |= xi1 << 4; + } +} + +static __device__ void cpy_blck_f32_q4_1(const char * cxi, char * cdsti) { + const float * xi = (const float *) cxi; + block_q4_1 * dsti = (block_q4_1 *) cdsti; + + float vmin = FLT_MAX; + float vmax = -FLT_MAX; + + for (int j = 0; j < QK4_1; ++j) { + const float v = xi[j]; + + if (v < vmin) vmin = v; + if (v > vmax) vmax = v; + } + + const float d = (vmax - vmin) / ((1 << 4) - 1); + const float id = d ? 1.0f/d : 0.0f; + + dsti->dm.x = d; + dsti->dm.y = vmin; + + for (int j = 0; j < QK4_1/2; ++j) { + const float x0 = (xi[0 + j] - vmin)*id; + const float x1 = (xi[QK4_1/2 + j] - vmin)*id; + + const uint8_t xi0 = min(15, (int8_t)(x0 + 0.5f)); + const uint8_t xi1 = min(15, (int8_t)(x1 + 0.5f)); + + dsti->qs[j] = xi0; + dsti->qs[j] |= xi1 << 4; + } +} + +template +static __global__ void cpy_f32_q(const char * cx, char * cdst, const int ne, + const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, + const int ne10, const int ne11, const int nb10, const int nb11, const int nb12) { + const int i = (blockDim.x*blockIdx.x + threadIdx.x)*qk; + + if (i >= ne) { + return; + } + + const int i02 = i / (ne00*ne01); + const int i01 = (i - i02*ne01*ne00) / ne00; + const int i00 = (i - i02*ne01*ne00 - i01*ne00); + const int x_offset = i00*nb00 + i01*nb01 + i02*nb02; + + const int i12 = i / (ne10*ne11); + const int i11 = (i - i12*ne10*ne11) / ne10; + const int i10 = (i - i12*ne10*ne11 - i11*ne10)/qk; + const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12; + + cpy_blck(cx + x_offset, cdst + dst_offset); +} + static __device__ float rope_yarn_ramp(const float low, const float high, const int i0) { const float y = (i0 / 2 - low) / max(0.001f, high - low); return 1.0f - min(1.0f, max(0.0f, y)); @@ -5737,6 +5848,39 @@ static void ggml_cpy_f32_f16_cuda( (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); } +static void ggml_cpy_f32_q8_0_cuda( + const char * cx, char * cdst, const int ne, + const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, + const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, cudaStream_t stream) { + + GGML_ASSERT(ne % QK8_0 == 0); + const int num_blocks = ne / QK8_0; + cpy_f32_q<<>> + (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); +} + +static void ggml_cpy_f32_q4_0_cuda( + const char * cx, char * cdst, const int ne, + const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, + const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, cudaStream_t stream) { + + GGML_ASSERT(ne % QK4_0 == 0); + const int num_blocks = ne / QK4_0; + cpy_f32_q<<>> + (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); +} + +static void ggml_cpy_f32_q4_1_cuda( + const char * cx, char * cdst, const int ne, + const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, + const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, cudaStream_t stream) { + + GGML_ASSERT(ne % QK4_1 == 0); + const int num_blocks = ne / QK4_1; + cpy_f32_q<<>> + (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); +} + static void ggml_cpy_f16_f16_cuda( const char * cx, char * 
cdst, const int ne, const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, @@ -6093,20 +6237,21 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( const enum ggml_type type = src->type; const int64_t ts = ggml_type_size(type); const int64_t bs = ggml_blck_size(type); - int64_t i1_diff = i1_high - i1_low; + const int64_t i1_diff = i1_high - i1_low; const char * x = src_ptr + i1_low*nb1 + i2*nb2 + i3*nb3; - if (nb0 == ts && nb1 == ts*ne0/bs) { + if (nb0 == ts && nb1 == ts*(ne0/bs)) { return cudaMemcpyAsync(dst_ptr, x, i1_diff*nb1, kind, stream); } if (nb0 == ts) { - return cudaMemcpy2DAsync(dst_ptr, ts*ne0/bs, x, nb1, ts*ne0/bs, i1_diff, kind, stream); + return cudaMemcpy2DAsync(dst_ptr, ts*(ne0/bs), x, nb1, ts*(ne0/bs), i1_diff, kind, stream); } + GGML_ASSERT(bs == 1 && "TODO: implement bs != 1"); for (int64_t i1 = 0; i1 < i1_diff; i1++) { const void * rx = (const void *) ((const char *) x + i1*nb1); - void * rd = (void *) (dst_ptr + i1*ts*ne0/bs); + void * rd = (void *) (dst_ptr + i1*ts*ne0); // pretend the row is a matrix with cols=1 - cudaError_t r = cudaMemcpy2DAsync(rd, ts/bs, rx, nb0, ts/bs, ne0, kind, stream); + cudaError_t r = cudaMemcpy2DAsync(rd, ts, rx, nb0, ts, ne0, kind, stream); if (r != cudaSuccess) { return r; } } return cudaSuccess; @@ -6474,6 +6619,8 @@ inline void ggml_cuda_op_mul_mat_vec_q( const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, const int64_t src1_padded_row_size, const cudaStream_t & stream) { + GGML_ASSERT(ggml_nrows(src1) == 1); + const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; @@ -6533,7 +6680,8 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( size_t ash; dfloat * src1_dfloat = nullptr; // dfloat == half - bool src1_convert_f16 = src0->type == GGML_TYPE_Q4_0 || src0->type == GGML_TYPE_Q4_1 || + bool src1_convert_f16 = + src0->type == GGML_TYPE_Q4_0 || src0->type == GGML_TYPE_Q4_1 || src0->type == GGML_TYPE_Q5_0 || src0->type == GGML_TYPE_Q5_1 || src0->type == GGML_TYPE_Q8_0 || src0->type == GGML_TYPE_F16; @@ -7103,10 +7251,9 @@ static void ggml_cuda_op_mul_mat( const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; const bool src0_is_contiguous = ggml_is_contiguous(src0); - const bool src1_is_contiguous = ggml_is_contiguous(src1); - const int64_t src1_padded_col_size = ne10 % MATRIX_ROW_PADDING == 0 ? - ne10 : ne10 - ne10 % MATRIX_ROW_PADDING + MATRIX_ROW_PADDING; + + const int64_t src1_padded_col_size = GGML_PAD(ne10, MATRIX_ROW_PADDING); const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; GGML_ASSERT(!(split && ne02 > 1)); @@ -7231,7 +7378,7 @@ static void ggml_cuda_op_mul_mat( const size_t src1_ddq_i_offset = (i0*ne11 + src1_col_0) * src1_padded_col_size*q8_1_ts/q8_1_bs; // for split tensors the data begins at i0 == i0_offset_low - char * src0_dd_i = src0_dd[id] + (i0/i02_divisor) * ne01*ne00*src0_ts/src0_bs; + char * src0_dd_i = src0_dd[id] + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; float * src1_ddf_i = src1_ddf[id] + (i0*ne11 + src1_col_0) * ne10; char * src1_ddq_i = src1_ddq[id] + src1_ddq_i_offset; float * dst_dd_i = dst_dd[id] + (i0*ne1 + src1_col_0) * (dst_on_device ? 
ne0 : row_diff); @@ -7698,10 +7845,11 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else - const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); + const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && ggml_nrows(src1) == 1; #endif // GGML_CUDA_FORCE_DMMV if (use_mul_mat_vec_q) { + // NOTE: this kernel does not support ggml_nrows(src1) > 1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); @@ -7770,14 +7918,17 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg char * src1_ddc = (char *) src1_extra->data_device[g_main_device]; if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32) { - ggml_cpy_f32_f32_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f32_cuda (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F16) { - ggml_cpy_f32_f16_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f16_cuda (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q8_0) { + ggml_cpy_f32_q8_0_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_0) { + ggml_cpy_f32_q4_0_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_1) { + ggml_cpy_f32_q4_1_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16) { - ggml_cpy_f16_f16_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f16_f16_cuda (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); } else { fprintf(stderr, "%s: unsupported type combination (%s to %s)\n", __func__, ggml_type_name(src0->type), ggml_type_name(src1->type)); @@ -7788,6 +7939,7 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg } static void ggml_cuda_dup(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + // TODO: why do we pass dst as src1 here? 
ggml_cuda_cpy(src0, dst, nullptr); (void) src1; } diff --git a/ggml-metal.m b/ggml-metal.m index 3343bc8a3..be4ab0f2e 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -118,6 +118,11 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(im2col_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f32); + GGML_METAL_DECL_KERNEL(cpy_f32_q8_0); + GGML_METAL_DECL_KERNEL(cpy_f32_q4_0); + GGML_METAL_DECL_KERNEL(cpy_f32_q4_1); + //GGML_METAL_DECL_KERNEL(cpy_f32_q5_0); + //GGML_METAL_DECL_KERNEL(cpy_f32_q5_1); GGML_METAL_DECL_KERNEL(cpy_f16_f16); GGML_METAL_DECL_KERNEL(concat); GGML_METAL_DECL_KERNEL(sqr); @@ -324,6 +329,11 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(im2col_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f32); + GGML_METAL_ADD_KERNEL(cpy_f32_q8_0); + GGML_METAL_ADD_KERNEL(cpy_f32_q4_0); + GGML_METAL_ADD_KERNEL(cpy_f32_q4_1); + //GGML_METAL_ADD_KERNEL(cpy_f32_q5_0); + //GGML_METAL_ADD_KERNEL(cpy_f32_q5_1); GGML_METAL_ADD_KERNEL(cpy_f16_f16); GGML_METAL_ADD_KERNEL(concat); GGML_METAL_ADD_KERNEL(sqr); @@ -425,6 +435,11 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(im2col_f16); GGML_METAL_DEL_KERNEL(cpy_f32_f16); GGML_METAL_DEL_KERNEL(cpy_f32_f32); + GGML_METAL_DEL_KERNEL(cpy_f32_q8_0); + GGML_METAL_DEL_KERNEL(cpy_f32_q4_0); + GGML_METAL_DEL_KERNEL(cpy_f32_q4_1); + //GGML_METAL_DEL_KERNEL(cpy_f32_q5_0); + //GGML_METAL_DEL_KERNEL(cpy_f32_q5_1); GGML_METAL_DEL_KERNEL(cpy_f16_f16); GGML_METAL_DEL_KERNEL(concat); GGML_METAL_DEL_KERNEL(sqr); @@ -1114,7 +1129,7 @@ void ggml_metal_graph_compute( !ggml_is_transposed(src1) && src1t == GGML_TYPE_F32 && ne00 % 32 == 0 && ne00 >= 64 && - ne11 > ne11_mm_min) { + (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); switch (src0->type) { case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_f32_f32]; break; @@ -1549,14 +1564,23 @@ void ggml_metal_graph_compute( case GGML_OP_CPY: case GGML_OP_CONT: { - const int nth = MIN(1024, ne00); + GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); + + int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); switch (src0t) { case GGML_TYPE_F32: { + GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); + switch (dstt) { - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f16]; break; - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; break; + case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f16]; break; + case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; break; + case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q8_0]; break; + case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_0]; break; + case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_1]; break; + //case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_0]; break; + //case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_1]; break; default: GGML_ASSERT(false && "not implemented"); }; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index 9a79f815f..9f5ffcbaf 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -3,6 +3,7 @@ using namespace metal; #define MAX(x, y) ((x) > (y) ? (x) : (y)) +#define MIN(x, y) ((x) < (y) ? 
(x) : (y)) #define QK4_0 32 #define QR4_0 2 @@ -1460,6 +1461,197 @@ kernel void kernel_cpy_f32_f32( } } +kernel void kernel_cpy_f32_q8_0( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK8_0; + + device block_q8_0 * dst_data = (device block_q8_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK8_0; i00 < ne00; i00 += ntg.x*QK8_0) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_0; j++) { + const float v = src[j]; + amax = MAX(amax, fabs(v)); + } + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + dst_data[i00/QK8_0].d = d; + + for (int j = 0; j < QK8_0; ++j) { + const float x0 = src[j]*id; + + dst_data[i00/QK8_0].qs[j] = round(x0); + } + } +} + +kernel void kernel_cpy_f32_q4_0( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_0; + + device block_q4_0 * dst_data = (device block_q4_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK4_0; i00 < ne00; i00 += ntg.x*QK4_0) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < QK4_0; j++) { + const float v = src[j]; + if (amax < fabs(v)) { + amax = fabs(v); + max = v; + } + } + + const float d = max / -8; + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK4_0].d = d; + + for (int j = 0; j < QK4_0/2; ++j) { + const float x0 = src[0 + j]*id; + const float x1 = src[QK4_0/2 + j]*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f)); + + dst_data[i00/QK4_0].qs[j] = xi0; + dst_data[i00/QK4_0].qs[j] |= xi1 << 4; + } + } +} + +kernel void kernel_cpy_f32_q4_1( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_1; + + device block_q4_1 * dst_data = (device block_q4_1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK4_1; i00 < ne00; i00 += ntg.x*QK4_1) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float min = FLT_MAX; + float max = -FLT_MAX; + + for (int j = 0; j < QK4_1; j++) { + const float v = src[j]; + if (min > v) min = v; + if (max < v) max = v; + } + + const float d = (max - min) / ((1 << 4) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK4_1].d = d; + dst_data[i00/QK4_1].m = min; + + for (int j = 0; j < QK4_1/2; ++j) { + const float x0 = (src[0 + j] - min)*id; + const float x1 = (src[QK4_1/2 + j] - min)*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f)); + + dst_data[i00/QK4_1].qs[j] = xi0; + dst_data[i00/QK4_1].qs[j] |= xi1 << 4; + } + } +} + kernel void kernel_concat( device const char * src0, device const char * src1, diff --git a/llama.cpp b/llama.cpp index 14e5d312e..b12bbd1b0 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1231,6 +1231,7 @@ struct llama_cparams { float yarn_beta_slow; bool mul_mat_q; + bool offload_kqv; }; struct llama_layer { @@ -1299,8 +1300,8 @@ struct llama_kv_cache { std::vector cells; - struct ggml_tensor * k = NULL; - struct ggml_tensor * v = NULL; + std::vector k_l; // per layer + std::vector v_l; struct ggml_context * ctx = NULL; @@ -1313,8 +1314,10 @@ struct llama_kv_cache { #ifdef GGML_USE_CUBLAS if (ggml_cublas_loaded()) { - ggml_cuda_free_data(k); - ggml_cuda_free_data(v); + for (size_t i = 0; i < k_l.size(); ++i) { + ggml_cuda_free_data(k_l[i]); + ggml_cuda_free_data(v_l[i]); + } } #endif } @@ -1504,9 +1507,11 @@ struct llama_context { static bool llama_kv_cache_init( const struct llama_hparams & hparams, struct llama_kv_cache & cache, - ggml_type wtype, + ggml_type ktype, + ggml_type vtype, uint32_t n_ctx, - int n_gpu_layers) { + int n_gpu_layers, + bool offload) { const uint32_t n_embd = hparams.n_embd_gqa(); const uint32_t n_layer = hparams.n_layer; @@ -1522,7 +1527,7 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - cache.buf.resize(2u*n_elements*ggml_type_size(wtype) + 2u*ggml_tensor_overhead()); + cache.buf.resize(n_elements*(ggml_type_sizef(ktype) + ggml_type_sizef(vtype)) + 2u*n_layer*ggml_tensor_overhead()); memset(cache.buf.data, 0, cache.buf.size); struct ggml_init_params params; @@ -1532,37 +1537,44 @@ static bool llama_kv_cache_init( cache.ctx = ggml_init(params); + size_t vram_kv_cache = 0; + if (!cache.ctx) { LLAMA_LOG_ERROR("%s: failed to allocate memory for kv cache\n", __func__); return false; } - cache.k = ggml_new_tensor_1d(cache.ctx, wtype, n_elements); - cache.v = ggml_new_tensor_1d(cache.ctx, wtype, n_elements); - ggml_set_name(cache.k, "cache_k"); - ggml_set_name(cache.v, "cache_v"); + cache.k_l.reserve(n_layer); + cache.v_l.reserve(n_layer); - (void) n_gpu_layers; + const int i_gpu_start = (int) n_layer - n_gpu_layers; GGML_UNUSED(i_gpu_start); + GGML_UNUSED(offload); + + for (int i = 0; i < (int) n_layer; i++) { + ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd*n_ctx); + ggml_format_name(k, "cache_k_l%d", i); + ggml_format_name(v, "cache_v_l%d", i); + cache.k_l.push_back(k); + cache.v_l.push_back(v); #ifdef GGML_USE_CUBLAS - if (ggml_cublas_loaded()) { - size_t vram_kv_cache = 0; - - if (n_gpu_layers > (int)n_layer + 1) { - ggml_cuda_assign_buffers_no_scratch(cache.v); - LLAMA_LOG_INFO("%s: offloading v cache to GPU\n", __func__); - vram_kv_cache += ggml_nbytes(cache.v); - } - if (n_gpu_layers > (int)n_layer + 2) { - ggml_cuda_assign_buffers_no_scratch(cache.k); - LLAMA_LOG_INFO("%s: offloading k cache to GPU\n", __func__); - vram_kv_cache += ggml_nbytes(cache.k); - } - if (vram_kv_cache > 0) { - LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MiB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); + if (i >= i_gpu_start) { + if (offload) { + 
ggml_cuda_assign_buffers_no_scratch(k); + vram_kv_cache += ggml_nbytes(k); + ggml_cuda_assign_buffers_no_scratch(v); + vram_kv_cache += ggml_nbytes(v); + } } +#endif // GGML_USE_CUBLAS } -#endif + + if (vram_kv_cache > 0) { + LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); + } + + GGML_UNUSED(n_gpu_layers); return true; } @@ -2968,14 +2980,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3045,14 +3050,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3115,14 +3113,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3192,14 +3183,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3269,21 +3253,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > int(n_layer + 1)) { - LLAMA_LOG_ERROR("%s: CUDA backend missing Persimmon CUDA ops, can offload at most %ld layers. See: https://github.com/ggerganov/llama.cpp/issues/4038\n", - __func__, n_layer + 1); - throw std::runtime_error("Persimmon CUDA offload failed"); - } -#endif - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? 
GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3342,14 +3312,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3420,14 +3383,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3487,14 +3443,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3559,14 +3508,7 @@ static void llm_load_tensors( ggml_backend_type backend_output; if (n_gpu_layers > int(n_layer)) { - // norm is not performance relevant on its own but keeping it in VRAM reduces data copying - // on Windows however this is detrimental unless everything is on the GPU -#ifndef _WIN32 - backend_norm = llama_backend_offload; -#else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? 
GGML_BACKEND_CPU : llama_backend_offload; -#endif // _WIN32 - + backend_norm = llama_backend_offload; backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; @@ -3642,8 +3584,8 @@ static void llm_load_tensors( } #ifdef GGML_USE_CUBLAS - const int max_backend_supported_layers = hparams.n_layer + 3; - const int max_offloadable_layers = hparams.n_layer + 3; + const int max_backend_supported_layers = hparams.n_layer + 1; + const int max_offloadable_layers = hparams.n_layer + 1; #elif GGML_USE_CLBLAST const int max_backend_supported_layers = hparams.n_layer + 1; const int max_offloadable_layers = hparams.n_layer + 1; @@ -3811,11 +3753,11 @@ static void llm_build_k_shift( struct ggml_tensor * tmp = // we rotate only the first n_rot dimensions ggml_rope_custom_inplace(ctx, - ggml_view_3d(ctx, kv.k, + ggml_view_3d(ctx, kv.k_l[il], n_embd_head, n_head_kv, n_ctx, - ggml_element_size(kv.k)*n_embd_head, - ggml_element_size(kv.k)*n_embd_gqa, - ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il), + ggml_type_sizef(kv.k_l[il]->type)*n_embd_head, + ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa, + 0), K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(tmp, "K_shifted", il); @@ -3842,13 +3784,13 @@ static void llm_build_kv_store( //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not needed cb(v_cur_t, "v_cur_t", il); - struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k, n_tokens*n_embd_gqa, - (ggml_element_size(kv.k)*n_embd_gqa)*(il*n_ctx + kv_head)); + struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_gqa, + (ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa)*kv_head); cb(k_cache_view, "k_cache_view", il); - struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv.v), - (il*n_ctx)*ggml_element_size(kv.v)*n_embd_gqa + kv_head*ggml_element_size(kv.v)); + struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_gqa, + ( n_ctx)*ggml_element_size(kv.v_l[il]), + (kv_head)*ggml_element_size(kv.v_l[il])); cb(v_cache_view, "v_cache_view", il); // important: storing RoPE-ed version of K in the KV cache! 
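
The hunk above switches the cache views from one shared K/V tensor (offset by `il*n_ctx`) to per-layer `kv.k_l[il]`/`kv.v_l[il]` tensors, so only the write position `kv_head` enters the offsets. A minimal sketch of that offset arithmetic, assuming an F16 cache (2 bytes per element) and purely hypothetical sizes, not values taken from the patch:

    // Offset arithmetic behind k_cache_view / v_cache_view above, for an F16 cache.
    // n_embd_gqa, n_ctx and kv_head below are hypothetical example values.
    #include <stdio.h>
    #include <stddef.h>

    int main(void) {
        const size_t esz        = 2;    // bytes per element (F16)
        const size_t n_embd_gqa = 4096; // K/V elements per token (hypothetical)
        const size_t n_ctx      = 512;  // cache cells per layer (hypothetical)
        const size_t kv_head    = 100;  // first free cell for the new tokens

        // K is row-major per layer: token t occupies elements [t*n_embd_gqa, (t+1)*n_embd_gqa)
        const size_t k_view_offset = esz*n_embd_gqa*kv_head;

        // V is stored transposed: dimension d of token t sits at element d*n_ctx + t,
        // so the write view starts at column kv_head and strides by a full row of n_ctx cells
        const size_t v_view_offset = esz*kv_head;
        const size_t v_row_stride  = esz*n_ctx;

        printf("K offset: %zu bytes, V offset: %zu bytes, V row stride: %zu bytes\n",
               k_view_offset, v_view_offset, v_row_stride);
        return 0;
    }

Note how the layer index no longer appears in either offset: each layer owns its own cache tensor, which is what allows K and V to use different ggml types per the new `type_k`/`type_v` parameters.
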
@@ -4000,11 +3942,11 @@ static struct ggml_tensor * llm_build_kqv( cb(q, "q", il); struct ggml_tensor * k = - ggml_view_3d(ctx, kv.k, + ggml_view_3d(ctx, kv.k_l[il], n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv.k)*n_embd_gqa, - ggml_element_size(kv.k)*n_embd_head, - ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il); + ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa, + ggml_type_sizef(kv.k_l[il]->type)*n_embd_head, + 0); cb(k, "k", il); struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); @@ -4035,11 +3977,11 @@ static struct ggml_tensor * llm_build_kqv( // split cached v into n_head heads struct ggml_tensor * v = - ggml_view_3d(ctx, kv.v, + ggml_view_3d(ctx, kv.v_l[il], n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv.v)*n_ctx, - ggml_element_size(kv.v)*n_ctx*n_embd_head, - ggml_element_size(kv.v)*n_ctx*n_embd_gqa*il); + ggml_element_size(kv.v_l[il])*n_ctx, + ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head, + 0); cb(v, "v", il); struct ggml_tensor * kqv = ggml_mul_mat(ctx, v, kq); @@ -4631,6 +4573,7 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "imp_embd", -1); + // inp_pos - contains the positions struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); @@ -4638,6 +4581,7 @@ struct llm_build_context { struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); cb(KQ_scale, "KQ_scale", -1); + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5237,15 +5181,15 @@ struct llm_build_context { cb(inpL, "inp_embd", -1); // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + struct ggml_tensor * inp_pos= ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + struct ggml_tensor * KQ_scale= ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it wil be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask= ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed @@ -5351,8 +5295,8 @@ struct llm_build_context { enum llm_offload_func_e { OFFLOAD_FUNC_NOP, OFFLOAD_FUNC, - OFFLOAD_FUNC_KQ, - OFFLOAD_FUNC_V, + OFFLOAD_FUNC_FRC, // force offload + OFFLOAD_FUNC_KQV, OFFLOAD_FUNC_NR, OFFLOAD_FUNC_EMB, OFFLOAD_FUNC_OUT, @@ -5438,11 +5382,12 @@ static const std::unordered_map k_offload_map //{ "inp_embd", OFFLOAD_FUNC_NR }, // TODO: missing K-quants get_rows kernel { "pos_embd", OFFLOAD_FUNC_NR }, - { "inp_pos", OFFLOAD_FUNC_KQ }, // this is often used for KQ ops (e.g. rope) - { "KQ_scale", OFFLOAD_FUNC_KQ }, - { "KQ_mask", OFFLOAD_FUNC_KQ }, - { "K_shift", OFFLOAD_FUNC_KQ }, - { "K_shifted", OFFLOAD_FUNC_KQ }, + { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) + { "KQ_scale", OFFLOAD_FUNC_FRC }, + { "KQ_mask", OFFLOAD_FUNC_FRC }, + { "K_shift", OFFLOAD_FUNC_FRC }, + + { "K_shifted", OFFLOAD_FUNC }, { "inp_norm", OFFLOAD_FUNC_NR }, { "inp_norm_w", OFFLOAD_FUNC_NR }, @@ -5455,38 +5400,38 @@ static const std::unordered_map k_offload_map { "attn_norm", OFFLOAD_FUNC }, { "attn_norm_2", OFFLOAD_FUNC }, - { "wqkv", OFFLOAD_FUNC_KQ }, - { "bqkv", OFFLOAD_FUNC_KQ }, - { "wqkv_clamped", OFFLOAD_FUNC_KQ }, + { "wqkv", OFFLOAD_FUNC_KQV }, + { "bqkv", OFFLOAD_FUNC_KQV }, + { "wqkv_clamped", OFFLOAD_FUNC_KQV }, - { "tmpk", OFFLOAD_FUNC_KQ }, - { "tmpq", OFFLOAD_FUNC_KQ }, - { "tmpv", OFFLOAD_FUNC_V }, - { "Kcur", OFFLOAD_FUNC_KQ }, - { "Qcur", OFFLOAD_FUNC_KQ }, - { "Vcur", OFFLOAD_FUNC_V }, + { "tmpk", OFFLOAD_FUNC_KQV }, + { "tmpq", OFFLOAD_FUNC_KQV }, + { "tmpv", OFFLOAD_FUNC_KQV }, + { "Kcur", OFFLOAD_FUNC_KQV }, + { "Qcur", OFFLOAD_FUNC_KQV }, + { "Vcur", OFFLOAD_FUNC_KQV }, - { "krot", OFFLOAD_FUNC_KQ }, - { "qrot", OFFLOAD_FUNC_KQ }, - { "kpass", OFFLOAD_FUNC_KQ }, - { "qpass", OFFLOAD_FUNC_KQ }, - { "krotated", OFFLOAD_FUNC_KQ }, - { "qrotated", OFFLOAD_FUNC_KQ }, + { "krot", OFFLOAD_FUNC_KQV }, + { "qrot", OFFLOAD_FUNC_KQV }, + { "kpass", OFFLOAD_FUNC_KQV }, + { "qpass", OFFLOAD_FUNC_KQV }, + { "krotated", OFFLOAD_FUNC_KQV }, + { "qrotated", OFFLOAD_FUNC_KQV }, - { "q", OFFLOAD_FUNC_KQ }, - { "k", OFFLOAD_FUNC_KQ }, - { "kq", OFFLOAD_FUNC_KQ }, - { "kq_scaled", OFFLOAD_FUNC_KQ }, - { "kq_scaled_alibi", OFFLOAD_FUNC_KQ }, - { "kq_masked", OFFLOAD_FUNC_KQ }, - { "kq_soft_max", OFFLOAD_FUNC_V }, - { "kq_soft_max_ext", OFFLOAD_FUNC_V }, - { "v", OFFLOAD_FUNC_V }, - { "kqv", OFFLOAD_FUNC_V }, - { "kqv_merged", OFFLOAD_FUNC_V }, - { "kqv_merged_cont", OFFLOAD_FUNC_V }, - { "kqv_wo", OFFLOAD_FUNC_V }, - { "kqv_out", OFFLOAD_FUNC_V }, + { "q", OFFLOAD_FUNC_KQV }, + { "k", OFFLOAD_FUNC_KQV }, + { "kq", OFFLOAD_FUNC_KQV }, + { "kq_scaled", OFFLOAD_FUNC_KQV }, + { "kq_scaled_alibi", OFFLOAD_FUNC_KQV }, + { "kq_masked", OFFLOAD_FUNC_KQV }, + { "kq_soft_max", OFFLOAD_FUNC_KQV }, + { "kq_soft_max_ext", OFFLOAD_FUNC_KQV }, + { "v", OFFLOAD_FUNC_KQV }, + { "kqv", OFFLOAD_FUNC_KQV }, + { "kqv_merged", OFFLOAD_FUNC_KQV }, + { "kqv_merged_cont", OFFLOAD_FUNC_KQV }, + { "kqv_wo", OFFLOAD_FUNC_KQV }, + { "kqv_out", OFFLOAD_FUNC_KQV }, { "ffn_inp", OFFLOAD_FUNC }, { "ffn_norm", OFFLOAD_FUNC }, @@ -5678,15 +5623,15 @@ static struct ggml_cgraph * llama_build_graph( { OFFLOAD_FUNC_NOP, "CPU" }, { OFFLOAD_FUNC_OUT, "CPU" }, #ifdef GGML_USE_CUBLAS - { OFFLOAD_FUNC, "GPU (CUDA)" }, - { OFFLOAD_FUNC_KQ, "GPU (CUDA) KQ" }, - { OFFLOAD_FUNC_V, "GPU (CUDA) V" }, - { OFFLOAD_FUNC_NR, "GPU (CUDA) NR" }, + { OFFLOAD_FUNC, "GPU (CUDA)" }, + { OFFLOAD_FUNC_FRC, "GPU (CUDA) FRC" }, + { OFFLOAD_FUNC_KQV, "GPU (CUDA) KQV" }, + { OFFLOAD_FUNC_NR, "GPU (CUDA) NR" }, { OFFLOAD_FUNC_EMB, "GPU (CUDA) EMB" }, #else { OFFLOAD_FUNC, "CPU" }, - { OFFLOAD_FUNC_KQ, "CPU" }, - { OFFLOAD_FUNC_V, "CPU" }, + { OFFLOAD_FUNC_FRC, "CPU" }, + { OFFLOAD_FUNC_KQV, "CPU" }, { OFFLOAD_FUNC_NR, "CPU" }, { OFFLOAD_FUNC_EMB, "CPU" }, #endif // GGML_USE_CUBLAS @@ -5719,21 +5664,26 @@ static struct ggml_cgraph * llama_build_graph( } } break; + case OFFLOAD_FUNC_FRC: + if (!lctx.cparams.offload_kqv) { + func_e = OFFLOAD_FUNC_NOP; + } break; + case OFFLOAD_FUNC_KQV: + if (!lctx.cparams.offload_kqv) { + func_e = OFFLOAD_FUNC_NOP; + } else { + if (n_gpu_layers < n_layer) { + if (il < i_gpu_start) { + func_e = OFFLOAD_FUNC_NOP; + } + } + } + break; case OFFLOAD_FUNC_NR: if (n_gpu_layers <= n_layer + 0) { func_e = 
OFFLOAD_FUNC_NOP; } break; - case OFFLOAD_FUNC_V: - if (n_gpu_layers <= n_layer + 1) { - func_e = OFFLOAD_FUNC_NOP; - } - break; - case OFFLOAD_FUNC_KQ: - if (n_gpu_layers <= n_layer + 2) { - func_e = OFFLOAD_FUNC_NOP; - } - break; case OFFLOAD_FUNC_EMB: if (!offload_emb || n_gpu_layers < n_layer) { func_e = OFFLOAD_FUNC_NOP; @@ -5755,8 +5705,8 @@ static struct ggml_cgraph * llama_build_graph( case OFFLOAD_FUNC_NOP: case OFFLOAD_FUNC_OUT: func = ggml_offload_nop; break; case OFFLOAD_FUNC: - case OFFLOAD_FUNC_KQ: - case OFFLOAD_FUNC_V: + case OFFLOAD_FUNC_KQV: + case OFFLOAD_FUNC_FRC: case OFFLOAD_FUNC_NR: case OFFLOAD_FUNC_EMB: func = ggml_offload_gpu; break; default: GGML_ASSERT(false); @@ -5942,6 +5892,7 @@ static int llama_decode_internal( // after enough generations, the benefit from this heuristic disappears // if we start defragmenting the cache, the benefit from this will be more important kv_self.n = std::min((int32_t) cparams.n_ctx, std::max(32, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); + //kv_self.n = llama_kv_cache_cell_max(kv_self); //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); @@ -5992,7 +5943,7 @@ static int llama_decode_internal( n_threads = std::min(4, n_threads); } - const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 3; + const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 1; if (ggml_cpu_has_cublas() && fully_offloaded) { n_threads = 1; } @@ -8821,10 +8772,12 @@ struct llama_context_params llama_context_default_params() { /*.yarn_beta_fast =*/ 32.0f, /*.yarn_beta_slow =*/ 1.0f, /*.yarn_orig_ctx =*/ 0, + /*.type_k =*/ GGML_TYPE_F16, + /*.type_v =*/ GGML_TYPE_F16, /*.mul_mat_q =*/ true, - /*.f16_kv =*/ true, /*.logits_all =*/ false, /*.embedding =*/ false, + /*.offload_kqv =*/ true, }; return result; @@ -8941,6 +8894,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.mul_mat_q = params.mul_mat_q; + cparams.offload_kqv = params.offload_kqv; cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; cparams.rope_freq_base = params.rope_freq_base == 0.0f ? hparams.rope_freq_base_train : params.rope_freq_base; @@ -8974,19 +8928,36 @@ struct llama_context * llama_new_context_with_model( ctx->rng = std::mt19937(params.seed); ctx->logits_all = params.logits_all; - ggml_type memory_type = params.f16_kv ? 
GGML_TYPE_F16 : GGML_TYPE_F32; + const ggml_type type_k = params.type_k; + const ggml_type type_v = params.type_v; + + GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_k) == 0); + GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_v) == 0); // reserve memory for context buffers if (!hparams.vocab_only) { - if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, memory_type, cparams.n_ctx, model->n_gpu_layers)) { + if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; } { - const size_t memory_size = ggml_nbytes(ctx->kv_self.k) + ggml_nbytes(ctx->kv_self.v); - LLAMA_LOG_INFO("%s: kv self size = %7.2f MiB\n", __func__, memory_size / 1024.0 / 1024.0); + size_t memory_size_k = 0; + size_t memory_size_v = 0; + + for (auto & k : ctx->kv_self.k_l) { + memory_size_k += ggml_nbytes(k); + } + + for (auto & v : ctx->kv_self.v_l) { + memory_size_v += ggml_nbytes(v); + } + + LLAMA_LOG_INFO("%s: KV self size = %7.2f MiB, K (%s): %7.2f MiB, V (%s): %7.2f MiB\n", __func__, + (float)(memory_size_k + memory_size_v) / (1024.0f * 1024.0f), + ggml_type_name(type_k), (float)memory_size_k / (1024.0f * 1024.0f), + ggml_type_name(type_v), (float)memory_size_v / (1024.0f * 1024.0f)); } // resized during inference @@ -9057,8 +9028,12 @@ struct llama_context * llama_new_context_with_model( } size_t kv_vram_size = 0; - add_tensor(ctx->kv_self.k, kv_vram_size); - add_tensor(ctx->kv_self.v, kv_vram_size); + for (auto & k : ctx->kv_self.k_l) { + add_tensor(k, kv_vram_size); + } + for (auto & v : ctx->kv_self.v_l) { + add_tensor(v, kv_vram_size); + } size_t ctx_vram_size = alloc_size + kv_vram_size; size_t total_vram_size = model_vram_size + ctx_vram_size; @@ -9528,37 +9503,45 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat data_ctx->write(&kv_used, sizeof(kv_used)); if (kv_buf_size) { - const size_t elt_size = ggml_element_size(kv_self.k); + const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); + ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - ggml_tensor * kout3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_head, n_layer); - std::vector kout3d_data(ggml_nbytes(kout3d), 0); - kout3d->data = kout3d_data.data(); + std::vector> kout2d_data(n_layer); + std::vector> vout2d_data(n_layer); - ggml_tensor * vout3d = ggml_new_tensor_3d(cpy_ctx, kv_self.v->type, kv_head, n_embd, n_layer); - std::vector vout3d_data(ggml_nbytes(vout3d), 0); - vout3d->data = vout3d_data.data(); + for (int il = 0; il < (int) n_layer; ++il) { + ggml_tensor * kout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + kout2d_data[il].resize(ggml_nbytes(kout2d)); + kout2d->data = kout2d_data[il].data(); - ggml_tensor * k3d = ggml_view_3d(cpy_ctx, kv_self.k, - n_embd, kv_head, n_layer, - elt_size*n_embd, elt_size*n_embd*n_ctx, 0); + ggml_tensor * vout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + vout2d_data[il].resize(ggml_nbytes(vout2d)); + vout2d->data = vout2d_data[il].data(); - ggml_tensor * v3d = ggml_view_3d(cpy_ctx, kv_self.v, - kv_head, n_embd, n_layer, - elt_size*n_ctx, elt_size*n_ctx*n_embd, 
0); + ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], + n_embd, kv_head, + elt_size*n_embd, 0); + + ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], + kv_head, n_embd, + elt_size*n_ctx, 0); + + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d)); + } - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k3d, kout3d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v3d, vout3d)); ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); ggml_free(cpy_ctx); - // our data is now in the kout3d_data and vout3d_data buffers + // our data is now in the kout2d_data and vout2d_data buffers // write them to file - data_ctx->write(kout3d_data.data(), kout3d_data.size()); - data_ctx->write(vout3d_data.data(), vout3d_data.size()); + for (uint32_t il = 0; il < n_layer; ++il) { + data_ctx->write(kout2d_data[il].data(), kout2d_data[il].size()); + data_ctx->write(vout2d_data[il].data(), vout2d_data[il].size()); + } } for (uint32_t i = 0; i < kv_size; ++i) { @@ -9658,29 +9641,32 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { if (kv_buf_size) { GGML_ASSERT(kv_self.buf.size == kv_buf_size); - const size_t elt_size = ggml_element_size(kv_self.k); + const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); + ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - ggml_tensor * kin3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_head, n_layer); - kin3d->data = (void *) inp; - inp += ggml_nbytes(kin3d); + for (int il = 0; il < n_layer; ++il) { + ggml_tensor * kin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + kin2d->data = (void *) inp; + inp += ggml_nbytes(kin2d); - ggml_tensor * vin3d = ggml_new_tensor_3d(cpy_ctx, kv_self.v->type, kv_head, n_embd, n_layer); - vin3d->data = (void *) inp; - inp += ggml_nbytes(vin3d); + ggml_tensor * vin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + vin2d->data = (void *) inp; + inp += ggml_nbytes(vin2d); - ggml_tensor * k3d = ggml_view_3d(cpy_ctx, kv_self.k, - n_embd, kv_head, n_layer, - elt_size*n_embd, elt_size*n_embd*n_ctx, 0); + ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], + n_embd, kv_head, + elt_size*n_embd, 0); - ggml_tensor * v3d = ggml_view_3d(cpy_ctx, kv_self.v, - kv_head, n_embd, n_layer, - elt_size*n_ctx, elt_size*n_ctx*n_embd, 0); + ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], + kv_head, n_embd, + elt_size*n_ctx, 0); + + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d, k2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d, v2d)); + } - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin3d, k3d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin3d, v3d)); ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); ggml_free(cpy_ctx); diff --git a/llama.h b/llama.h index 517245a35..b1f5fca62 100644 --- a/llama.h +++ b/llama.h @@ -42,7 +42,7 @@ #define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN -#define LLAMA_SESSION_VERSION 2 +#define LLAMA_SESSION_VERSION 3 #if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) // Defined when llama.cpp is compiled with support for offloading model layers to GPU. 
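
The next hunk declares the new `type_k`, `type_v` and `offload_kqv` fields in `llama_context_params`. A minimal usage sketch built only on those declarations and on `llama_new_context_with_model()` as shown earlier in the patch; the context size, cache types and the `model` handle here are illustrative assumptions, not defaults:

    #include "llama.h"

    // Hypothetical helper: build a context with a quantized K cache using the new fields.
    static struct llama_context * make_ctx_q8_k(struct llama_model * model) {
        struct llama_context_params cparams = llama_context_default_params();

        cparams.n_ctx       = 4096;           // hypothetical context size
        cparams.type_k      = GGML_TYPE_Q8_0; // quantize the K cache
        cparams.type_v      = GGML_TYPE_F16;  // keep V in F16 (it is stored transposed)
        cparams.offload_kqv = true;           // keep the KV cache and the KQV ops on the GPU

        return llama_new_context_with_model(model, cparams);
    }

With these defaults-plus-overrides, the per-layer cache created by `llama_kv_cache_init()` allocates each `cache_k_l%d` tensor in the chosen K type and each `cache_v_l%d` tensor in the chosen V type, which is where the memory saving from a quantized K cache comes from.
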
@@ -211,11 +211,14 @@ extern "C" { float yarn_beta_slow; // YaRN high correction dim uint32_t yarn_orig_ctx; // YaRN original context size + enum ggml_type type_k; // data type for K cache + enum ggml_type type_v; // data type for V cache + // Keep the booleans together to avoid misalignment during copy-by-value. - bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true) - bool f16_kv; // use fp16 for KV cache, fp32 otherwise - bool logits_all; // the llama_eval() call computes all logits, not just the last one - bool embedding; // embedding mode only + bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true) + bool logits_all; // the llama_eval() call computes all logits, not just the last one + bool embedding; // embedding mode only + bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU }; // model quantization parameters From fe680e3d1080a765e5d3150ffd7bab189742898d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 7 Dec 2023 22:26:54 +0200 Subject: [PATCH 065/811] sync : ggml (new ops, tests, backend, etc.) (#4359) * sync : ggml (part 1) * sync : ggml (part 2, CUDA) * sync : ggml (part 3, Metal) * ggml : build fixes ggml-ci * cuda : restore lost changes * cuda : restore lost changes (StableLM rope) * cmake : enable separable compilation for CUDA ggml-ci * ggml-cuda : remove device side dequantize * Revert "cmake : enable separable compilation for CUDA" This reverts commit 09e35d04b1c4ca67f9685690160b35bc885a89ac. * cuda : remove assert for rope * tests : add test-backend-ops * ggml : fix bug in ggml_concat * ggml : restore `ggml_get_n_tasks()` logic in `ggml_graph_plan()` * ci : try to fix macOS * ggml-backend : remove backend self-registration * ci : disable Metal for macOS cmake build ggml-ci * metal : fix "supports family" call * metal : fix assert * metal : print resource path ggml-ci --------- Co-authored-by: slaren --- .github/workflows/build.yml | 15 +- .gitignore | 1 + CMakeLists.txt | 14 +- Makefile | 6 +- ggml-alloc.c | 49 +- ggml-alloc.h | 7 + ggml-backend-impl.h | 67 +- ggml-backend.c | 771 +++++++++++++++----- ggml-backend.h | 79 +- ggml-cuda.cu | 1353 ++++++++++++++++++++++++---------- ggml-cuda.h | 10 +- ggml-impl.h | 2 +- ggml-metal.h | 6 + ggml-metal.m | 676 ++++++++++++----- ggml-metal.metal | 755 +++++++++++++++---- ggml.c | 414 ++++++++--- ggml.h | 53 +- scripts/sync-ggml.sh | 5 +- tests/CMakeLists.txt | 28 +- tests/test-backend-ops.cpp | 1357 +++++++++++++++++++++++++++++++++++ 20 files changed, 4637 insertions(+), 1031 deletions(-) create mode 100644 tests/test-backend-ops.cpp diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 22be233e6..a5090e398 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -143,6 +143,9 @@ jobs: cd build ctest --verbose + # TODO: build with LLAMA_NO_METAL because test-backend-ops fail on "Apple Paravirtual device" and I don't know + # how to debug it. 
+ # ref: https://github.com/ggerganov/llama.cpp/actions/runs/7131777249/job/19420981052#step:5:1124 macOS-latest-make: runs-on: macos-latest @@ -160,14 +163,18 @@ jobs: - name: Build id: make_build run: | - make -j $(sysctl -n hw.logicalcpu) + LLAMA_NO_METAL=1 make -j $(sysctl -n hw.logicalcpu) - name: Test id: make_test run: | - make tests -j $(sysctl -n hw.logicalcpu) - make test -j $(sysctl -n hw.logicalcpu) + LLAMA_NO_METAL=1 make tests -j $(sysctl -n hw.logicalcpu) + LLAMA_NO_METAL=1 make test -j $(sysctl -n hw.logicalcpu) + # TODO: build with LLAMA_METAL=OFF because test-backend-ops fail on "Apple Paravirtual device" and I don't know + # how to debug it. + # ref: https://github.com/ggerganov/llama.cpp/actions/runs/7132125951/job/19422043567?pr=4359#step:5:6584 + # would be great if we fix these macOS-latest-cmake: runs-on: macos-latest @@ -188,7 +195,7 @@ jobs: sysctl -a mkdir build cd build - cmake .. + cmake -DLLAMA_METAL=OFF .. cmake --build . --config Release -j $(sysctl -n hw.logicalcpu) - name: Test diff --git a/.gitignore b/.gitignore index 58c483994..76b3d2861 100644 --- a/.gitignore +++ b/.gitignore @@ -101,3 +101,4 @@ poetry.toml /tests/test-tokenizer-1-llama /tests/test-tokenizer-1-bpe /tests/test-rope +/tests/test-backend-ops diff --git a/CMakeLists.txt b/CMakeLists.txt index 0639518de..78de2dd1a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -97,9 +97,9 @@ option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" option(LLAMA_MPI "llama: use MPI" OFF) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) -option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE}) -option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) -option(LLAMA_BUILD_SERVER "llama: build server example" ON) +option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE}) +option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) +option(LLAMA_BUILD_SERVER "llama: build server example" ON) # Required for relocatable CMake package include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) @@ -662,11 +662,11 @@ add_library(ggml OBJECT ggml-backend.h ggml-quants.c ggml-quants.h - ${GGML_SOURCES_CUDA} ${GGML_HEADERS_CUDA} + ${GGML_SOURCES_CUDA} ${GGML_HEADERS_CUDA} ${GGML_SOURCES_OPENCL} ${GGML_HEADERS_OPENCL} - ${GGML_SOURCES_METAL} ${GGML_HEADERS_METAL} - ${GGML_SOURCES_MPI} ${GGML_HEADERS_MPI} - ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} + ${GGML_SOURCES_METAL} ${GGML_HEADERS_METAL} + ${GGML_SOURCES_MPI} ${GGML_HEADERS_MPI} + ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} ) target_include_directories(ggml PUBLIC . 
${LLAMA_EXTRA_INCLUDES}) diff --git a/Makefile b/Makefile index 3cc932a2e..a1a6cae54 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,8 @@ BUILD_TARGETS = \ TEST_TARGETS = \ tests/test-llama-grammar tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt \ tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0-llama \ - tests/test-tokenizer-0-falcon tests/test-tokenizer-1-llama tests/test-tokenizer-1-bpe tests/test-rope + tests/test-tokenizer-0-falcon tests/test-tokenizer-1-llama tests/test-tokenizer-1-bpe tests/test-rope \ + tests/test-backend-ops # Code coverage output files COV_TARGETS = *.gcno tests/*.gcno *.gcda tests/*.gcda *.gcov tests/*.gcov lcov-report gcovr-report @@ -746,3 +747,6 @@ tests/test-rope: tests/test-rope.cpp ggml.o $(OBJS) tests/test-c.o: tests/test-c.c llama.h $(CC) $(CFLAGS) -c $(filter-out %.h,$^) -o $@ + +tests/test-backend-ops: tests/test-backend-ops.cpp ggml.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) diff --git a/ggml-alloc.c b/ggml-alloc.c index 0d4e12ae9..d3049efb4 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -168,10 +168,6 @@ static void ggml_tallocr_free_tensor(ggml_tallocr_t alloc, struct ggml_tensor * size = aligned_offset(NULL, size, alloc->alignment); AT_PRINTF("%s: freeing %s at %p (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, ptr, size, alloc->n_free_blocks); - if (!alloc->measure) { - ggml_backend_buffer_free_tensor(alloc->buffer, tensor); - } - #ifdef GGML_ALLOCATOR_DEBUG remove_allocated_tensor(alloc, tensor); #endif @@ -237,7 +233,7 @@ void ggml_tallocr_reset(ggml_tallocr_t alloc) { } ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment) { - struct ggml_backend_buffer * buffer = ggml_backend_cpu_buffer_from_ptr(NULL, data, size); + struct ggml_backend_buffer * buffer = ggml_backend_cpu_buffer_from_ptr(data, size); ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); @@ -449,7 +445,6 @@ static ggml_tallocr_t node_tallocr(ggml_gallocr_t galloc, struct ggml_tensor * n static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool update_backend) { ggml_tallocr_t alloc = node_tallocr(galloc, view); - //printf("init_view: %s from src %s\n", view->name, view->view_src->name); GGML_ASSERT(view->view_src != NULL && view->view_src->data != NULL); if (update_backend) { view->backend = view->view_src->backend; @@ -459,7 +454,7 @@ static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool upd // FIXME: the view should be initialized by the owning buffer, but currently this breaks the CUDA backend // due to the ggml_tensor_extra_gpu ring buffer overwriting the KV cache extras - assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->backend == alloc->buffer->backend); + assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->buft == alloc->buffer->buft); if (!alloc->measure) { ggml_backend_buffer_init_tensor(alloc->buffer, view); @@ -765,3 +760,43 @@ size_t ggml_allocr_max_size(ggml_allocr_t alloc) { size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph) { return ggml_gallocr_alloc_graph(alloc->galloc, alloc->talloc, graph); } + +// utils +ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, ggml_backend_buffer_type_t buft) { + GGML_ASSERT(ggml_get_no_alloc(ctx) == true); + + size_t alignment = ggml_backend_buft_get_alignment(buft); + + size_t nbytes = 0; + for (struct ggml_tensor * t = 
ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (t->data == NULL && t->view_src == NULL) { + nbytes += GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), alignment); + } + } + + if (nbytes == 0) { + fprintf(stderr, "%s: no tensors to allocate\n", __func__); + return NULL; + } + + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, nbytes); + ggml_tallocr_t tallocr = ggml_tallocr_new_from_buffer(buffer); + + for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (t->data == NULL) { + if (t->view_src == NULL) { + ggml_tallocr_alloc(tallocr, t); + } else { + ggml_backend_view_init(buffer, t); + } + } + } + + ggml_tallocr_free(tallocr); + + return buffer; +} + +ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, ggml_backend_t backend) { + return ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_get_default_buffer_type(backend)); +} diff --git a/ggml-alloc.h b/ggml-alloc.h index dde2a06bf..ad87cebc8 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -8,6 +8,7 @@ extern "C" { struct ggml_backend; struct ggml_backend_buffer; +struct ggml_backend_buffer_type; // // Legacy API @@ -80,6 +81,12 @@ GGML_API void ggml_gallocr_alloc_graph_n( struct ggml_hash_set hash_set, ggml_tallocr_t * hash_node_talloc); + +// Utils +// Create a buffer and allocate all the tensors in a ggml_context +GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, struct ggml_backend_buffer_type * buft); +GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, struct ggml_backend * backend); + #ifdef __cplusplus } #endif diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 211e3d424..f588af602 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -12,31 +12,50 @@ extern "C" { // Backend buffer // + // buffer type + typedef void * ggml_backend_buffer_type_context_t; + + struct ggml_backend_buffer_type_i { + ggml_backend_buffer_t (*alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); + size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment + size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding + bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend + }; + + struct ggml_backend_buffer_type { + struct ggml_backend_buffer_type_i iface; + ggml_backend_buffer_type_context_t context; + }; + + // buffer typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - void (*free_buffer) (ggml_backend_buffer_t buffer); - void * (*get_base) (ggml_backend_buffer_t buffer); // get base pointer - size_t (*get_alloc_size)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-allocation callback - void (*init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // post-allocation callback - void (*free_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-free callback + void (*free_buffer)(ggml_backend_buffer_t buffer); + //void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras + void * (*get_base) (ggml_backend_buffer_t buffer); + void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*set_tensor) (ggml_backend_buffer_t 
buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + // (optional) copy tensor between different buffer-type, allow for single-copy tranfers + void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); }; struct ggml_backend_buffer { - struct ggml_backend_buffer_i iface; - - ggml_backend_t backend; + struct ggml_backend_buffer_i iface; + ggml_backend_buffer_type_t buft; ggml_backend_buffer_context_t context; - size_t size; }; - GGML_API ggml_backend_buffer_t ggml_backend_buffer_init( - struct ggml_backend * backend, + ggml_backend_buffer_t ggml_backend_buffer_init( + ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, size_t size); + // // Backend // @@ -49,20 +68,17 @@ extern "C" { void (*free)(ggml_backend_t backend); // buffer allocation - ggml_backend_buffer_t (*alloc_buffer)(ggml_backend_t backend, size_t size); + ggml_backend_buffer_type_t (*get_default_buffer_type)(ggml_backend_t backend); - // get buffer alignment - size_t (*get_alignment)(ggml_backend_t backend); - - // tensor data access - // these functions can be asynchronous, helper functions are provided for synchronous access that automatically call synchronize + // (optional) asynchroneous tensor data access void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - void (*synchronize) (ggml_backend_t backend); - // (optional) copy tensor between different backends, allow for single-copy tranfers - void (*cpy_tensor_from)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); + // (optional) asynchroneous tensor copy + void (*cpy_tensor_from_async)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_to_async) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); + + void (*synchronize) (ggml_backend_t backend); // compute graph with a plan ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph); @@ -82,6 +98,15 @@ extern "C" { ggml_backend_context_t context; }; + + // + // Backend registry + // + + typedef ggml_backend_t (*ggml_backend_init_fn)(const char * params, void * user_data); + + void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); + #ifdef __cplusplus } #endif diff --git a/ggml-backend.c b/ggml-backend.c index f6e5fceed..3a22cd085 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -9,14 +9,36 @@ #include #include -#define UNUSED GGML_UNUSED #define MAX(a, b) ((a) > (b) ? 
(a) : (b)) + +// backend buffer type + +ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + return buft->iface.alloc_buffer(buft, size); +} + +size_t ggml_backend_buft_get_alignment(ggml_backend_buffer_type_t buft) { + return buft->iface.get_alignment(buft); +} + +size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { + // get_alloc_size is optional, defaults to ggml_nbytes + if (buft->iface.get_alloc_size) { + return buft->iface.get_alloc_size(buft, tensor); + } + return ggml_nbytes(tensor); +} + +bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + return buft->iface.supports_backend(buft, backend); +} + // backend buffer ggml_backend_buffer_t ggml_backend_buffer_init( - struct ggml_backend * backend, + ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, size_t size) { @@ -26,7 +48,7 @@ ggml_backend_buffer_t ggml_backend_buffer_init( (*buffer) = (struct ggml_backend_buffer) { /* .interface = */ iface, - /* .backend = */ backend, + /* .buft = */ buft, /* .context = */ context, /* .size = */ size, }; @@ -45,10 +67,6 @@ void ggml_backend_buffer_free(ggml_backend_buffer_t buffer) { free(buffer); } -size_t ggml_backend_buffer_get_alignment(ggml_backend_buffer_t buffer) { - return ggml_backend_get_alignment(buffer->backend); -} - size_t ggml_backend_buffer_get_size(ggml_backend_buffer_t buffer) { return buffer->size; } @@ -61,14 +79,6 @@ void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { return base; } -size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { - // get_alloc_size is optional, defaults to ggml_nbytes - if (buffer->iface.get_alloc_size) { - return buffer->iface.get_alloc_size(buffer, tensor); - } - return ggml_nbytes(tensor); -} - void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { // init_tensor is optional if (buffer->iface.init_tensor) { @@ -76,19 +86,20 @@ void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_t } } -void ggml_backend_buffer_free_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { - // free_tensor is optional - if (buffer->iface.free_tensor) { - buffer->iface.free_tensor(buffer, tensor); - } +size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer) { + return ggml_backend_buft_get_alignment(ggml_backend_buffer_type(buffer)); +} + +size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type(buffer), tensor); +} + +ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer) { + return buffer->buft; } // backend -ggml_backend_t ggml_get_backend(const struct ggml_tensor * tensor) { - return tensor->buffer ? 
tensor->buffer->backend : NULL; -} - const char * ggml_backend_name(ggml_backend_t backend) { if (backend == NULL) { return "NULL"; @@ -104,43 +115,53 @@ void ggml_backend_free(ggml_backend_t backend) { backend->iface.free(backend); } +ggml_backend_buffer_type_t ggml_backend_get_default_buffer_type(ggml_backend_t backend) { + return backend->iface.get_default_buffer_type(backend); +} + ggml_backend_buffer_t ggml_backend_alloc_buffer(ggml_backend_t backend, size_t size) { - return backend->iface.alloc_buffer(backend, size); + return ggml_backend_buft_alloc_buffer(ggml_backend_get_default_buffer_type(backend), size); } size_t ggml_backend_get_alignment(ggml_backend_t backend) { - return backend->iface.get_alignment(backend); + return ggml_backend_buft_get_alignment(ggml_backend_get_default_buffer_type(backend)); } -void ggml_backend_tensor_set_async(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - ggml_get_backend(tensor)->iface.set_tensor_async(ggml_get_backend(tensor), tensor, data, offset, size); +void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + + backend->iface.set_tensor_async(backend, tensor, data, offset, size); } -void ggml_backend_tensor_get_async(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - ggml_get_backend(tensor)->iface.get_tensor_async(ggml_get_backend(tensor), tensor, data, offset, size); +void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + + backend->iface.get_tensor_async(backend, tensor, data, offset, size); } void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - ggml_backend_t backend = ggml_get_backend(tensor); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - GGML_ASSERT(backend != NULL && "tensor backend not set"); + GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - backend->iface.set_tensor_async(backend, tensor, data, offset, size); - backend->iface.synchronize(backend); + tensor->buffer->iface.set_tensor(tensor->buffer, tensor, data, offset, size); } void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - ggml_backend_t backend = ggml_get_backend(tensor); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - GGML_ASSERT(backend != NULL && "tensor backend not set"); + GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - backend->iface.get_tensor_async(backend, tensor, data, offset, size); - backend->iface.synchronize(backend); + tensor->buffer->iface.get_tensor(tensor->buffer, tensor, data, offset, size); } void ggml_backend_synchronize(ggml_backend_t backend) { + if (backend->iface.synchronize == NULL) { + return; + } + backend->iface.synchronize(backend); } @@ -154,10 +175,16 @@ void ggml_backend_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_pla void ggml_backend_graph_plan_compute(ggml_backend_t 
backend, ggml_backend_graph_plan_t plan) { backend->iface.graph_plan_compute(backend, plan); + + // TODO: optional sync + ggml_backend_synchronize(backend); } void ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { backend->iface.graph_compute(backend, cgraph); + + // TODO: optional sync + ggml_backend_synchronize(backend); } bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -194,14 +221,15 @@ void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst // TODO: allow backends to support copy to/from same backend - if (ggml_get_backend(dst)->iface.cpy_tensor_from != NULL) { - ggml_get_backend(dst)->iface.cpy_tensor_from(ggml_get_backend(dst)->context, src, dst); - } else if (ggml_get_backend(src)->iface.cpy_tensor_to != NULL) { - ggml_get_backend(src)->iface.cpy_tensor_to(ggml_get_backend(src)->context, src, dst); + if (dst->buffer->iface.cpy_tensor_from != NULL) { + dst->buffer->iface.cpy_tensor_from(dst->buffer, src, dst); + } else if (src->buffer->iface.cpy_tensor_to != NULL) { + src->buffer->iface.cpy_tensor_to(src->buffer, src, dst); } else { // shouldn't be hit when copying from/to CPU #ifndef NDEBUG - fprintf(stderr, "ggml_backend_tensor_copy: neither cpy_tensor_from nor cpy_tensor_to are implemented for backends %s and %s, falling back to get/set\n", ggml_backend_name(src->buffer->backend), ggml_backend_name(dst->buffer->backend)); + fprintf(stderr, "ggml_backend_tensor_copy: neither cpy_tensor_from nor cpy_tensor_to " + "are implemented for %s and %s, falling back to get/set\n", src->name, dst->name); #endif size_t nbytes = ggml_nbytes(src); void * data = malloc(nbytes); @@ -211,8 +239,236 @@ void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst } } +// backend registry + +#define GGML_MAX_BACKENDS_REG 16 + +struct ggml_backend_reg { + char name[128]; + ggml_backend_init_fn init_fn; + ggml_backend_buffer_type_t default_buffer_type; + void * user_data; +}; + +static struct ggml_backend_reg ggml_backend_registry[GGML_MAX_BACKENDS_REG]; +static size_t ggml_backend_registry_count = 0; + +static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data); + +static void ggml_backend_registry_init(void) { + static bool initialized = false; + + if (initialized) { + return; + } + + initialized = true; + + ggml_backend_register("CPU", ggml_backend_reg_cpu_init, ggml_backend_cpu_buffer_type(), NULL); + + // add forward decls here to avoid including the backend headers +#ifdef GGML_USE_CUBLAS + extern void ggml_backend_cuda_reg_devices(void); + ggml_backend_cuda_reg_devices(); +#endif + +#ifdef GGML_USE_METAL + extern ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); + extern ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + ggml_backend_register("Metal", ggml_backend_reg_metal_init, ggml_backend_metal_buffer_type(), NULL); +#endif +} + +void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { + GGML_ASSERT(ggml_backend_registry_count < GGML_MAX_BACKENDS_REG); + + int id = ggml_backend_registry_count; + + ggml_backend_registry[id] = (struct ggml_backend_reg) { + /* .name = */ {0}, + /* .fn = */ init_fn, + /* .default_buffer_type = */ default_buffer_type, + /* .user_data = */ user_data, + }; + + snprintf(ggml_backend_registry[id].name, sizeof(ggml_backend_registry[id].name), "%s", name); + +#ifndef NDEBUG + fprintf(stderr, 
"%s: registered backend %s\n", __func__, name); +#endif + + ggml_backend_registry_count++; +} + +size_t ggml_backend_reg_get_count(void) { + ggml_backend_registry_init(); + + return ggml_backend_registry_count; +} + +size_t ggml_backend_reg_find_by_name(const char * name) { + ggml_backend_registry_init(); + + for (size_t i = 0; i < ggml_backend_registry_count; i++) { + // TODO: case insensitive in a portable way + if (strcmp(ggml_backend_registry[i].name, name) == 0) { + return i; + } + } + return SIZE_MAX; +} + +// init from backend:params string +ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str) { + ggml_backend_registry_init(); + + const char * params = strchr(backend_str, ':'); + char backend_name[128]; + if (params == NULL) { + strcpy(backend_name, backend_str); + params = ""; + } else { + strncpy(backend_name, backend_str, params - backend_str); + backend_name[params - backend_str] = '\0'; + params++; + } + + size_t backend_i = ggml_backend_reg_find_by_name(backend_name); + if (backend_i == SIZE_MAX) { + fprintf(stderr, "%s: backend %s not found\n", __func__, backend_name); + return NULL; + } + + return ggml_backend_reg_init_backend(backend_i, params); +} + +const char * ggml_backend_reg_get_name(size_t i) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].name; +} + +ggml_backend_t ggml_backend_reg_init_backend(size_t i, const char * params) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].init_fn(params, ggml_backend_registry[i].user_data); +} + +ggml_backend_buffer_type_t ggml_backend_reg_get_default_buffer_type(size_t i) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].default_buffer_type; +} + +ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_buft_alloc_buffer(ggml_backend_registry[i].default_buffer_type, size); +} + // backend CPU +static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { + return (void *)buffer->context; +} + +static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { + free(buffer->context); + GGML_UNUSED(buffer); +} + +static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + + memcpy((char *)tensor->data + offset, data, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + + memcpy(data, (const char *)tensor->data + offset, size); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_cpu_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { + ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_cpu_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { + 
ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); + + GGML_UNUSED(buffer); +} + +static struct ggml_backend_buffer_i cpu_backend_buffer_i = { + /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, + /* .get_base = */ ggml_backend_cpu_buffer_get_base, + /* .init_tensor = */ NULL, // no initialization required + /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, + /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, + /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, +}; + +// for buffers from ptr, free is not called +static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { + /* .free_buffer = */ NULL, // ptr is not owned by the buffer, so it does not need to be freed + /* .get_base = */ ggml_backend_cpu_buffer_get_base, + /* .init_tensor = */ NULL, // no initialization required + /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, + /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, + /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, +}; + +static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 + +static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned + void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? + + GGML_ASSERT(data != NULL && "failed to allocate buffer"); + + return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); +} + +static size_t ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return TENSOR_ALIGNMENT; + + GGML_UNUSED(buft); +} + +static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + return ggml_backend_is_cpu(backend); + + GGML_UNUSED(buft); +} + +ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_cpu = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .supports_backend = */ ggml_backend_cpu_buffer_type_supports_backend, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_buffer_type_cpu; +} + struct ggml_backend_cpu_context { int n_threads; void * work_data; @@ -222,7 +478,7 @@ struct ggml_backend_cpu_context { static const char * ggml_backend_cpu_name(ggml_backend_t backend) { return "CPU"; - UNUSED(backend); + GGML_UNUSED(backend); } static void ggml_backend_cpu_free(ggml_backend_t backend) { @@ -232,80 +488,10 @@ static void ggml_backend_cpu_free(ggml_backend_t backend) { free(backend); } -static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { - return (void *)buffer->context; -} +static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_cpu_buffer_type(); -static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { - free(buffer->context); - UNUSED(buffer); -} - -static struct ggml_backend_buffer_i cpu_backend_buffer_i = { - /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, - /* .get_base = */ ggml_backend_cpu_buffer_get_base, - /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes - /* .init_tensor = */ NULL, 
// no initialization required - /* .free_tensor = */ NULL, // no cleanup required -}; - -// for buffers from ptr, free is not called -static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { - /* .free_buffer = */ NULL, // ptr is not owned by the buffer, so it does not need to be freed - /* .get_base = */ ggml_backend_cpu_buffer_get_base, - /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes - /* .init_tensor = */ NULL, - /* .free_tensor = */ NULL, -}; - -static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 - -static ggml_backend_buffer_t ggml_backend_cpu_alloc_buffer(ggml_backend_t backend, size_t size) { - size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned - void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? - - GGML_ASSERT(data != NULL && "failed to allocate buffer"); - - return ggml_backend_buffer_init(backend, cpu_backend_buffer_i, data, size); -} - -static size_t ggml_backend_cpu_get_alignment(ggml_backend_t backend) { - return TENSOR_ALIGNMENT; - UNUSED(backend); -} - -static void ggml_backend_cpu_set_tensor_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - - memcpy((char *)tensor->data + offset, data, size); - - UNUSED(backend); -} - -static void ggml_backend_cpu_get_tensor_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - - memcpy(data, (const char *)tensor->data + offset, size); - - UNUSED(backend); -} - -static void ggml_backend_cpu_synchronize(ggml_backend_t backend) { - UNUSED(backend); -} - -static void ggml_backend_cpu_cpy_tensor_from(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - UNUSED(backend); -} - -static void ggml_backend_cpu_cpy_tensor_to(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); - - UNUSED(backend); + GGML_UNUSED(backend); } struct ggml_backend_plan_cpu { @@ -334,7 +520,7 @@ static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backen free(cpu_plan->cplan.work_data); free(cpu_plan); - UNUSED(backend); + GGML_UNUSED(backend); } static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { @@ -342,7 +528,7 @@ static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_bac ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); - UNUSED(backend); + GGML_UNUSED(backend); } static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { @@ -363,25 +549,25 @@ static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { return true; - UNUSED(backend); - UNUSED(op); + + GGML_UNUSED(backend); + GGML_UNUSED(op); } static struct ggml_backend_i cpu_backend_i = { - /* .get_name = */ ggml_backend_cpu_name, - /* .free = */ ggml_backend_cpu_free, - /* .alloc_buffer = */ ggml_backend_cpu_alloc_buffer, - /* .get_alignment = */ ggml_backend_cpu_get_alignment, - /* 
.set_tensor_async = */ ggml_backend_cpu_set_tensor_async, - /* .get_tensor_async = */ ggml_backend_cpu_get_tensor_async, - /* .synchronize = */ ggml_backend_cpu_synchronize, - /* .cpy_tensor_from = */ ggml_backend_cpu_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_cpu_cpy_tensor_to, - /* .graph_plan_create = */ ggml_backend_cpu_graph_plan_create, - /* .graph_plan_free = */ ggml_backend_cpu_graph_plan_free, - /* .graph_plan_compute = */ ggml_backend_cpu_graph_plan_compute, - /* .graph_compute = */ ggml_backend_cpu_graph_compute, - /* .supports_op = */ ggml_backend_cpu_supports_op, + /* .get_name = */ ggml_backend_cpu_name, + /* .free = */ ggml_backend_cpu_free, + /* .get_default_buffer_type = */ ggml_backend_cpu_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_from_async = */ NULL, + /* .cpy_tensor_to_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ ggml_backend_cpu_graph_plan_create, + /* .graph_plan_free = */ ggml_backend_cpu_graph_plan_free, + /* .graph_plan_compute = */ ggml_backend_cpu_graph_plan_compute, + /* .graph_compute = */ ggml_backend_cpu_graph_compute, + /* .supports_op = */ ggml_backend_cpu_supports_op, }; ggml_backend_t ggml_backend_cpu_init(void) { @@ -411,10 +597,18 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { ctx->n_threads = n_threads; } -ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(ggml_backend_t backend_cpu, void * ptr, size_t size) { - return ggml_backend_buffer_init(backend_cpu, cpu_backend_buffer_i_from_ptr, ptr, size); +ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { + return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } +static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { + return ggml_backend_cpu_init(); + + GGML_UNUSED(params); + GGML_UNUSED(user_data); +} + + // scheduler #define GGML_MAX_BACKENDS 4 @@ -427,7 +621,7 @@ struct ggml_backend_sched_split { int i_end; struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; int n_inputs; - struct ggml_cgraph * graph; + struct ggml_cgraph graph; }; struct ggml_backend_sched { @@ -453,7 +647,7 @@ struct ggml_backend_sched { #else __attribute__((aligned(GGML_MEM_ALIGN))) #endif - char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*sizeof(struct ggml_tensor) + GGML_MAX_SPLITS*sizeof(struct ggml_cgraph)]; + char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*sizeof(struct ggml_tensor) + sizeof(struct ggml_cgraph)]; }; #define hash_id(node) ggml_hash_find_or_insert(sched->hash_set, node) @@ -482,23 +676,57 @@ static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) return INT_MAX; } +static ggml_backend_t get_buffer_backend(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { + if (buffer == NULL) { + return NULL; + } + // find highest prio backend that supports the buffer type + for (int i = 0; i < sched->n_backends; i++) { + if (ggml_backend_buft_supports_backend(buffer->buft, sched->backends[i])) { + return sched->backends[i]; + } + } + GGML_ASSERT(false && "tensor buffer type not supported by any backend"); +} + +static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { + if (allocr == NULL) { + return NULL; + } + // find highest prio backend that supports the buffer type + for (int i = 0; i < sched->n_backends; i++) { + if (sched->tallocs[i] == allocr) { + return 
sched->backends[i]; + } + } + GGML_UNREACHABLE(); +} + +#if 0 +static char causes[GGML_DEFAULT_GRAPH_SIZE*8 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug, remove +#define SET_CAUSE(node, ...) sprintf(causes[hash_id(node)], __VA_ARGS__) +#define GET_CAUSE(node) causes[hash_id(node)] +#else +#define SET_CAUSE(node, ...) +#define GET_CAUSE(node) "" +#endif + // returns the backend that should be used for the node based on the current locations -char causes[GGML_DEFAULT_GRAPH_SIZE*4 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug, remove static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { // if the dst tensor is already allocated in a buffer, we must assume that it is critical to keep it there // ie. kv cache updates // note that this doesn't allow fallback to CPU. need to add output tensors to the splits to copy the data back to the original backend. // dst - ggml_backend_t cur_backend = ggml_get_backend(node); + ggml_backend_t cur_backend = get_buffer_backend(sched, node->buffer); if (cur_backend != NULL) { - sprintf(causes[hash_id(node)], "1.dst"); + SET_CAUSE(node, "1.dst"); return cur_backend; } // view_src - if (node->view_src != NULL && ggml_get_backend(node->view_src) != NULL) { - sprintf(causes[hash_id(node)], "1.vsrc"); - return ggml_get_backend(node->view_src); + if (node->view_src != NULL && get_buffer_backend(sched, node->view_src->buffer) != NULL) { + SET_CAUSE(node, "1.vsrc"); + return get_buffer_backend(sched, node->view_src->buffer); } // src @@ -510,7 +738,7 @@ static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct if (src == NULL) { break; } - ggml_backend_t src_backend = ggml_get_backend(src); + ggml_backend_t src_backend = get_buffer_backend(sched, src->buffer); if (src_backend != NULL) { int src_prio = sched_backend_prio(sched, src_backend); size_t src_size = ggml_nbytes(src); @@ -518,7 +746,7 @@ static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct cur_prio = src_prio; cur_size = src_size; cur_backend = src_backend; - sprintf(causes[hash_id(node)], "1.src%d", i); + SET_CAUSE(node, "1.src%d", i); } } } @@ -539,10 +767,12 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra int cur_split = 0; for (int i = 0; i < graph->n_nodes; i++) { if (cur_split < sched->n_splits && i == sched->splits[cur_split].i_start) { - ggml_backend_t split_backend = ggml_tallocr_get_buffer(sched->splits[cur_split].tallocr)->backend; - fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), sched->splits[cur_split].n_inputs); + ggml_backend_t split_backend = get_allocr_backend(sched, sched->splits[cur_split].tallocr); + fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), + sched->splits[cur_split].n_inputs); for (int j = 0; j < sched->splits[cur_split].n_inputs; j++) { - fprintf(stderr, "[%s (%5.5s)] ", sched->splits[cur_split].inputs[j]->name, fmt_size(ggml_nbytes(sched->splits[cur_split].inputs[j]))); + fprintf(stderr, "[%s (%5.5s)] ", sched->splits[cur_split].inputs[j]->name, + fmt_size(ggml_nbytes(sched->splits[cur_split].inputs[j]))); } fprintf(stderr, "\n"); cur_split++; @@ -552,16 +782,18 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra continue; } ggml_tallocr_t node_allocr = node_allocr(node); - ggml_backend_t node_backend = node_allocr ? 
ggml_tallocr_get_buffer(node_allocr)->backend : NULL; - fprintf(stderr, "node #%3d (%10.10s): %20.20s (%4.4s) [%4.4s %8.8s]:", i, ggml_op_name(node->op), node->name, fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", causes[hash_id(node)]); + ggml_backend_t node_backend = node_allocr ? get_allocr_backend(sched, node_allocr) : NULL; // FIXME: + fprintf(stderr, "node #%3d (%10.10s): %20.20s (%4.4s) [%4.4s %8.8s]:", i, ggml_op_name(node->op), node->name, + fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", GET_CAUSE(node)); for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } ggml_tallocr_t src_allocr = node_allocr(src); - ggml_backend_t src_backend = src_allocr ? ggml_tallocr_get_buffer(src_allocr)->backend : NULL; - fprintf(stderr, " %20.20s (%4.4s) [%4.4s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? ggml_backend_name(src_backend) : "NULL", causes[hash_id(src)]); + ggml_backend_t src_backend = src_allocr ? get_allocr_backend(sched, src_allocr) : NULL; + fprintf(stderr, " %20.20s (%4.4s) [%4.4s %8.8s]", src->name, + fmt_size(ggml_nbytes(src)), src_backend ? ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); } fprintf(stderr, "\n"); } @@ -587,9 +819,9 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g sched->n_splits = 0; struct ggml_init_params params = { - /*.mem_size = */ sizeof(sched->context_buffer), - /*.mem_buffer = */ sched->context_buffer, - /*.no_alloc = */ true + /* .mem_size = */ sizeof(sched->context_buffer), + /* .mem_buffer = */ sched->context_buffer, + /* .no_alloc = */ true }; if (sched->ctx != NULL) { @@ -605,9 +837,9 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // do not overwrite user assignments continue; } - ggml_backend_t leaf_backend = ggml_get_backend(leaf); + ggml_backend_t leaf_backend = get_buffer_backend(sched, leaf->buffer); if (leaf_backend == NULL && leaf->view_src != NULL) { - leaf_backend = ggml_get_backend(leaf->view_src); + leaf_backend = get_buffer_backend(sched, leaf->view_src->buffer); } if (leaf_backend != NULL) { node_allocr(leaf) = ggml_backend_sched_get_tallocr(sched, leaf_backend); @@ -649,7 +881,7 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g cur_prio = src_prio; cur_size = src_size; node_allocr = src_allocr; - sprintf(causes[hash_id(node)], "2.src%d", j); + SET_CAUSE(node, "2.src%d", j); } } } @@ -733,7 +965,7 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); sched->node_copies[id][cur_backend_id] = tensor_copy; node_allocr(tensor_copy) = cur_allocr; - ggml_backend_t backend = ggml_tallocr_get_buffer(cur_allocr)->backend; + ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); } node->src[j] = sched->node_copies[id][cur_backend_id]; @@ -761,8 +993,8 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g ggml_tallocr_t src_allocr = node_allocr(src); if (src_allocr != node_allocr /* && src_backend != NULL */) { // ignore nulls for now fprintf(stderr, "!!!! %s has backend %s, src %d (%s) has backend %s\n", - node->name, node_allocr ? ggml_backend_name(ggml_tallocr_get_buffer(node_allocr)->backend) : "NULL", - j, src->name, src_allocr ? 
ggml_backend_name(ggml_tallocr_get_buffer(src_allocr)->backend) : "NULL"); + node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", + j, src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL"); } } } @@ -773,7 +1005,7 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g struct ggml_cgraph * graph_copy = ggml_new_graph_custom(sched->ctx, graph->n_nodes + sched->n_splits*GGML_MAX_SPLIT_INPUTS, false); for (int i = 0; i < sched->n_splits; i++) { struct ggml_backend_sched_split * split = &sched->splits[i]; - split->graph = ggml_graph_view(sched->ctx, graph, split->i_start, split->i_end); + split->graph = ggml_graph_view(graph, split->i_start, split->i_end); // add inputs to the graph copy so that they are allocated by ggml-alloc at the start of the split for (int j = 0; j < split->n_inputs; j++) { @@ -806,31 +1038,29 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { for (int i = 0; i < sched->n_splits; i++) { struct ggml_backend_sched_split * split = &splits[i]; - ggml_backend_t split_backend = ggml_tallocr_get_buffer(split->tallocr)->backend; + ggml_backend_t split_backend = get_allocr_backend(sched, split->tallocr); int split_backend_id = sched_backend_prio(sched, split_backend); // copy the input tensors to the split backend uint64_t copy_start_us = ggml_time_us(); for (int j = 0; j < split->n_inputs; j++) { - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(split->inputs[j])][sched_backend_prio(sched, split_backend)]; - if (split->inputs[j]->buffer == NULL) { - if (split->inputs[j]->view_src == NULL) { - fprintf(stderr, "input %s has no buffer and no view_src\n", split->inputs[j]->name); + struct ggml_tensor * input = split->inputs[j]; + struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_backend_prio(sched, split_backend)]; + if (input->buffer == NULL) { + if (input->view_src == NULL) { + fprintf(stderr, "input %s has no buffer and no view_src\n", input->name); exit(1); } - struct ggml_tensor * view = split->inputs[j]; - view->backend = view->view_src->backend; - view->buffer = view->view_src->buffer; - view->data = (char *)view->view_src->data + view->view_offs; - ggml_backend_buffer_init_tensor(ggml_backend_sched_get_buffer(sched, view->buffer->backend), view); + // FIXME: may need to use the sched buffer instead + ggml_backend_view_init(input->view_src->buffer, input); } if (input_cpy->buffer == NULL) { fprintf(stderr, "input_cpy %s has no buffer\n", input_cpy->name); exit(1); } - GGML_ASSERT(split->inputs[j]->buffer->backend != input_cpy->buffer->backend); - GGML_ASSERT(input_cpy->buffer->backend == split_backend); - ggml_backend_tensor_copy(split->inputs[j], input_cpy); + //GGML_ASSERT(input->buffer->backend != input_cpy->buffer->backend); + //GGML_ASSERT(input_cpy->buffer->backend == split_backend); + ggml_backend_tensor_copy(input, input_cpy); } // ggml_backend_synchronize(split_backend); int64_t copy_end_us = ggml_time_us(); @@ -843,7 +1073,7 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { #endif uint64_t compute_start_us = ggml_time_us(); - ggml_backend_graph_compute(split_backend, split->graph); + ggml_backend_graph_compute(split_backend, &split->graph); // ggml_backend_synchronize(split_backend); uint64_t compute_end_us = ggml_time_us(); compute_us[split_backend_id] += compute_end_us - compute_start_us; @@ -872,8 +1102,6 @@ ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_bac 
struct ggml_backend_sched * sched = malloc(sizeof(struct ggml_backend_sched)); memset(sched, 0, sizeof(struct ggml_backend_sched)); - fprintf(stderr, "ggml_backend_sched size: %lu KB\n", sizeof(struct ggml_backend_sched)/1024); - sched->n_backends = n_backends; for (int i = 0; i < n_backends; i++) { sched->backends[i] = backends[i]; @@ -948,3 +1176,182 @@ void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); node_allocr(node) = sched->tallocs[backend_index]; } + +// utils +void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + GGML_ASSERT(tensor->buffer == NULL); + GGML_ASSERT(tensor->data == NULL); + GGML_ASSERT(tensor->view_src != NULL); + GGML_ASSERT(tensor->view_src->buffer != NULL); + GGML_ASSERT(tensor->view_src->data != NULL); + + tensor->buffer = buffer; + tensor->data = (char *)tensor->view_src->data + tensor->view_offs; + tensor->backend = tensor->view_src->backend; + ggml_backend_buffer_init_tensor(buffer, tensor); +} + +void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr) { + GGML_ASSERT(tensor->buffer == NULL); + GGML_ASSERT(tensor->data == NULL); + GGML_ASSERT(tensor->view_src == NULL); + GGML_ASSERT(addr >= ggml_backend_buffer_get_base(buffer)); + GGML_ASSERT((char *)addr + ggml_backend_buffer_get_alloc_size(buffer, tensor) <= + (char *)ggml_backend_buffer_get_base(buffer) + ggml_backend_buffer_get_size(buffer)); + + tensor->buffer = buffer; + tensor->data = addr; + ggml_backend_buffer_init_tensor(buffer, tensor); +} + +static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, + struct ggml_context * ctx_allocated, struct ggml_context * ctx_unallocated, struct ggml_tensor * src) { + + GGML_ASSERT(src != NULL); + GGML_ASSERT(src->data && "graph must be allocated"); + + size_t id = ggml_hash_insert(hash_set, src); + if (id == GGML_HASHTABLE_ALREADY_EXISTS) { + return node_copies[ggml_hash_find(hash_set, src)]; + } + + struct ggml_tensor * dst = ggml_dup_tensor_layout(src->data && !src->view_src ? 
ctx_allocated : ctx_unallocated, src); + if (src->view_src != NULL) { + dst->view_src = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); + dst->view_offs = src->view_offs; + } + dst->op = src->op; + memcpy(dst->op_params, src->op_params, sizeof(dst->op_params)); + ggml_set_name(dst, src->name); + + // copy src + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * s = src->src[i]; + if (s == NULL) { + break; + } + dst->src[i] = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); + } + + node_copies[id] = dst; + return dst; +} + +static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { + size_t id = ggml_hash_find(hash_set, src); + if (node_init[id]) { + return; + } + node_init[id] = true; + + struct ggml_tensor * dst = node_copies[id]; + if (dst->view_src != NULL) { + ggml_backend_view_init(dst->view_src->buffer, dst); + } + else { + ggml_backend_tensor_copy(src, dst); + } + + // init src + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * s = src->src[i]; + if (s == NULL) { + break; + } + graph_init_tensor(hash_set, node_copies, node_init, s); + } +} + +struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph) { + struct ggml_hash_set hash_set = { + /* .size = */ graph->visited_hash_table.size, + /* .keys = */ calloc(sizeof(hash_set.keys[0]) * graph->visited_hash_table.size, 1) + }; + struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]) * hash_set.size, 1); + bool * node_init = calloc(sizeof(node_init[0]) * hash_set.size, 1); + + struct ggml_init_params params = { + /* .mem_size = */ ggml_tensor_overhead()*hash_set.size + ggml_graph_overhead_custom(graph->size, false), + /* .mem_buffer = */ NULL, + /* .no_alloc = */ true + }; + + struct ggml_context * ctx_allocated = ggml_init(params); + struct ggml_context * ctx_unallocated = ggml_init(params); + + // dup nodes + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); + } + + // allocate nodes + ggml_backend_buffer_t buffer = ggml_backend_alloc_ctx_tensors(ctx_allocated, backend); + + //printf("copy buffer size: %zu MB\n", ggml_backend_buffer_get_size(buffer) / 1024 / 1024); + + // copy data and init views + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + graph_init_tensor(hash_set, node_copies, node_init, node); + } + + // build graph copy + struct ggml_cgraph * graph_copy = ggml_new_graph_custom(ctx_allocated, graph->size, false); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct ggml_tensor * node_copy = node_copies[ggml_hash_find(hash_set, node)]; + graph_copy->nodes[i] = node_copy; + } + graph_copy->n_nodes = graph->n_nodes; + + free(hash_set.keys); + free(node_copies); + free(node_init); + + return (struct ggml_backend_graph_copy) { + /* .buffer = */ buffer, + /* .ctx_allocated = */ ctx_allocated, + /* .ctx_unallocated = */ ctx_unallocated, + /* .graph = */ graph_copy, + }; +} + +void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy) { + ggml_backend_buffer_free(copy.buffer); + ggml_free(copy.ctx_allocated); + ggml_free(copy.ctx_unallocated); +} + +void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, 
ggml_backend_eval_callback callback, void * user_data) { + struct ggml_backend_graph_copy copy = ggml_backend_graph_copy(backend2, graph); + struct ggml_cgraph * g1 = graph; + struct ggml_cgraph * g2 = copy.graph; + + assert(g1->n_nodes == g2->n_nodes); + + for (int i = 0; i < g1->n_nodes; i++) { + //printf("eval %d/%d\n", i, g1->n_nodes); + struct ggml_tensor * t1 = g1->nodes[i]; + struct ggml_tensor * t2 = g2->nodes[i]; + + assert(t1->op == t2->op && ggml_are_same_layout(t1, t2)); + + struct ggml_cgraph g1v = ggml_graph_view(g1, i, i + 1); + struct ggml_cgraph g2v = ggml_graph_view(g2, i, i + 1); + + ggml_backend_graph_compute(backend1, &g1v); + ggml_backend_graph_compute(backend2, &g2v); + + if (ggml_is_view_op(t1->op)) { + continue; + } + + // compare results, calculate rms etc + if (!callback(i, t1, t2, user_data)) { + break; + } + } + + ggml_backend_graph_copy_free(copy); +} diff --git a/ggml-backend.h b/ggml-backend.h index 966687320..58d5ccae6 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -7,41 +7,44 @@ extern "C" { #endif + typedef struct ggml_backend_buffer_type * ggml_backend_buffer_type_t; + typedef struct ggml_backend_buffer * ggml_backend_buffer_t; + typedef struct ggml_backend * ggml_backend_t; + typedef void * ggml_backend_graph_plan_t; + // // Backend buffer // - struct ggml_backend_buffer; - typedef struct ggml_backend_buffer * ggml_backend_buffer_t; + // buffer type + GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); - // backend buffer functions + // buffer GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API void ggml_backend_buffer_free_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer); // // Backend // - struct ggml_backend; - typedef struct ggml_backend * ggml_backend_t; - typedef void * ggml_backend_graph_plan_t; - - GGML_API ggml_backend_t ggml_get_backend(const struct ggml_tensor * tensor); GGML_API const char * ggml_backend_name(ggml_backend_t backend); GGML_API void ggml_backend_free(ggml_backend_t backend); - GGML_API ggml_backend_buffer_t ggml_backend_alloc_buffer(ggml_backend_t backend, size_t size); + GGML_API ggml_backend_buffer_type_t ggml_backend_get_default_buffer_type(ggml_backend_t backend); + GGML_API ggml_backend_buffer_t ggml_backend_alloc_buffer(ggml_backend_t backend, size_t size); + GGML_API size_t ggml_backend_get_alignment(ggml_backend_t backend); - GGML_API size_t 
ggml_backend_get_alignment(ggml_backend_t backend); - - GGML_API void ggml_backend_tensor_set_async( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - GGML_API void ggml_backend_tensor_get_async(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + GGML_API void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + GGML_API void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); GGML_API void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); GGML_API void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); @@ -57,6 +60,7 @@ extern "C" { // tensor copy between different backends GGML_API void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst); + GGML_API void ggml_backend_tensor_copy_async(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); // automatic fallback to sync copy // // CPU backend @@ -68,8 +72,23 @@ extern "C" { GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); // Create a backend buffer from an existing pointer - GGML_API ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(ggml_backend_t backend_cpu, void * ptr, size_t size); + GGML_API ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); + GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); + + // + // Backend registry + // + + // The backend registry is a registry of all the available backends, and allows initializing backends in a generic way + + GGML_API size_t ggml_backend_reg_get_count(void); + GGML_API size_t ggml_backend_reg_find_by_name(const char * name); + GGML_API ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str); // str is name[:params] + GGML_API const char * ggml_backend_reg_get_name(size_t i); + GGML_API ggml_backend_t ggml_backend_reg_init_backend(size_t i, const char * params); // params is backend-specific + GGML_API ggml_backend_buffer_type_t ggml_backend_reg_get_default_buffer_type(size_t i); + GGML_API ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size); // // Backend scheduler @@ -131,6 +150,32 @@ extern "C" { ggml_backend_sched_t sched, struct ggml_cgraph * graph); + + // + // Utils + // + + struct ggml_backend_graph_copy { + ggml_backend_buffer_t buffer; + struct ggml_context * ctx_allocated; + struct ggml_context * ctx_unallocated; + struct ggml_cgraph * graph; + }; + + // Copy a graph to a different backend + GGML_API struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph); + GGML_API void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy); + + typedef bool (*ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); + + // Compare the output of two backends + GGML_API void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); + + // Tensor initialization + GGML_API void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr); + GGML_API void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + + 
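A minimal sketch of how the registry and buffer-type API declared above can be driven from application code, assuming `ggml-backend.h` is on the include path and that the lazy registry initialization has registered at least the CPU backend (as it does in ggml-backend.c):

```c
// enumerate registered backends, initialize one by name, and allocate a buffer
// through its default buffer type (illustrative sketch, not part of the patch)
#include <stdio.h>

#include "ggml-backend.h"

int main(void) {
    // ggml_backend_reg_get_count() triggers the lazy registry initialization
    for (size_t i = 0; i < ggml_backend_reg_get_count(); i++) {
        printf("backend %zu: %s\n", i, ggml_backend_reg_get_name(i));
    }

    // "name[:params]" string; "CPU" is registered unconditionally
    ggml_backend_t backend = ggml_backend_reg_init_backend_from_str("CPU");
    if (backend == NULL) {
        return 1;
    }

    // buffers are now allocated through a buffer type rather than the backend itself
    ggml_backend_buffer_type_t buft   = ggml_backend_get_default_buffer_type(backend);
    ggml_backend_buffer_t      buffer = ggml_backend_buft_alloc_buffer(buft, 16*1024*1024);

    printf("%s: allocated %zu bytes (alignment %zu)\n",
           ggml_backend_name(backend),
           ggml_backend_buffer_get_size(buffer),
           ggml_backend_buft_get_alignment(buft));

    ggml_backend_buffer_free(buffer);
    ggml_backend_free(backend);
    return 0;
}
```

Routing allocation through `ggml_backend_buffer_type_t` is what lets the scheduler and `ggml_backend_graph_copy` find a compatible backend for an already-allocated buffer via `ggml_backend_buft_supports_backend`, instead of storing a backend pointer inside every buffer.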
#ifdef __cplusplus } #endif diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 1200d1c88..85f7a2937 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1,13 +1,13 @@ #include -#include #include #include +#include +#include #include #include #include #include #include -#include #if defined(GGML_USE_HIPBLAS) #include @@ -70,6 +70,7 @@ #define cudaOccupancyMaxPotentialBlockSize hipOccupancyMaxPotentialBlockSize #define cudaSetDevice hipSetDevice #define cudaStreamCreateWithFlags hipStreamCreateWithFlags +#define cudaStreamFireAndForget hipStreamFireAndForget #define cudaStreamNonBlocking hipStreamNonBlocking #define cudaStreamSynchronize hipStreamSynchronize #define cudaStreamWaitEvent(stream, event, flags) hipStreamWaitEvent(stream, event, flags) @@ -191,7 +192,7 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); fprintf(stderr, "\nCUDA error %d at %s:%d: %s\n", err_, __FILE__, __LINE__, \ cudaGetErrorString(err_)); \ fprintf(stderr, "current device: %d\n", id); \ - exit(1); \ + GGML_ASSERT(!"CUDA error"); \ } \ } while (0) @@ -205,7 +206,7 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); fprintf(stderr, "\ncuBLAS error %d at %s:%d: %s\n", \ err_, __FILE__, __LINE__, cublasGetStatusString(err_)); \ fprintf(stderr, "current device: %d\n", id); \ - exit(1); \ + GGML_ASSERT(!"cuBLAS error"); \ } \ } while (0) #else @@ -217,7 +218,7 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); cudaGetDevice(&id); \ fprintf(stderr, "\ncuBLAS error %d at %s:%d\n", err_, __FILE__, __LINE__); \ fprintf(stderr, "current device: %d\n", id); \ - exit(1); \ + GGML_ASSERT(!"cuBLAS error"); \ } \ } while (0) #endif // CUDART_VERSION >= 11 @@ -434,8 +435,6 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses -#define CUDA_ADD_BLOCK_SIZE 256 -#define CUDA_MUL_BLOCK_SIZE 256 #define CUDA_GELU_BLOCK_SIZE 256 #define CUDA_SILU_BLOCK_SIZE 256 #define CUDA_RELU_BLOCK_SIZE 256 @@ -528,40 +527,87 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } -static __global__ void add_f32(const float * x, const float * y, float * dst, const int kx, const int ky) { - const int i = blockDim.x*blockIdx.x + threadIdx.x; - - if (i >= kx) { - return; - } - dst[i] = x[i] + y[i%ky]; +static __device__ __forceinline__ float op_repeat(const float a, const float b) { + return b; } -static __global__ void add_f16_f32_f16(const half * x, const float * y, half * dst, const int k) { - const int i = blockDim.x*blockIdx.x + threadIdx.x; - - if (i >= k) { - return; - } - dst[i] = __hadd(x[i], __float2half(y[i])); +static __device__ __forceinline__ float op_add(const float a, const float b) { + return a + b; } -static __global__ void add_f16_f32_f32(const half * x, const float * y, float * dst, const int k) { - const int i = blockDim.x*blockIdx.x + threadIdx.x; - - if (i >= k) { - return; - } - dst[i] = __half2float(x[i]) + y[i]; +static __device__ __forceinline__ float op_mul(const float a, const float b) { + return a * b; } -static __global__ void mul_f32(const float * x, const float * y, float * dst, const int kx, const int ky) { - const int i = blockDim.x*blockIdx.x + threadIdx.x; +static __device__ __forceinline__ float op_div(const float a, const float b) { + return a / b; +} - if (i >= kx) { +template +static __global__ void k_bin_bcast(const src0_t * src0, const src1_t * src1, dst_t * dst, + int ne0, int ne1, int ne2, int ne3, + int ne10, int ne11, int ne12, int ne13, + /*int s0, */ int s1, int s2, int s3, + /*int s10,*/ int s11, int s12, int s13) { + const int i0s = blockDim.x*blockIdx.x + threadIdx.x; + const int i1 = (blockDim.y*blockIdx.y + threadIdx.y); + const int i2 = (blockDim.z*blockIdx.z + threadIdx.z) / ne3; + const int i3 = (blockDim.z*blockIdx.z + threadIdx.z) % ne3; + + if (i0s >= ne0 || i1 >= ne1 || i2 >= ne2 || i3 >= ne3) { return; } - dst[i] = x[i] * y[i%ky]; + + const int i11 = i1 % ne11; + const int i12 = i2 % ne12; + const int i13 = i3 % ne13; + + const size_t i_src0 = i3*s3 + i2*s2 + i1*s1; + const size_t i_src1 = i13*s13 + i12*s12 + i11*s11; + const size_t i_dst = i_src0; + + const src0_t * src0_row = src0 + i_src0; + const src1_t * src1_row = src1 + i_src1; + dst_t * dst_row = dst + i_dst; + + for (int i0 = i0s; i0 < ne0; i0 += blockDim.x*gridDim.x) { + const int i10 = i0 % ne10; + dst_row[i0] = (dst_t)bin_op(src0 ? 
(float)src0_row[i0] : 0.0f, (float)src1_row[i10]); + } +} + +template +static __global__ void k_bin_bcast_unravel(const src0_t * src0, const src1_t * src1, dst_t * dst, + int ne0, int ne1, int ne2, int ne3, + int ne10, int ne11, int ne12, int ne13, + /*int s0, */ int s1, int s2, int s3, + /*int s10,*/ int s11, int s12, int s13) { + + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + const int i3 = i/(ne2*ne1*ne0); + const int i2 = (i/(ne1*ne0)) % ne2; + const int i1 = (i/ne0) % ne1; + const int i0 = i % ne0; + + if (i0 >= ne0 || i1 >= ne1 || i2 >= ne2 || i3 >= ne3) { + return; + } + + const int i11 = i1 % ne11; + const int i12 = i2 % ne12; + const int i13 = i3 % ne13; + + const size_t i_src0 = i3*s3 + i2*s2 + i1*s1; + const size_t i_src1 = i13*s13 + i12*s12 + i11*s11; + const size_t i_dst = i_src0; + + const src0_t * src0_row = src0 + i_src0; + const src1_t * src1_row = src1 + i_src1; + dst_t * dst_row = dst + i_dst; + + const int i10 = i0 % ne10; + dst_row[i0] = (dst_t)bin_op(src0 ? (float)src0_row[i0] : 0.0f, (float)src1_row[i10]); } static __global__ void gelu_f32(const float * x, float * dst, const int k) { @@ -605,12 +651,10 @@ static __global__ void sqr_f32(const float * x, float * dst, const int k) { } template -static __global__ void norm_f32(const float * x, float * dst, const int ncols) { +static __global__ void norm_f32(const float * x, float * dst, const int ncols, const float eps) { const int row = blockIdx.x*blockDim.y + threadIdx.y; const int tid = threadIdx.x; - const float eps = 1e-5f; - float2 mean_var = make_float2(0.f, 0.f); for (int col = tid; col < ncols; col += block_size) { @@ -4824,6 +4868,65 @@ static __global__ void alibi_f32(const float * x, float * dst, const int ncols, dst[i] = col * m_k + x[i]; } +static __global__ void k_sum_rows_f32(const float * x, float * dst, const int ncols) { + const int row = blockIdx.y; + const int col = threadIdx.x; + + float sum = 0.0f; + for (int i = col; i < ncols; i += blockDim.x) { + sum += x[row * ncols + i]; + } + + sum = warp_reduce_sum(sum); + + if (col == 0) { + dst[row] = sum; + } +} + +template +static inline __device__ void swap(T & a, T & b) { + T tmp = a; + a = b; + b = tmp; +} + +template +static __global__ void k_argsort_f32_i32(const float * x, int * dst, const int ncols) { + // bitonic sort + int col = threadIdx.x; + int row = blockIdx.y; + + if (col >= ncols) return; + + const float * x_row = x + row * ncols; + int * dst_row = dst + row * ncols; + + // initialize indices + if (col < ncols) { + dst_row[col] = col; + } + __syncthreads(); + + for (int k = 2; k <= ncols; k *= 2) { + for (int j = k / 2; j > 0; j /= 2) { + int ixj = col ^ j; + if (ixj > col) { + if ((col & k) == 0) { + if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + swap(dst_row[col], dst_row[ixj]); + } + } else { + if (order == GGML_SORT_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + swap(dst_row[col], dst_row[ixj]); + } + } + } + __syncthreads(); + } + } +} + static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int ncols, const int rows_per_channel, const int n_past) { const int col = blockDim.y*blockIdx.y + threadIdx.y; const int row = blockDim.x*blockIdx.x + threadIdx.x; @@ -4833,8 +4936,9 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int } const int i = row*ncols + col; - // dst[i] = col > n_past + row ? 
-INFINITY : x[i]; - dst[i] = x[i] - (col > n_past + row % rows_per_channel) * INT_MAX; // equivalent within rounding error but slightly faster on GPU + //dst[i] = col > (n_past + row % rows_per_channel) ? -INFINITY : x[i]; + //dst[i] = x[i] - (col > n_past + row % rows_per_channel) * INT_MAX; // equivalent within rounding error but slightly faster on GPU + dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols, const int nrows_y, const float scale) { @@ -4956,25 +5060,119 @@ static void get_rows_cuda(const void * x, const int32_t * y, float * dst, const k_get_rows<<>>(x, y, dst, ncols); } -static void add_f32_cuda(const float * x, const float * y, float * dst, const int kx, const int ky, cudaStream_t stream) { - const int num_blocks = (kx + CUDA_ADD_BLOCK_SIZE - 1) / CUDA_ADD_BLOCK_SIZE; - add_f32<<>>(x, y, dst, kx, ky); -} +template +struct bin_bcast_cuda { + template + void operator()(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, + const src0_t * src0_dd, const src1_t * src1_dd, dst_t * dst_dd, + cudaStream_t stream) { -static void add_f16_f32_f16_cuda(const half * x, const float * y, half * dst, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_ADD_BLOCK_SIZE - 1) / CUDA_ADD_BLOCK_SIZE; - add_f16_f32_f16<<>>(x, y, dst, k); -} + GGML_TENSOR_BINARY_OP_LOCALS -static void add_f16_f32_f32_cuda(const half * x, const float * y, float * dst, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_ADD_BLOCK_SIZE - 1) / CUDA_ADD_BLOCK_SIZE; - add_f16_f32_f32<<>>(x, y, dst, k); -} -static void mul_f32_cuda(const float * x, const float * y, float * dst, const int kx, const int ky, cudaStream_t stream) { - const int num_blocks = (kx + CUDA_MUL_BLOCK_SIZE - 1) / CUDA_MUL_BLOCK_SIZE; - mul_f32<<>>(x, y, dst, kx, ky); -} + int nr0 = ne10/ne0; + int nr1 = ne11/ne1; + int nr2 = ne12/ne2; + int nr3 = ne13/ne3; + + int nr[4] = { nr0, nr1, nr2, nr3 }; + + // collapse dimensions until first broadcast dimension + int64_t cne0[] = {ne0, ne1, ne2, ne3}; + int64_t cne1[] = {ne10, ne11, ne12, ne13}; + size_t cnb0[] = {nb0, nb1, nb2, nb3}; + size_t cnb1[] = {nb10, nb11, nb12, nb13}; + auto collapse = [](int64_t cne[]) { + cne[0] *= cne[1]; + cne[1] = cne[2]; + cne[2] = cne[3]; + cne[3] = 1; + }; + + auto collapse_nb = [](size_t cnb[], int64_t cne[]) { + cnb[1] *= cne[1]; + cnb[2] *= cne[2]; + cnb[3] *= cne[3]; + }; + + for (int i = 0; i < 4; i++) { + if (nr[i] != 1) { + break; + } + if (i > 0) { + collapse_nb(cnb0, cne0); + collapse_nb(cnb1, cne1); + collapse(cne0); + collapse(cne1); + } + } + { + int64_t ne0 = cne0[0]; + int64_t ne1 = cne0[1]; + int64_t ne2 = cne0[2]; + int64_t ne3 = cne0[3]; + + int64_t ne10 = cne1[0]; + int64_t ne11 = cne1[1]; + int64_t ne12 = cne1[2]; + int64_t ne13 = cne1[3]; + + //size_t nb0 = cnb0[0]; + size_t nb1 = cnb0[1]; + size_t nb2 = cnb0[2]; + size_t nb3 = cnb0[3]; + + //size_t nb10 = cnb1[0]; + size_t nb11 = cnb1[1]; + size_t nb12 = cnb1[2]; + size_t nb13 = cnb1[3]; + + //size_t s0 = nb0 / sizeof(src1_t); + size_t s1 = nb1 / sizeof(src1_t); + size_t s2 = nb2 / sizeof(src1_t); + size_t s3 = nb3 / sizeof(src1_t); + + //size_t s10 = nb10 / sizeof(src1_t); + size_t s11 = nb11 / sizeof(src1_t); + size_t s12 = nb12 / sizeof(src1_t); + size_t s13 = nb13 / sizeof(src1_t); + + + const int block_size = 128; + + int64_t hne0 = std::max(ne0/2LL, 1LL); + + dim3 block_dims; + block_dims.x = 
std::min(hne0, block_size); + block_dims.y = std::min(ne1, block_size / block_dims.x); + block_dims.z = std::min(std::min(ne2*ne3, block_size / block_dims.x / block_dims.y), 64U); + + dim3 block_nums( + (hne0 + block_dims.x - 1) / block_dims.x, + (ne1 + block_dims.y - 1) / block_dims.y, + (ne2*ne3 + block_dims.z - 1) / block_dims.z + ); + + if (block_nums.z > 65535) { + // this is the maximum number of blocks in z direction, fallback to 1D grid kernel + int block_num = (ne0*ne1*ne2*ne3 + block_size - 1) / block_size; + k_bin_bcast_unravel<<>>( + src0_dd, src1_dd, dst_dd, + ne0, ne1, ne2, ne3, + ne10, ne11, ne12, ne13, + /* s0, */ s1, s2, s3, + /* s10, */ s11, s12, s13); + } else { + k_bin_bcast<<>>( + src0_dd, src1_dd, dst_dd, + ne0, ne1, ne2, ne3, + ne10, ne11, ne12, ne13, + /* s0, */ s1, s2, s3, + /* s10, */ s11, s12, s13); + } + } + } +}; static void gelu_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_GELU_BLOCK_SIZE - 1) / CUDA_GELU_BLOCK_SIZE; @@ -4996,14 +5194,14 @@ static void sqr_f32_cuda(const float * x, float * dst, const int k, cudaStream_t sqr_f32<<>>(x, dst, k); } -static void norm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) { +static void norm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, const float eps, cudaStream_t stream) { GGML_ASSERT(ncols % WARP_SIZE == 0); if (ncols < 1024) { const dim3 block_dims(WARP_SIZE, 1, 1); - norm_f32<<>>(x, dst, ncols); + norm_f32<<>>(x, dst, ncols, eps); } else { const dim3 block_dims(1024, 1, 1); - norm_f32<1024><<>>(x, dst, ncols); + norm_f32<1024><<>>(x, dst, ncols, eps); } } @@ -5025,34 +5223,10 @@ static void quantize_row_q8_1_cuda(const float * x, void * vy, const int kx, con quantize_q8_1<<>>(x, vy, kx, kx_padded); } -template -static void dequantize_row_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { +template +static void dequantize_block_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<<>>(vx, y, k); -} - -template -static void dequantize_row_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<<>>(vx, y, k); -} - -template -static void dequantize_row_q5_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<<>>(vx, y, k); -} - -template -static void dequantize_row_q5_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<<>>(vx, y, k); -} - -template -static void dequantize_row_q8_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<<>>(vx, y, k); + dequantize_block<<>>(vx, y, k); } template @@ -5101,6 +5275,64 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { + switch (type) { + case GGML_TYPE_Q4_0: + return dequantize_block_cuda; + case GGML_TYPE_Q4_1: + return dequantize_block_cuda; + case GGML_TYPE_Q5_0: 
+ return dequantize_block_cuda; + case GGML_TYPE_Q5_1: + return dequantize_block_cuda; + case GGML_TYPE_Q8_0: + return dequantize_block_cuda; + case GGML_TYPE_Q2_K: + return dequantize_row_q2_K_cuda; + case GGML_TYPE_Q3_K: + return dequantize_row_q3_K_cuda; + case GGML_TYPE_Q4_K: + return dequantize_row_q4_K_cuda; + case GGML_TYPE_Q5_K: + return dequantize_row_q5_K_cuda; + case GGML_TYPE_Q6_K: + return dequantize_row_q6_K_cuda; + case GGML_TYPE_F32: + return dequantize_block_cuda<1, 1, convert_f32>; + default: + return nullptr; + } +} + +static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { + switch (type) { + case GGML_TYPE_Q4_0: + return dequantize_block_cuda; + case GGML_TYPE_Q4_1: + return dequantize_block_cuda; + case GGML_TYPE_Q5_0: + return dequantize_block_cuda; + case GGML_TYPE_Q5_1: + return dequantize_block_cuda; + case GGML_TYPE_Q8_0: + return dequantize_block_cuda; + case GGML_TYPE_Q2_K: + return dequantize_row_q2_K_cuda; + case GGML_TYPE_Q3_K: + return dequantize_row_q3_K_cuda; + case GGML_TYPE_Q4_K: + return dequantize_row_q4_K_cuda; + case GGML_TYPE_Q5_K: + return dequantize_row_q5_K_cuda; + case GGML_TYPE_Q6_K: + return dequantize_row_q6_K_cuda; + case GGML_TYPE_F16: + return dequantize_block_cuda<1, 1, convert_f16>; + default: + return nullptr; + } +} + static void dequantize_mul_mat_vec_q4_0_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; @@ -5189,6 +5421,15 @@ static void dequantize_mul_mat_vec_q6_K_cuda(const void * vx, const float * y, f dequantize_mul_mat_vec_q6_k<<>>(vx, y, dst, ncols, nrows); } +static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); + const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + const dim3 block_nums(block_num_y, 1, 1); + const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); + dequantize_mul_mat_vec<1, 1, convert_f16> + <<>>(vx, y, dst, ncols, nrows); +} + static void mul_mat_vec_q4_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK4_0 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; @@ -5279,83 +5520,6 @@ static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * <<>>(vx, vy, dst, ncols, nrows); } -static void convert_fp16_to_fp32_cuda(const void * vx, float * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; - dequantize_block<1, 1, convert_f16><<>>(vx, y, k); -} - -static void convert_fp32_to_fp16_cuda(const void * vx, half * y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_QUANTIZE_BLOCK_SIZE - 1) / CUDA_QUANTIZE_BLOCK_SIZE; - dequantize_block<1, 1, convert_f32><<>>(vx, y, k); -} - -static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - dequantize_mul_mat_vec<1, 1, convert_f16> - <<>>(vx, y, dst, ncols, nrows); -} - -static to_fp16_cuda_t 
ggml_get_to_fp16_cuda(ggml_type type) { - switch (type) { - case GGML_TYPE_Q4_0: - return dequantize_row_q4_0_cuda; - case GGML_TYPE_Q4_1: - return dequantize_row_q4_1_cuda; - case GGML_TYPE_Q5_0: - return dequantize_row_q5_0_cuda; - case GGML_TYPE_Q5_1: - return dequantize_row_q5_1_cuda; - case GGML_TYPE_Q8_0: - return dequantize_row_q8_0_cuda; - case GGML_TYPE_Q2_K: - return dequantize_row_q2_K_cuda; - case GGML_TYPE_Q3_K: - return dequantize_row_q3_K_cuda; - case GGML_TYPE_Q4_K: - return dequantize_row_q4_K_cuda; - case GGML_TYPE_Q5_K: - return dequantize_row_q5_K_cuda; - case GGML_TYPE_Q6_K: - return dequantize_row_q6_K_cuda; - case GGML_TYPE_F32: - return convert_fp32_to_fp16_cuda; - default: - return nullptr; - } -} - -static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { - switch (type) { - case GGML_TYPE_Q4_0: - return dequantize_row_q4_0_cuda; - case GGML_TYPE_Q4_1: - return dequantize_row_q4_1_cuda; - case GGML_TYPE_Q5_0: - return dequantize_row_q5_0_cuda; - case GGML_TYPE_Q5_1: - return dequantize_row_q5_1_cuda; - case GGML_TYPE_Q8_0: - return dequantize_row_q8_0_cuda; - case GGML_TYPE_Q2_K: - return dequantize_row_q2_K_cuda; - case GGML_TYPE_Q3_K: - return dequantize_row_q3_K_cuda; - case GGML_TYPE_Q4_K: - return dequantize_row_q4_K_cuda; - case GGML_TYPE_Q5_K: - return dequantize_row_q5_K_cuda; - case GGML_TYPE_Q6_K: - return dequantize_row_q6_K_cuda; - case GGML_TYPE_F16: - return convert_fp16_to_fp32_cuda; - default: - return nullptr; - } -} - static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { @@ -5967,6 +6131,27 @@ static void alibi_f32_cuda(const float * x, float * dst, const int ncols, const alibi_f32<<>>(x, dst, ncols, k_rows, n_heads_log2_floor, m0, m1); } +static void sum_rows_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + const dim3 block_dims(WARP_SIZE, 1, 1); + const dim3 block_nums(1, nrows, 1); + k_sum_rows_f32<<>>(x, dst, ncols); +} + +static void argsort_f32_i32_cuda(const float * x, int * dst, const int ncols, const int nrows, ggml_sort_order order, cudaStream_t stream) { + // bitonic sort requires ncols to be power of 2 + GGML_ASSERT((ncols & (ncols - 1)) == 0); + + const dim3 block_dims(ncols, 1, 1); + const dim3 block_nums(1, nrows, 1); + if (order == GGML_SORT_ASC) { + k_argsort_f32_i32<<>>(x, dst, ncols); + } else if (order == GGML_SORT_DESC) { + k_argsort_f32_i32<<>>(x, dst, ncols); + } else { + GGML_ASSERT(false); + } +} + static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols_x, const int nrows_x, const int rows_per_channel, const int n_past, cudaStream_t stream) { const dim3 block_dims(1, CUDA_DIAG_MASK_INF_BLOCK_SIZE, 1); const int block_num_x = (ncols_x + CUDA_DIAG_MASK_INF_BLOCK_SIZE - 1) / CUDA_DIAG_MASK_INF_BLOCK_SIZE; @@ -6059,7 +6244,7 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { return ptr; } #ifdef DEBUG_CUDA_MALLOC - fprintf(stderr, "%s: %d buffers, max_size = %u MiB, tot_size = %u MiB, requested %u MiB\n", __func__, nnz, + fprintf(stderr, "%s: %d buffers, max_size = %u MB, tot_size = %u MB, requested %u MB\n", __func__, nnz, (uint32_t)(max_size/1024/1024), (uint32_t)(tot_size/1024/1024), (uint32_t)(size/1024/1024)); #endif void * ptr; @@ -6197,7 +6382,7 @@ void * ggml_cuda_host_malloc(size_t size) { // The allocation error can be bypassed. 
A null ptr will assigned out of this function. // This can fixed the OOM error in WSL. cudaGetLastError(); - fprintf(stderr, "WARNING: failed to allocate %.2f MiB of pinned memory: %s\n", + fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory: %s\n", size/1024.0/1024.0, cudaGetErrorString(err)); return nullptr; } @@ -6237,81 +6422,23 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( const enum ggml_type type = src->type; const int64_t ts = ggml_type_size(type); const int64_t bs = ggml_blck_size(type); - const int64_t i1_diff = i1_high - i1_low; + int64_t i1_diff = i1_high - i1_low; const char * x = src_ptr + i1_low*nb1 + i2*nb2 + i3*nb3; - if (nb0 == ts && nb1 == ts*(ne0/bs)) { + if (nb0 == ts && nb1 == ts*ne0/bs) { return cudaMemcpyAsync(dst_ptr, x, i1_diff*nb1, kind, stream); - } - if (nb0 == ts) { - return cudaMemcpy2DAsync(dst_ptr, ts*(ne0/bs), x, nb1, ts*(ne0/bs), i1_diff, kind, stream); - } - GGML_ASSERT(bs == 1 && "TODO: implement bs != 1"); - for (int64_t i1 = 0; i1 < i1_diff; i1++) { - const void * rx = (const void *) ((const char *) x + i1*nb1); - void * rd = (void *) (dst_ptr + i1*ts*ne0); - // pretend the row is a matrix with cols=1 - cudaError_t r = cudaMemcpy2DAsync(rd, ts, rx, nb0, ts, ne0, kind, stream); - if (r != cudaSuccess) { return r; } - } - return cudaSuccess; -} - -static void ggml_cuda_op_repeat( - const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & stream) { - // guaranteed to be an integer due to the check in ggml_can_repeat - const int64_t ne0 = dst->ne[0]; - const int64_t ne1 = dst->ne[1]; - const int64_t ne2 = dst->ne[2]; - const int64_t ne3 = dst->ne[3]; - - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; - - const size_t nb0 = dst->nb[0]; - const size_t nb1 = dst->nb[1]; - const size_t nb2 = dst->nb[2]; - const size_t nb3 = dst->nb[3]; - - const size_t nb00 = src0->nb[0]; - const size_t nb01 = src0->nb[1]; - const size_t nb02 = src0->nb[2]; - const size_t nb03 = src0->nb[3]; - - const int nr0 = (int)(ne0/ne00); - const int nr1 = (int)(ne1/ne01); - const int nr2 = (int)(ne2/ne02); - const int nr3 = (int)(ne3/ne03); - - // TODO: support for transposed / permuted tensors - GGML_ASSERT(nb0 == sizeof(float)); - GGML_ASSERT(nb00 == sizeof(float)); - - // TODO: very inefficient, implement in a kernel, or fewer cudaMemcpyAsync calls for contiguous tensors - for (int i3 = 0; i3 < nr3; i3++) { - for (int k3 = 0; k3 < ne03; k3++) { - for (int i2 = 0; i2 < nr2; i2++) { - for (int k2 = 0; k2 < ne02; k2++) { - for (int i1 = 0; i1 < nr1; i1++) { - for (int k1 = 0; k1 < ne01; k1++) { - for (int i0 = 0; i0 < nr0; i0++) { - CUDA_CHECK(cudaMemcpyAsync( - (char *) dst_d + (i3*ne03 + k3)*nb3 + (i2*ne02 + k2)*nb2 + (i1*ne01 + k1)*nb1 + (i0*ne00)*nb0, - (const char *) src0_d + ( k3)*nb03 + ( k2)*nb02 + ( k1)*nb01, - ne00*nb0, cudaMemcpyDeviceToDevice, stream)); - } - } - } - } - } + } else if (nb0 == ts) { + return cudaMemcpy2DAsync(dst_ptr, ts*ne0/bs, x, nb1, ts*ne0/bs, i1_diff, kind, stream); + } else { + for (int64_t i1 = 0; i1 < i1_diff; i1++) { + const void * rx = (const void *) ((const char *) x + i1*nb1); + void * rd = (void *) (dst_ptr + i1*ts*ne0/bs); + // pretend the row is a matrix with cols=1 + cudaError_t r = cudaMemcpy2DAsync(rd, ts/bs, rx, nb0, ts/bs, ne0, kind, stream); + if (r != cudaSuccess) return r; } + return cudaSuccess; } - - (void) src1; - (void) 
src1_d; } static void ggml_cuda_op_get_rows( @@ -6358,44 +6485,55 @@ static void ggml_cuda_op_get_rows( } } -inline void ggml_cuda_op_add( +template +inline void ggml_cuda_op_bin_bcast( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - add_f32_cuda(src0_dd, src1_dd, dst_dd, ggml_nelements(src0), ne10*ne11, main_stream); + op()(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } else if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - add_f16_f32_f16_cuda((const half *) src0_dd, src1_dd, (half *) dst_dd, ggml_nelements(src0), main_stream); + op()(src0, src1, dst, (const half *) src0_dd, src1_dd, (half *) dst_dd, main_stream); } else if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F32) { - add_f16_f32_f32_cuda((const half *) src0_dd, src1_dd, dst_dd, ggml_nelements(src0), main_stream); + op()(src0, src1, dst, (const half *) src0_dd, src1_dd, dst_dd, main_stream); } else { - fprintf(stderr, "src0->type: %d dst->type: %d\n", src0->type, dst->type); + fprintf(stderr, "%s: unsupported types: dst: %s, src0: %s, src1: %s\n", __func__, + ggml_type_name(dst->type), ggml_type_name(src0->type), ggml_type_name(src1->type)); GGML_ASSERT(false); } +} + +static void ggml_cuda_op_repeat( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & main_stream) { + + ggml_cuda_op_bin_bcast>(dst, src0, dst, nullptr, src0_d, dst_d, main_stream); (void) src1; - (void) dst; + (void) src1_d; +} + +inline void ggml_cuda_op_add( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } inline void ggml_cuda_op_mul( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F32); + ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); +} - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; +inline void ggml_cuda_op_div( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { - mul_f32_cuda(src0_dd, src1_dd, dst_dd, ggml_nelements(src0), ne10*ne11, main_stream); - - (void) dst; + ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } inline void ggml_cuda_op_gelu( @@ -6464,7 +6602,10 @@ inline void ggml_cuda_op_norm( const int64_t ne00 = src0->ne[0]; const int64_t nrows = ggml_nrows(src0); - norm_f32_cuda(src0_dd, dst_dd, ne00, nrows, main_stream); + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + norm_f32_cuda(src0_dd, dst_dd, ne00, nrows, eps, main_stream); (void) src1; (void) dst; @@ -7007,6 +7148,42 @@ inline void ggml_cuda_op_im2col( (void) src0_dd; } +inline void ggml_cuda_op_sum_rows( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + 
const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + const int64_t ncols = src0->ne[0]; + const int64_t nrows = ggml_nrows(src0); + + sum_rows_f32_cuda(src0_dd, dst_dd, ncols, nrows, main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +inline void ggml_cuda_op_argsort( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_I32); + + const int64_t ncols = src0->ne[0]; + const int64_t nrows = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + + argsort_f32_i32_cuda(src0_dd, (int *)dst_dd, ncols, nrows, order, main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + inline void ggml_cuda_op_diag_mask_inf( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -7215,7 +7392,7 @@ static void ggml_cuda_op_mul_mat( const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; const int64_t ne03 = src0->ne[3]; - // const int64_t nrows0 = ggml_nrows(src0); + const int64_t nrows0 = ggml_nrows(src0); const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; @@ -7523,6 +7700,10 @@ static void ggml_cuda_mul(const ggml_tensor * src0, const ggml_tensor * src1, gg ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_mul); } +static void ggml_cuda_div(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_div); +} + static void ggml_cuda_gelu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_gelu); } @@ -7548,7 +7729,7 @@ static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src } bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - if (!g_cublas_loaded) { return false; } + if (!g_cublas_loaded) return false; const int64_t ne10 = src1->ne[0]; @@ -7626,7 +7807,7 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor ggml_mul_mat_vec_nc_f16_f32_cuda(src0_ddq, src1_ddf, dst_ddf, ne00, ne01, row_stride_x, ne02, ne12, channel_stride_x, main_stream); } -__global__ static void k_compute_batched_ptrs( +static __global__ void k_compute_batched_ptrs( const half * src0_as_f16, const half * src1_as_f16, half * dst_f16, const void ** ptrs_src, void ** ptrs_dst, int ne12, int ne13, @@ -7682,9 +7863,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUDA_CHECK(ggml_cuda_set_device(g_main_device)); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; - int id; - CUDA_CHECK(cudaGetDevice(&id)); - CUBLAS_CHECK(cublasSetStream(g_cublas_handles[id], main_stream)); + CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], main_stream)); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; void * src0_ddq = src0_extra->data_device[g_main_device]; @@ -7741,7 +7920,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const // there is no broadcast and src0, src1 are contiguous across dims 2, 3 // use cublasGemmStridedBatchedEx 
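
For reference, cublasGemmStridedBatchedEx runs batchCount GEMMs whose operands sit at a fixed element stride from one another, so no per-matrix pointer array is needed. Below is a minimal standalone usage sketch, not the repository code: it uses FP32 operands and FP32 compute for brevity (the call that follows in the patch uses FP16), and the matrix sizes are made up. When the batch is not densely strided, the pointer-array variant cublasGemmBatchedEx together with k_compute_batched_ptrs is used further down instead.

```cpp
// build: nvcc example.cu -lcublas
// Computes C_i = A_i * B_i for a batch of two column-major 2x2 matrices.
#include <cstdio>
#include <cublas_v2.h>
#include <cuda_runtime.h>

int main() {
    const int n = 2, batch = 2;
    float hA[8] = {1,0, 0,1,  2,0, 0,2};   // A0 = I, A1 = 2*I (column-major)
    float hB[8] = {1,2, 3,4,  1,1, 1,1};
    float *dA, *dB, *dC;
    cudaMalloc(&dA, sizeof(hA)); cudaMalloc(&dB, sizeof(hB)); cudaMalloc(&dC, sizeof(hB));
    cudaMemcpy(dA, hA, sizeof(hA), cudaMemcpyHostToDevice);
    cudaMemcpy(dB, hB, sizeof(hB), cudaMemcpyHostToDevice);

    cublasHandle_t handle;
    cublasCreate(&handle);
    const float alpha = 1.0f, beta = 0.0f;
    // lda/ldb/ldc = n, strideA/B/C = n*n: consecutive matrices are densely packed
    cublasGemmStridedBatchedEx(handle, CUBLAS_OP_N, CUBLAS_OP_N,
        n, n, n,
        &alpha, dA, CUDA_R_32F, n, n*n,
                dB, CUDA_R_32F, n, n*n,
        &beta,  dC, CUDA_R_32F, n, n*n,
        batch, CUBLAS_COMPUTE_32F, CUBLAS_GEMM_DEFAULT);

    float hC[8];
    cudaMemcpy(hC, dC, sizeof(hC), cudaMemcpyDeviceToHost);
    printf("C0 = [%g %g; %g %g]\n", hC[0], hC[2], hC[1], hC[3]);   // I   * B0 = B0
    printf("C1 = [%g %g; %g %g]\n", hC[4], hC[6], hC[5], hC[7]);   // 2*I * B1 = 2*B1

    cublasDestroy(handle);
    cudaFree(dA); cudaFree(dB); cudaFree(dC);
}
```
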
CUBLAS_CHECK( - cublasGemmStridedBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, + cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, &alpha_f16, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB @@ -7775,7 +7954,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUDA_CHECK(cudaGetLastError()); CUBLAS_CHECK( - cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, + cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, &alpha_f16, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), @@ -7874,6 +8053,219 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } } +#if 0 +template +static __global__ void k_compute_batched_ptrs_id( + const void ** ptrs_src, void ** ptrs_dst, + int ne12, int ne13, + int ne23, + int nb02, int nb03, + int nb12, int nb13, + int nb2, int nb3, + int r2, int r3, + ggml_type src0_type, half * src0_as_f16, int64_t src0_ne, + const half * src1_f16, half * dst_f16, + const int32_t * ids, const int id, + Srcs... src0s) { + + int i = ids[id]; + + half * src0_f16; + const void * srcs_ar[] = { (const half *) src0s... }; + if (src0_type == GGML_TYPE_F16) { + src0_f16 = (half *) srcs_ar[i]; + } else { + src0_f16 = src0_as_f16; + if (threadIdx.x == 0 && threadIdx.y == 0) { + const to_fp16_cuda_t to_fp16 = ggml_get_to_fp16_cuda(src0_type); + to_fp16(srcs_ar[i], src0_f16, src0_ne, cudaStreamFireAndForget); + } + } + + int i13 = blockIdx.x * blockDim.x + threadIdx.x; + int i12 = blockIdx.y * blockDim.y + threadIdx.y; + + if (i13 >= ne13 || i12 >= ne12) { + return; + } + + int i03 = i13 / r3; + int i02 = i12 / r2; + + ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_f16 + i02*nb02 + i03*nb03; + ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_f16 + i12*nb12/2 + i13*nb13/2; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst_f16 + i12* nb2/2 + i13* nb3/2; +} + +static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { + const struct ggml_tensor * ids = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src00 = dst->src[2]; + + const int id = dst->op_params[0]; + + GGML_ASSERT(!ggml_is_transposed(src00)); + GGML_ASSERT(!ggml_is_transposed(src1)); + + GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + const int64_t ne00 = src00->ne[0]; GGML_UNUSED(ne00); + const int64_t ne01 = src00->ne[1]; + const int64_t ne02 = src00->ne[2]; + const int64_t ne03 = src00->ne[3]; + + //const int64_t nb01 = src00->nb[1]; + const int64_t nb02 = src00->nb[2]; GGML_UNUSED(nb02); + const int64_t nb03 = src00->nb[3]; GGML_UNUSED(nb03); + + const int64_t ne10 = src1->ne[0]; + const int64_t ne11 = src1->ne[1]; + const int64_t ne12 = src1->ne[2]; + const int64_t ne13 = src1->ne[3]; + + //const int64_t nb11 = src1->nb[1]; + const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); + const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); + + const int64_t ne1 = ggml_nelements(src1); + const int64_t ne = ggml_nelements(dst); + + CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; + + CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], 
main_stream)); + + //ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; + //void * src0_ddq = src0_extra->data_device[g_main_device]; + //half * src0_as_f16 = (half *) src0_ddq; + + ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; + float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; + + ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; + float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; + + // convert src1 to fp16 + const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); + GGML_ASSERT(to_fp16_cuda != nullptr); + + size_t src1_as = 0; + half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as); + to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); + + size_t dst_as = 0; + half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + + GGML_ASSERT(ne12 % ne02 == 0); + GGML_ASSERT(ne13 % ne03 == 0); + + // broadcast factors + const int64_t r2 = ne12/ne02; + const int64_t r3 = ne13/ne03; + + const half alpha_f16 = 1.0f; + const half beta_f16 = 0.0f; + + // use cublasGemmBatchedEx + const int ne23 = ne12*ne13; + + const void ** ptrs_src = nullptr; + void ** ptrs_dst = nullptr; + + size_t ptrs_src_s = 0; + size_t ptrs_dst_s = 0; + + ptrs_src = (const void **) ggml_cuda_pool_malloc(2*ne23*sizeof(void *), &ptrs_src_s); + ptrs_dst = ( void **) ggml_cuda_pool_malloc(1*ne23*sizeof(void *), &ptrs_dst_s); + + int64_t src0_ne = ggml_nelements(src00); + half * src0_as_f16 = nullptr; + size_t src0_as = 0; + if (src00->type != GGML_TYPE_F16) { + src0_as_f16 = (half *) ggml_cuda_pool_malloc(src0_ne * sizeof(half), &src0_as); + } + + static_assert(GGML_MAX_SRC == 6, "GGML_MAX_SRC == 6"); + dim3 block_dims(ne13, ne12); + k_compute_batched_ptrs_id<<<1, block_dims, 0, main_stream>>>( + ptrs_src, ptrs_dst, + ne12, ne13, + ne23, + ne00*ne01*sizeof(half), ne00*ne01*ne02*sizeof(half), + nb12, nb13, + dst->nb[2], dst->nb[3], + r2, r3, + src00->type, src0_as_f16, src0_ne, + src1_as_f16, dst_f16, + (const int *)((ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device], id, + dst->src[2] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[2]->extra)->data_device[g_main_device] : nullptr, + dst->src[3] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[3]->extra)->data_device[g_main_device] : nullptr, + dst->src[4] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[4]->extra)->data_device[g_main_device] : nullptr, + dst->src[5] ? 
(const half *)((ggml_tensor_extra_gpu *)dst->src[5]->extra)->data_device[g_main_device] : nullptr + ); + CUDA_CHECK(cudaGetLastError()); + + CUBLAS_CHECK( + cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, + ne01, ne11, ne10, + &alpha_f16, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, ne00, + (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, ne10, + &beta_f16, ( void **) (ptrs_dst + 0*ne23), CUDA_R_16F, ne01, + ne23, + CUBLAS_COMPUTE_16F, + CUBLAS_GEMM_DEFAULT_TENSOR_OP)); + + if (src0_as != 0) { + ggml_cuda_pool_free(src0_as_f16, src0_as); + } + if (ptrs_src_s != 0) { + ggml_cuda_pool_free(ptrs_src, ptrs_src_s); + } + if (ptrs_dst_s != 0) { + ggml_cuda_pool_free(ptrs_dst, ptrs_dst_s); + } + + const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); + to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + + ggml_cuda_pool_free(src1_as_f16, src1_as); + ggml_cuda_pool_free(dst_f16, dst_as); +} +#endif + +static void ggml_cuda_mul_mat_id(const ggml_tensor * _src0, const ggml_tensor * _src1, ggml_tensor * dst) { +#if 0 +//#ifdef CUDA_USE_TENSOR_CORES +// const bool use_tensor_cores = true; +//#else +// const bool use_tensor_cores = false; +//#endif + + ggml_cuda_mul_mat_id_cublas(dst); + + // TODO: mmq/mmv support +#else + const struct ggml_tensor * ids = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const int id = dst->op_params[0]; + + int32_t * ids_dev = (int32_t *)((ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; + + int32_t a_id; + CUDA_CHECK(cudaMemcpyAsync(&a_id, ids_dev + id, sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); + CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + + GGML_ASSERT(a_id >= 0 && a_id < ids->ne[0]); + const struct ggml_tensor * src0 = dst->src[a_id + 2]; + + ggml_cuda_mul_mat(src0, src1, dst); +#endif + + (void) _src0; + (void) _src1; +} + static void ggml_cuda_scale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_scale); } @@ -7965,6 +8357,16 @@ static void ggml_cuda_im2col(const ggml_tensor * src0, const ggml_tensor * src1, ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_im2col); } +static void ggml_cuda_sum_rows(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + GGML_ASSERT(ggml_is_contiguous(src0)); + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_sum_rows); +} + +static void ggml_cuda_argsort(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + GGML_ASSERT(ggml_is_contiguous(src0)); + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_argsort); +} + static void ggml_cuda_nop(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { (void) src0; (void) src1; @@ -8220,8 +8622,9 @@ void ggml_cuda_set_main_device(const int main_device) { main_device, g_device_count, g_main_device); return; } - g_main_device = main_device; - if (g_device_count > 1) { + + if (g_main_device != main_device && g_device_count > 1) { + g_main_device = main_device; cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, g_main_device)); fprintf(stderr, "%s: using device %d (%s) as main device\n", __func__, g_main_device, prop.name); @@ -8247,7 +8650,7 @@ void ggml_cuda_free_scratch() { } bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { - if (!g_cublas_loaded) { return false; } + if (!g_cublas_loaded) return false; ggml_cuda_func_t func; const bool any_on_device = 
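
The fallback path of ggml_cuda_mul_mat_id above resolves the expert routing on the host: it copies a single int32 id from the ids tensor back from device memory, synchronizes the stream, and then dispatches an ordinary mul_mat on the selected source tensor. A self-contained sketch of that readback pattern follows; the toy buffers, sizes, and names are illustrative assumptions, not the repository code.

```cpp
#include <cstdio>
#include <cstdint>
#include <cuda_runtime.h>

int main() {
    cudaStream_t stream;
    cudaStreamCreate(&stream);

    // stand-in for the "ids" tensor living on the device
    int32_t h_ids[4] = { 3, 1, 0, 2 };
    int32_t * d_ids;
    cudaMalloc(&d_ids, sizeof(h_ids));
    cudaMemcpyAsync(d_ids, h_ids, sizeof(h_ids), cudaMemcpyHostToDevice, stream);

    const int id = 1;          // which slot of the ids tensor to consult
    int32_t selected = -1;
    cudaMemcpyAsync(&selected, d_ids + id, sizeof(int32_t), cudaMemcpyDeviceToHost, stream);
    cudaStreamSynchronize(stream);   // the host must wait before reading "selected"

    const char * experts[4] = { "expert0", "expert1", "expert2", "expert3" };
    printf("routing id %d -> %s\n", (int) selected, experts[selected]);  // routing id 1 -> expert1

    cudaFree(d_ids);
    cudaStreamDestroy(stream);
}
```
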
tensor->backend == GGML_BACKEND_GPU @@ -8283,6 +8686,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_MUL: func = ggml_cuda_mul; break; + case GGML_OP_DIV: + func = ggml_cuda_div; + break; case GGML_OP_UNARY: switch (ggml_get_unary_op(tensor)) { case GGML_UNARY_OP_GELU: @@ -8296,7 +8702,8 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ break; default: return false; - } break; + } + break; case GGML_OP_NORM: func = ggml_cuda_norm; break; @@ -8309,6 +8716,12 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ } func = ggml_cuda_mul_mat; break; + case GGML_OP_MUL_MAT_ID: + if (!any_on_device && !ggml_cuda_can_mul_mat(tensor->src[2], tensor->src[1], tensor)) { + return false; + } + func = ggml_cuda_mul_mat_id; + break; case GGML_OP_SCALE: func = ggml_cuda_scale; break; @@ -8348,6 +8761,12 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_IM2COL: func = ggml_cuda_im2col; break; + case GGML_OP_SUM_ROWS: + func = ggml_cuda_sum_rows; + break; + case GGML_OP_ARGSORT: + func = ggml_cuda_argsort; + break; default: return false; } @@ -8364,7 +8783,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ int ggml_cuda_get_device_count() { int device_count; - CUDA_CHECK(cudaGetDeviceCount(&device_count)); + if (cudaGetDeviceCount(&device_count) != cudaSuccess) { + return 0; + } return device_count; } @@ -8380,27 +8801,16 @@ void ggml_cuda_get_device_description(int device, char * description, size_t des #define UNUSED GGML_UNUSED -struct ggml_backend_context_cuda { -}; - -static const char * ggml_backend_cuda_name(ggml_backend_t backend) { - return GGML_CUDA_NAME; - - UNUSED(backend); -} - -static void ggml_backend_cuda_free(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; - delete cuda_ctx; - delete backend; -} +// cuda buffer struct ggml_backend_buffer_context_cuda { - void * device; - + int device; + void * dev_ptr = nullptr; ggml_tensor_extra_gpu * temp_tensor_extras = nullptr; size_t temp_tensor_extra_index = 0; + ggml_backend_buffer_context_cuda(int device, void * dev_ptr) : device(device), dev_ptr(dev_ptr) {} + ~ggml_backend_buffer_context_cuda() { delete[] temp_tensor_extras; } @@ -8421,16 +8831,103 @@ struct ggml_backend_buffer_context_cuda { static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - CUDA_CHECK(cudaFree(ctx->device)); + CUDA_CHECK(cudaFree(ctx->dev_ptr)); delete ctx; } static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - return ctx->device; + return ctx->dev_ptr; } -static size_t ggml_backend_cuda_buffer_get_alloc_size(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { +static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + + if (tensor->view_src != NULL && tensor->view_offs == 0) { + assert(tensor->view_src->buffer->buft == buffer->buft); // TODO + tensor->backend = tensor->view_src->backend; + tensor->extra = tensor->view_src->extra; + return; + } + + ggml_tensor_extra_gpu * extra = ctx->ggml_cuda_alloc_temp_tensor_extra(); + + 
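
The temp_tensor_extras pool declared in the buffer context above exists so that attaching per-tensor metadata does not require a heap allocation on every init_tensor call; the index field suggests slots are handed out sequentially and reused, though the exact policy is not shown in this hunk. A rough standalone sketch under that assumption, with hypothetical sizes and names:

```cpp
#include <cstdio>
#include <cstring>

#define MAX_DEVICES 16
#define MAX_EXTRAS  64   // hypothetical pool size

struct tensor_extra {
    void * data_device[MAX_DEVICES];   // one device pointer slot per GPU
};

struct buffer_context {
    tensor_extra * extras = nullptr;
    size_t         next   = 0;

    tensor_extra * alloc_temp_extra() {
        if (extras == nullptr) {
            extras = new tensor_extra[MAX_EXTRAS];   // allocated lazily, once
        }
        tensor_extra * extra = &extras[next];
        memset(extra, 0, sizeof(*extra));
        next = (next + 1) % MAX_EXTRAS;              // recycle slots instead of allocating per tensor
        return extra;
    }

    ~buffer_context() { delete[] extras; }
};

int main() {
    buffer_context ctx;
    tensor_extra * e = ctx.alloc_temp_extra();
    e->data_device[0] = (void *) 0x1234;  // stand-in for a real device pointer
    printf("extra slot 0 holds %p\n", e->data_device[0]);
}
```
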
extra->data_device[ctx->device] = tensor->data; + + tensor->backend = GGML_BACKEND_GPU; + tensor->extra = extra; + + if (ggml_is_quantized(tensor->type)) { + // initialize padding to 0 to avoid possible NaN values + int64_t row_low = 0; + int64_t row_high = ggml_nrows(tensor); + int64_t nrows_split = row_high - row_low; + + size_t original_size = ggml_nbytes_split(tensor, nrows_split); + size_t padded_size = ggml_backend_buft_get_alloc_size(buffer->buft, tensor); + + if (padded_size > original_size && tensor->view_src == nullptr) { + CUDA_CHECK(cudaMemsetAsync((char *)tensor->data + original_size, 0, padded_size - original_size, g_cudaStreams[ctx->device][0])); + } + } + + UNUSED(buffer); +} + +static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + + CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); + + UNUSED(buffer); +} + +static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + + CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); + + UNUSED(buffer); +} + +static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { + /* .free_buffer = */ ggml_backend_cuda_buffer_free_buffer, + /* .get_base = */ ggml_backend_cuda_buffer_get_base, + /* .init_tensor = */ ggml_backend_cuda_buffer_init_tensor, + /* .set_tensor = */ ggml_backend_cuda_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_cuda_buffer_get_tensor, + /* .cpy_tensor_from = */ NULL, + /* .cpy_tensor_to = */ NULL, +}; + +// cuda buffer type + +static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + int device = (int) (intptr_t) buft->context; + + ggml_cuda_set_device(device); + + size = std::max(size, (size_t)1); // cudaMalloc returns null for size 0 + + void * dev_ptr; + CUDA_CHECK(cudaMalloc(&dev_ptr, size)); + + ggml_backend_buffer_context_cuda * ctx = new ggml_backend_buffer_context_cuda(device, dev_ptr); + + return ggml_backend_buffer_init(buft, cuda_backend_buffer_interface, ctx, size); +} + +static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return 128; + + UNUSED(buft); +} + +static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); int64_t nrows_split = row_high - row_low; @@ -8448,91 +8945,127 @@ static size_t ggml_backend_cuda_buffer_get_alloc_size(ggml_backend_buffer_t buff return size; - UNUSED(buffer); + UNUSED(buft); } -static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; +static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + return ggml_backend_is_cuda(backend); - if (tensor->view_src != NULL && tensor->view_offs 
== 0) { - assert(tensor->view_src->buffer->backend == buffer->backend); - tensor->backend = tensor->view_src->backend; - tensor->extra = tensor->view_src->extra; - return; - } - - ggml_tensor_extra_gpu * extra = ctx->ggml_cuda_alloc_temp_tensor_extra(); - - extra->data_device[g_main_device] = tensor->data; - - tensor->backend = GGML_BACKEND_GPU; - tensor->extra = extra; - - if (ggml_is_quantized(tensor->type)) { - // initialize padding to 0 to avoid possible NaN values - int64_t row_low = 0; - int64_t row_high = ggml_nrows(tensor); - int64_t nrows_split = row_high - row_low; - - size_t original_size = ggml_nbytes_split(tensor, nrows_split); - size_t padded_size = ggml_backend_cuda_buffer_get_alloc_size(tensor->buffer, tensor); - - if (padded_size > original_size && tensor->view_src == nullptr) { - CUDA_CHECK(cudaMemsetAsync((char *)tensor->data + original_size, 0, padded_size - original_size, g_cudaStreams[g_main_device][0])); - } - } - - UNUSED(buffer); + UNUSED(buft); } -static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { - /* .free_buffer = */ ggml_backend_cuda_buffer_free_buffer, - /* .get_base = */ ggml_backend_cuda_buffer_get_base, - /* .get_alloc_size = */ ggml_backend_cuda_buffer_get_alloc_size, - /* .init_tensor = */ ggml_backend_cuda_buffer_init_tensor, - /* .free_tensor = */ NULL, +static ggml_backend_buffer_type_i cuda_backend_buffer_type_interface = { + /* .alloc_buffer = */ ggml_backend_cuda_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cuda_buffer_type_get_alignment, + /* .get_alloc_size = */ ggml_backend_cuda_buffer_type_get_alloc_size, + /* .supports_backend = */ ggml_backend_cuda_buffer_type_supports_backend, }; -static ggml_backend_buffer_t ggml_backend_cuda_alloc_buffer(ggml_backend_t backend, size_t size) { - ggml_cuda_set_device(g_main_device); +ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda[GGML_CUDA_MAX_DEVICES]; + static bool ggml_backend_buffer_type_cuda_initialized = false; + if (!ggml_backend_buffer_type_cuda_initialized) { + for (int i = 0; i < GGML_CUDA_MAX_DEVICES; i++) { + ggml_backend_buffer_type_cuda[i] = { + /* .iface = */ cuda_backend_buffer_type_interface, + /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i, + }; + } + ggml_backend_buffer_type_cuda_initialized = true; + } - ggml_backend_buffer_context_cuda * ctx = new ggml_backend_buffer_context_cuda; - - size = std::max(size, (size_t)1); // cudaMalloc returns null for size 0 - - ggml_cuda_set_device(g_main_device); - CUDA_CHECK(cudaMalloc(&ctx->device, size)); - - return ggml_backend_buffer_init(backend, cuda_backend_buffer_interface, ctx, size); + return &ggml_backend_buffer_type_cuda[device]; } -static size_t ggml_backend_cuda_get_alignment(ggml_backend_t backend) { - return 128; +// host buffer type + +static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + CUDA_CHECK(cudaFreeHost(ctx->dev_ptr)); + delete ctx; +} + +static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + void * ptr; + CUDA_CHECK(cudaMallocHost(&ptr, size)); + + // FIXME: this is a hack to avoid having to implement a new buffer type + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.free_buffer = 
ggml_backend_cuda_host_buffer_free_buffer; + + return buffer; + + UNUSED(buft); +} + +struct ggml_backend_buffer_type_i cuda_backend_host_buffer_type_interface = { + /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, + /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, + /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, +}; + +ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda_host = { + /* .iface = */ cuda_backend_host_buffer_type_interface, + /* .context = */ nullptr, + }; + + return &ggml_backend_buffer_type_cuda_host; +} + +// backend + +struct ggml_backend_context_cuda { + int device; +}; + +static const char * ggml_backend_cuda_name(ggml_backend_t backend) { + return GGML_CUDA_NAME; + UNUSED(backend); } +static void ggml_backend_cuda_free(ggml_backend_t backend) { + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + delete cuda_ctx; + delete backend; +} + +static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + return ggml_backend_cuda_buffer_type(cuda_ctx->device); +} + static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[g_main_device][0])); - - UNUSED(backend); + CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); } static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - - UNUSED(backend); + CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { - CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[cuda_ctx->device][0])); UNUSED(backend); } @@ -8546,14 +9079,14 @@ static ggml_backend_graph_plan_t ggml_backend_cuda_graph_plan_create(ggml_backen UNUSED(cgraph); } -[[noreturn]] static void 
ggml_backend_cuda_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +static void ggml_backend_cuda_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { GGML_ASSERT(!"not implemented"); UNUSED(backend); UNUSED(plan); } -[[noreturn]] static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { GGML_ASSERT(!"not implemented"); UNUSED(backend); @@ -8561,7 +9094,9 @@ static ggml_backend_graph_plan_t ggml_backend_cuda_graph_plan_create(ggml_backen } static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - ggml_cuda_set_device(g_main_device); + ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + + ggml_cuda_set_main_device(cuda_ctx->device); ggml_compute_params params = {}; params.type = GGML_TASK_COMPUTE; @@ -8569,13 +9104,18 @@ static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; - if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) { + if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) continue; - } + assert(node->backend == GGML_BACKEND_GPU); + assert(node->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); + assert(node->extra != nullptr); + for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { assert(node->src[j]->backend == GGML_BACKEND_GPU); + assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); + assert(node->src[j]->extra != nullptr); } } @@ -8612,27 +9152,98 @@ static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph UNUSED(backend); } +static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { + switch (op->op) { + case GGML_OP_UNARY: + switch (ggml_get_unary_op(op)) { + case GGML_UNARY_OP_GELU: + case GGML_UNARY_OP_SILU: + case GGML_UNARY_OP_RELU: + return true; + default: + return false; + } + break; + case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: + { + struct ggml_tensor * a; + struct ggml_tensor * b; + if (op->op == GGML_OP_MUL_MAT) { + a = op->src[0]; + b = op->src[1]; + } else { + a = op->src[2]; + b = op->src[1]; + } + if (a->ne[3] != b->ne[3]) { + return false; + } + return true; + } break; + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_PERMUTE: + case GGML_OP_TRANSPOSE: + case GGML_OP_NORM: + case GGML_OP_REPEAT: + case GGML_OP_GET_ROWS: + case GGML_OP_DUP: + case GGML_OP_ADD: + case GGML_OP_MUL: + case GGML_OP_DIV: + case GGML_OP_RMS_NORM: + case GGML_OP_SCALE: + case GGML_OP_SQR: + case GGML_OP_CLAMP: + case GGML_OP_CPY: + case GGML_OP_CONT: + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_SOFT_MAX: + case GGML_OP_ROPE: + case GGML_OP_ALIBI: + case GGML_OP_IM2COL: + case GGML_OP_SUM_ROWS: + case GGML_OP_ARGSORT: + return true; + default: + return false; + } + + UNUSED(backend); +} + static ggml_backend_i cuda_backend_i = { - /* .get_name = */ ggml_backend_cuda_name, - /* .free = */ ggml_backend_cuda_free, - /* .alloc_buffer = */ ggml_backend_cuda_alloc_buffer, - /* .get_alignment = */ ggml_backend_cuda_get_alignment, - /* .set_tensor_async = */ ggml_backend_cuda_set_tensor_async, - /* .get_tensor_async 
= */ ggml_backend_cuda_get_tensor_async, - /* .synchronize = */ ggml_backend_cuda_synchronize, - /* .cpy_tensor_from = */ nullptr, - /* .cpy_tensor_to = */ nullptr, - /* .graph_plan_create = */ ggml_backend_cuda_graph_plan_create, - /* .graph_plan_free = */ ggml_backend_cuda_graph_plan_free, - /* .graph_plan_compute = */ ggml_backend_cuda_graph_plan_compute, - /* .graph_compute = */ ggml_backend_cuda_graph_compute, - /* .supports_op = */ nullptr, + /* .get_name = */ ggml_backend_cuda_name, + /* .free = */ ggml_backend_cuda_free, + /* .get_default_buffer_type = */ ggml_backend_cuda_get_default_buffer_type, + /* .set_tensor_async = */ ggml_backend_cuda_set_tensor_async, + /* .get_tensor_async = */ ggml_backend_cuda_get_tensor_async, + /* .cpy_tensor_from_async = */ NULL, + /* .cpy_tensor_to_async = */ NULL, + /* .synchronize = */ ggml_backend_cuda_synchronize, + /* .graph_plan_create = */ ggml_backend_cuda_graph_plan_create, + /* .graph_plan_free = */ ggml_backend_cuda_graph_plan_free, + /* .graph_plan_compute = */ ggml_backend_cuda_graph_plan_compute, + /* .graph_compute = */ ggml_backend_cuda_graph_compute, + /* .supports_op = */ ggml_backend_cuda_supports_op, }; -ggml_backend_t ggml_backend_cuda_init() { +ggml_backend_t ggml_backend_cuda_init(int device) { ggml_init_cublas(); // TODO: remove from ggml.c - ggml_backend_context_cuda * ctx = new ggml_backend_context_cuda; + if (device < 0 || device >= ggml_cuda_get_device_count()) { + fprintf(stderr, "%s: error: invalid device %d\n", __func__, device); + return nullptr; + } + + // not strictly necessary, but it may reduce the overhead of the first graph_compute + ggml_cuda_set_main_device(device); + + ggml_backend_context_cuda * ctx = new ggml_backend_context_cuda { + /* .device = */ device + }; ggml_backend_t cuda_backend = new ggml_backend { /* .interface = */ cuda_backend_i, @@ -8641,3 +9252,25 @@ ggml_backend_t ggml_backend_cuda_init() { return cuda_backend; } + +bool ggml_backend_is_cuda(ggml_backend_t backend) { + return backend->iface.get_name == ggml_backend_cuda_name; +} + +static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { + ggml_backend_t cuda_backend = ggml_backend_cuda_init((int) (intptr_t) user_data); + return cuda_backend; + + UNUSED(params); +} + +extern "C" int ggml_backend_cuda_reg_devices() { + int device_count = ggml_cuda_get_device_count(); + //int device_count = 1; // DEBUG: some tools require delaying CUDA initialization + for (int i = 0; i < device_count; i++) { + char name[128]; + snprintf(name, sizeof(name), "%s%d", GGML_CUDA_NAME, i); + ggml_backend_register(name, ggml_backend_reg_cuda_init, ggml_backend_cuda_buffer_type(i), (void *) (intptr_t) i); + } + return device_count; +} diff --git a/ggml-cuda.h b/ggml-cuda.h index 528e66c33..cdb0c0c41 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -49,7 +49,15 @@ GGML_API int ggml_cuda_get_device_count(void); GGML_API void ggml_cuda_get_device_description(int device, char * description, size_t description_size); // backend API -GGML_API ggml_backend_t ggml_backend_cuda_init(void); // TODO: take a list of devices to use +GGML_API ggml_backend_t ggml_backend_cuda_init(int device); + +GGML_API bool ggml_backend_is_cuda(ggml_backend_t backend); +GGML_API int ggml_backend_cuda_get_device(ggml_backend_t backend); + +GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); + +// pinned host buffer for use with CPU backend for faster copies between CPU and GPU +GGML_API ggml_backend_buffer_type_t 
ggml_backend_cuda_host_buffer_type(void); #ifdef __cplusplus } diff --git a/ggml-impl.h b/ggml-impl.h index 06c07339e..1f5610a86 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -232,7 +232,7 @@ bool ggml_hash_contains (const struct ggml_hash_set hash_set, struct ggml // returns GGML_HASHTABLE_FULL if table is full, otherwise the current index of the key or where it should be inserted size_t ggml_hash_find (const struct ggml_hash_set hash_set, struct ggml_tensor * key); -// returns GGML_HAHSHTABLE_ALREADY_EXISTS if key already exists, index otherwise, asserts if table is full +// returns GGML_HASHTABLE_ALREADY_EXISTS if key already exists, index otherwise, asserts if table is full size_t ggml_hash_insert ( struct ggml_hash_set hash_set, struct ggml_tensor * key); // return index, asserts if table is full diff --git a/ggml-metal.h b/ggml-metal.h index be2731f8b..bf52d9cd3 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -99,6 +99,12 @@ GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); GGML_API void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb); +GGML_API ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + +// helper to check if the device supports a specific family +// ideally, the user code should be doing these checks +// ref: https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf +GGML_API bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family); #ifdef __cplusplus } diff --git a/ggml-metal.m b/ggml-metal.m index be4ab0f2e..f9bd69dc8 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -62,6 +62,8 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(add_row); // TODO: avoid this extra kernel, instead extend the "add" kernel to support broadcast GGML_METAL_DECL_KERNEL(mul); GGML_METAL_DECL_KERNEL(mul_row); // TODO: avoid this extra kernel, instead extend the "mul" kernel to support broadcast + GGML_METAL_DECL_KERNEL(div); + GGML_METAL_DECL_KERNEL(div_row); GGML_METAL_DECL_KERNEL(scale); GGML_METAL_DECL_KERNEL(scale_4); GGML_METAL_DECL_KERNEL(silu); @@ -112,10 +114,24 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q4_1_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q5_0_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q5_1_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q8_0_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q2_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q3_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q4_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); GGML_METAL_DECL_KERNEL(im2col_f16); + GGML_METAL_DECL_KERNEL(argsort_f32_i32_asc); + GGML_METAL_DECL_KERNEL(argsort_f32_i32_desc); GGML_METAL_DECL_KERNEL(cpy_f32_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f32); GGML_METAL_DECL_KERNEL(cpy_f32_q8_0); @@ -126,6 +142,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(cpy_f16_f16); GGML_METAL_DECL_KERNEL(concat); GGML_METAL_DECL_KERNEL(sqr); + GGML_METAL_DECL_KERNEL(sum_rows); #undef GGML_METAL_DECL_KERNEL }; @@ -169,12 +186,10 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ } } - - struct ggml_metal_context * ggml_metal_init(int n_cb) { 
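
The Metal init path below gains an explicit log of GGML_METAL_PATH_RESOURCES, the environment variable that overrides where ggml-metal.metal is loaded from. A standalone sketch of that override-with-fallback pattern in plain C++ (the patch does this in Objective-C via NSProcessInfo; the helper name and the default directory below are hypothetical):

```cpp
#include <cstdio>
#include <cstdlib>
#include <string>

static std::string resolve_metal_source(const char * default_dir) {
    // prefer the user-supplied resource directory, fall back to the bundled default
    const char * override_dir = std::getenv("GGML_METAL_PATH_RESOURCES");
    const std::string dir = override_dir ? override_dir : default_dir;
    return dir + "/ggml-metal.metal";
}

int main() {
    // with the variable unset this prints "<bundle>/ggml-metal.metal"
    printf("loading shaders from: %s\n", resolve_metal_source("<bundle>").c_str());
}
```
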
GGML_METAL_LOG_INFO("%s: allocating\n", __func__); - id device; + id device; NSString * s; #if TARGET_OS_OSX @@ -220,6 +235,9 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { NSString * sourcePath; NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; + + GGML_METAL_LOG_INFO("%s: GGML_METAL_PATH_RESOURCES = %s\n", __func__, ggmlMetalPathResources ? [ggmlMetalPathResources UTF8String] : "nil"); + if (ggmlMetalPathResources) { sourcePath = [ggmlMetalPathResources stringByAppendingPathComponent:@"ggml-metal.metal"]; } else { @@ -250,6 +268,29 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { } } +#if TARGET_OS_OSX + // print MTL GPU family: + GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); + + // determine max supported GPU family + // https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf + // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf + for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); + break; + } + } + + GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? "true" : "false"); + GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + if (ctx->device.maxTransferRate != 0) { + GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); + } else { + GGML_METAL_LOG_INFO("%s: maxTransferRate = built-in GPU\n", __func__); + } +#endif + // load kernels { NSError * error = nil; @@ -271,6 +312,8 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(add_row); GGML_METAL_ADD_KERNEL(mul); GGML_METAL_ADD_KERNEL(mul_row); + GGML_METAL_ADD_KERNEL(div); + GGML_METAL_ADD_KERNEL(div_row); GGML_METAL_ADD_KERNEL(scale); GGML_METAL_ADD_KERNEL(scale_4); GGML_METAL_ADD_KERNEL(silu); @@ -322,11 +365,25 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q4_1_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q5_0_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q5_1_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q8_0_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q2_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q3_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q4_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); } GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); GGML_METAL_ADD_KERNEL(alibi_f32); GGML_METAL_ADD_KERNEL(im2col_f16); + GGML_METAL_ADD_KERNEL(argsort_f32_i32_asc); + GGML_METAL_ADD_KERNEL(argsort_f32_i32_desc); GGML_METAL_ADD_KERNEL(cpy_f32_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f32); GGML_METAL_ADD_KERNEL(cpy_f32_q8_0); @@ -337,33 +394,11 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(cpy_f16_f16); GGML_METAL_ADD_KERNEL(concat); GGML_METAL_ADD_KERNEL(sqr); + GGML_METAL_ADD_KERNEL(sum_rows); #undef GGML_METAL_ADD_KERNEL } -#if TARGET_OS_OSX - // print MTL GPU family: - GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] 
UTF8String]); - - // determine max supported GPU family - // https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf - // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf - for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { - if ([ctx->device supportsFamily:i]) { - GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); - break; - } - } - - GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? "true" : "false"); - GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MiB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - if (ctx->device.maxTransferRate != 0) { - GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MiB/s\n", __func__, ctx->device.maxTransferRate / 1024.0 / 1024.0); - } else { - GGML_METAL_LOG_INFO("%s: maxTransferRate = built-in GPU\n", __func__); - } -#endif - return ctx; } @@ -377,6 +412,8 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(add_row); GGML_METAL_DEL_KERNEL(mul); GGML_METAL_DEL_KERNEL(mul_row); + GGML_METAL_DEL_KERNEL(div); + GGML_METAL_DEL_KERNEL(div_row); GGML_METAL_DEL_KERNEL(scale); GGML_METAL_DEL_KERNEL(scale_4); GGML_METAL_DEL_KERNEL(silu); @@ -428,11 +465,25 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q4_1_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q5_0_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q5_1_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q8_0_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q2_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q3_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q4_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); } GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); GGML_METAL_DEL_KERNEL(alibi_f32); GGML_METAL_DEL_KERNEL(im2col_f16); + GGML_METAL_DEL_KERNEL(argsort_f32_i32_asc); + GGML_METAL_DEL_KERNEL(argsort_f32_i32_desc); GGML_METAL_DEL_KERNEL(cpy_f32_f16); GGML_METAL_DEL_KERNEL(cpy_f32_f32); GGML_METAL_DEL_KERNEL(cpy_f32_q8_0); @@ -443,6 +494,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(cpy_f16_f16); GGML_METAL_DEL_KERNEL(concat); GGML_METAL_DEL_KERNEL(sqr); + GGML_METAL_DEL_KERNEL(sum_rows); #undef GGML_METAL_DEL_KERNEL @@ -486,6 +538,13 @@ int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx) { return ctx->concur_list; } +// temporarily defined here for compatibility between ggml-backend and the old API +struct ggml_backend_metal_buffer_context { + void * data; + + id metal; +}; + // finds the Metal buffer that contains the tensor data on the GPU device // the assumption is that there is 1-to-1 mapping between the host and device memory buffers, so we can find the // Metal buffer based on the host memory pointer @@ -495,8 +554,17 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru const int64_t tsize = ggml_nbytes(t); - if (t->buffer && t->buffer->backend && t->buffer->backend->context) { - ctx = t->buffer->backend->context; + // compatibility with ggml-backend + if (t->buffer && t->buffer->buft == ggml_backend_metal_buffer_type()) { + struct ggml_backend_metal_buffer_context * buf_ctx = (struct 
ggml_backend_metal_buffer_context *) t->buffer->context; + + const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->data; + + GGML_ASSERT(ioffs >= 0 && ioffs + tsize <= (int64_t) t->buffer->size); + + *offs = (size_t) ioffs; + + return buf_ctx->metal; } // find the view that contains the tensor fully @@ -721,6 +789,51 @@ void ggml_metal_graph_find_concurrency( } } +static bool ggml_metal_supports_op(const struct ggml_tensor * op) { + switch (op->op) { + case GGML_OP_UNARY: + switch (ggml_get_unary_op(op)) { + case GGML_UNARY_OP_SILU: + case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_GELU: + return true; + default: + return false; + } + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + case GGML_OP_CONCAT: + case GGML_OP_ADD: + case GGML_OP_MUL: + case GGML_OP_DIV: + case GGML_OP_SCALE: + case GGML_OP_SQR: + case GGML_OP_SUM_ROWS: + case GGML_OP_SOFT_MAX: + case GGML_OP_RMS_NORM: + case GGML_OP_NORM: + case GGML_OP_ALIBI: + case GGML_OP_ROPE: + case GGML_OP_IM2COL: + case GGML_OP_ARGSORT: + case GGML_OP_DUP: + case GGML_OP_CPY: + case GGML_OP_CONT: + case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: + return true; + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_GET_ROWS: + { + return op->ne[0] % 4 == 0; + } + default: + return false; + } +} void ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @@ -791,6 +904,8 @@ void ggml_metal_graph_compute( } break; } + GGML_ASSERT(ggml_metal_supports_op(dst)); + const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; const int64_t ne02 = src0 ? src0->ne[2] : 0; @@ -883,6 +998,8 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; case GGML_OP_ADD: + case GGML_OP_MUL: + case GGML_OP_DIV: { GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); @@ -896,11 +1013,21 @@ void ggml_metal_graph_compute( GGML_ASSERT(ne11 == 1); nb = ne00 / 4; - [encoder setComputePipelineState:ctx->pipeline_add_row]; + switch (dst->op) { + case GGML_OP_ADD: [encoder setComputePipelineState:ctx->pipeline_add_row]; break; + case GGML_OP_MUL: [encoder setComputePipelineState:ctx->pipeline_mul_row]; break; + case GGML_OP_DIV: [encoder setComputePipelineState:ctx->pipeline_div_row]; break; + default: GGML_ASSERT(false); + } bcast_row = true; } else { - [encoder setComputePipelineState:ctx->pipeline_add]; + switch (dst->op) { + case GGML_OP_ADD: [encoder setComputePipelineState:ctx->pipeline_add]; break; + case GGML_OP_MUL: [encoder setComputePipelineState:ctx->pipeline_mul]; break; + case GGML_OP_DIV: [encoder setComputePipelineState:ctx->pipeline_div]; break; + default: GGML_ASSERT(false); + } } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; @@ -941,31 +1068,6 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } } break; - case GGML_OP_MUL: - { - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(src1)); - - // utilize float4 - GGML_ASSERT(ne00 % 4 == 0); - const int64_t nb = ne00/4; - - if (ggml_nelements(src1) == ne10) { - // src1 is a row - GGML_ASSERT(ne11 == 1); - [encoder setComputePipelineState:ctx->pipeline_mul_row]; - } else { - [encoder setComputePipelineState:ctx->pipeline_mul]; - } - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder 
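For reference, the ggml-backend compatibility path in ggml_metal_get_buffer above recovers the byte offset of a tensor inside a host-mapped Metal buffer purely from pointer arithmetic: the tensor's data pointer minus the buffer context's base pointer. A minimal standalone C sketch of that check, with buf_base and buf_size standing in for the buffer context fields (illustrative names, not taken from the patch):

```c
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

// Sketch of the offset recovery used by the ggml-backend path: tensor_data
// points somewhere inside the host-mapped region starting at buf_base, and the
// offset into the corresponding Metal buffer is the distance from that base.
// buf_base and buf_size are stand-ins for the Metal buffer context fields.
static size_t tensor_offset_in_buffer(const void * tensor_data, size_t tensor_size,
                                      const void * buf_base,    size_t buf_size) {
    const int64_t ioffs = (int64_t) ((uintptr_t) tensor_data - (uintptr_t) buf_base);
    assert(ioffs >= 0 && (size_t) ioffs + tensor_size <= buf_size);
    return (size_t) ioffs;
}
```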
setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:3]; - - const int64_t n = ggml_nelements(dst)/4; - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; case GGML_OP_SCALE: { GGML_ASSERT(ggml_is_contiguous(src0)); @@ -1038,6 +1140,40 @@ void ggml_metal_graph_compute( const int64_t n = ggml_nelements(dst); [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; + case GGML_OP_SUM_ROWS: + { + GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); + + [encoder setComputePipelineState:ctx->pipeline_sum_rows]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; case GGML_OP_SOFT_MAX: { int nth = 32; // SIMD width @@ -1092,13 +1228,17 @@ void ggml_metal_graph_compute( case GGML_OP_MUL_MAT: { GGML_ASSERT(ne00 == ne10); - GGML_ASSERT(ne03 == ne13); - const uint gqa = ne12/ne02; + // TODO: assert that dim2 and dim3 are contiguous + GGML_ASSERT(ne12 % ne02 == 0); + GGML_ASSERT(ne13 % ne03 == 0); + + const uint r2 = ne12/ne02; + const uint r3 = ne13/ne03; // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = src0t == GGML_TYPE_F16 ? 
1 : 16; + int ne11_mm_min = 1; #if 0 // the numbers below are measured on M2 Ultra for 7B and 13B models @@ -1159,9 +1299,10 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; - [encoder setBytes:&gqa length:sizeof(gqa) atIndex:13]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; } else { int nth0 = 32; int nth1 = 1; @@ -1197,90 +1338,60 @@ void ggml_metal_graph_compute( } break; case GGML_TYPE_Q4_0: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 8; nth1 = 8; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_0_f32]; } break; case GGML_TYPE_Q4_1: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 8; nth1 = 8; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_1_f32]; } break; case GGML_TYPE_Q5_0: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 8; nth1 = 8; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_0_f32]; } break; case GGML_TYPE_Q5_1: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 8; nth1 = 8; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_1_f32]; } break; case GGML_TYPE_Q8_0: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 8; nth1 = 8; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q8_0_f32]; } break; case GGML_TYPE_Q2_K: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 2; nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q2_K_f32]; } break; case GGML_TYPE_Q3_K: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 2; nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q3_K_f32]; } break; case GGML_TYPE_Q4_K: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 4; //1; nth1 = 8; //32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_K_f32]; } break; case GGML_TYPE_Q5_K: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 2; nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_K_f32]; } break; case GGML_TYPE_Q6_K: { - GGML_ASSERT(ne02 == 1); - GGML_ASSERT(ne12 == 1); - nth0 = 2; nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q6_K_f32]; @@ -1309,34 +1420,127 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; - [encoder setBytes:&gqa length:sizeof(gqa) atIndex:17]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 
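The single gqa ratio is replaced here by two broadcast factors, r2 = ne12/ne02 and r3 = ne13/ne03, and the matrix kernels are now dispatched over all ne12*ne13 batch slices. A small C sketch of the index mapping the kernels perform (names follow the kernel code; the helper itself is only an illustration):

```c
#include <stdint.h>

// For a flattened batch index im in [0, ne12*ne13), src1 uses slice (i12, i13),
// while src0 only has ne02*ne03 slices and is broadcast: slice (i12/r2, i13/r3)
// is reused, with r2 = ne12/ne02 and r3 = ne13/ne03.
static void broadcast_slices(int64_t im, int64_t ne12, uint32_t r2, uint32_t r3,
                             int64_t * i02, int64_t * i03) {
    const int64_t i12 = im % ne12;  // src1 dim-2 index
    const int64_t i13 = im / ne12;  // src1 dim-3 index
    *i02 = i12 / r2;                // src0 dim-2 index shared by r2 consecutive src1 slices
    *i03 = i13 / r3;                // src0 dim-3 index shared by r3 src1 planes
}
```

The per-row source offsets in the quantized kernels then take the form (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02), as seen throughout the .metal changes below.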
3)/4, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_Q3_K) { #ifdef GGML_QKK_64 - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; #else - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; #endif } else if (src0t == GGML_TYPE_Q5_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_Q6_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else { int64_t ny = (ne11 + nrows - 1)/nrows; - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } } } break; + case GGML_OP_MUL_MAT_ID: + { + //GGML_ASSERT(ne00 == ne10); + //GGML_ASSERT(ne03 == ne13); + + GGML_ASSERT(src0t == GGML_TYPE_I32); + + const int n_as = ne00; + + // TODO: make this more general + GGML_ASSERT(n_as <= 8); + + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; + + const int64_t ne20 = src2 ? src2->ne[0] : 0; + const int64_t ne21 = src2 ? src2->ne[1] : 0; + const int64_t ne22 = src2 ? src2->ne[2] : 0; + const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23); + + const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20); + const uint64_t nb21 = src2 ? src2->nb[1] : 0; + const uint64_t nb22 = src2 ? src2->nb[2] : 0; + const uint64_t nb23 = src2 ? src2->nb[3] : 0; GGML_UNUSED(nb23); + + const enum ggml_type src2t = src2 ? src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t); + + GGML_ASSERT(!ggml_is_transposed(src2)); + GGML_ASSERT(!ggml_is_transposed(src1)); + + GGML_ASSERT(ne20 % 32 == 0); + // !!!!!!!!! TODO: this assert is probably required but not sure! 
+ //GGML_ASSERT(ne20 >= 64); + GGML_ASSERT(src1t == GGML_TYPE_F32); + + const uint r2 = ne12/ne22; + const uint r3 = ne13/ne23; + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + int ne11_mm_min = 0; + + const int idx = ((int32_t *) dst->op_params)[0]; + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + ne11 > ne11_mm_min) { + switch (src2->type) { + case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; + case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; + case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_0_f32]; break; + case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_1_f32]; break; + case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_0_f32]; break; + case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_1_f32]; break; + case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q8_0_f32]; break; + case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q2_K_f32]; break; + case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q3_K_f32]; break; + case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_K_f32]; break; + case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; + case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; + default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); + } + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:3]; + [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:4]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:5]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:6]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:15]; + // TODO: how to make this an array? 
read Metal docs + for (int j = 0; j < n_as; ++j) { + struct ggml_tensor * src_cur = dst->src[2 + j]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:16 + j]; + } + + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne21 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } + } break; case GGML_OP_GET_ROWS: { switch (src0->type) { @@ -1560,6 +1764,27 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; } break; + case GGML_OP_ARGSORT: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_I32); + + const int nrows = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + + switch (order) { + case GGML_SORT_ASC: [encoder setComputePipelineState:ctx->pipeline_argsort_f32_i32_asc]; break; + case GGML_SORT_DESC: [encoder setComputePipelineState:ctx->pipeline_argsort_f32_i32_desc]; break; + default: GGML_ASSERT(false); + }; + + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; + } break; case GGML_OP_DUP: case GGML_OP_CPY: case GGML_OP_CONT: @@ -1655,6 +1880,132 @@ void ggml_metal_graph_compute( // backend interface +static id g_backend_device = nil; +static int g_backend_device_ref_count = 0; + +static id ggml_backend_metal_get_device(void) { + if (g_backend_device == nil) { + g_backend_device = MTLCreateSystemDefaultDevice(); + } + + g_backend_device_ref_count++; + + return g_backend_device; +} + +static void ggml_backend_metal_free_device(void) { + assert(g_backend_device_ref_count > 0); + + g_backend_device_ref_count--; + + if (g_backend_device_ref_count == 0) { + [g_backend_device release]; + g_backend_device = nil; + } +} + +static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + return ctx->data; +} + +static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + [ctx->metal release]; + ggml_backend_metal_free_device(); + + free(ctx->data); + free(ctx); + + UNUSED(buffer); +} + +static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + + memcpy((char *)tensor->data + offset, data, size); + + UNUSED(buffer); +} + +static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + + memcpy(data, (const char *)tensor->data + offset, size); + + UNUSED(buffer); +} + +static void ggml_backend_metal_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, 
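The MUL_MAT_ID case above binds each expert matrix as its own buffer (argument indices 16..16+n_as-1, with n_as capped at 8 for now), and the kernel later selects one of them with ids[idx]. A minimal C sketch of that selection step, with expert_weights standing in for the separately bound weight buffers (an assumed name, used only for illustration):

```c
#include <assert.h>
#include <stdint.h>

// Sketch of the MUL_MAT_ID indirection: ids is the int32 index tensor, idx
// (taken from op_params) picks which entry of ids to read, and the returned
// expert matrix is then multiplied exactly like a plain MUL_MAT operand.
// expert_weights stands in for the up-to-8 separately bound weight buffers.
static const void * select_expert(const void * const * expert_weights, int n_as,
                                  const int32_t * ids, int idx) {
    const int32_t e = ids[idx];
    assert(e >= 0 && e < n_as);
    return expert_weights[e];
}
```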
struct ggml_tensor * dst) { + ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); + + UNUSED(buffer); +} + +static void ggml_backend_metal_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { + ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); + + UNUSED(buffer); +} + +static struct ggml_backend_buffer_i metal_backend_buffer_i = { + /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, + /* .get_base = */ ggml_backend_metal_buffer_get_base, + /* .init_tensor = */ NULL, + /* .set_tensor = */ ggml_backend_metal_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, + /* .cpy_tensor_from = */ ggml_backend_metal_buffer_cpy_tensor_from, + /* .cpy_tensor_to = */ ggml_backend_metal_buffer_cpy_tensor_to, +}; + +static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); + + const size_t size_page = sysconf(_SC_PAGESIZE); + + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += (size_page - (size_aligned % size_page)); + } + + ctx->data = ggml_metal_host_malloc(size); + ctx->metal = [ggml_backend_metal_get_device() newBufferWithBytesNoCopy:ctx->data + length:size_aligned + options:MTLResourceStorageModeShared + deallocator:nil]; + + return ggml_backend_buffer_init(buft, metal_backend_buffer_i, ctx, size); +} + +static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return 32; + UNUSED(buft); +} + +static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + return ggml_backend_is_metal(backend) || ggml_backend_is_cpu(backend); + + GGML_UNUSED(buft); +} + +ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_metal_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .supports_backend = */ ggml_backend_metal_buffer_type_supports_backend, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_buffer_type_metal; +} + static const char * ggml_backend_metal_name(ggml_backend_t backend) { return "Metal"; @@ -1667,69 +2018,12 @@ static void ggml_backend_metal_free(ggml_backend_t backend) { free(backend); } -static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { - return (void *)buffer->context; -} - -static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { - free(buffer->context); - UNUSED(buffer); -} - -static struct ggml_backend_buffer_i metal_backend_buffer_i = { - /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, - /* .get_base = */ ggml_backend_metal_buffer_get_base, - /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes - /* .init_tensor = */ NULL, // no initialization required - /* .free_tensor = */ NULL, // no cleanup required -}; - -static ggml_backend_buffer_t ggml_backend_metal_alloc_buffer(ggml_backend_t backend, size_t size) { - struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; - - void * data = ggml_metal_host_malloc(size); - - // TODO: set proper name of the buffers - ggml_metal_add_buffer(ctx, "backend", data, size, 0); - - return 
ggml_backend_buffer_init(backend, metal_backend_buffer_i, data, size); -} - -static size_t ggml_backend_metal_get_alignment(ggml_backend_t backend) { - return 32; - UNUSED(backend); -} - -static void ggml_backend_metal_set_tensor_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - - memcpy((char *)tensor->data + offset, data, size); - - UNUSED(backend); -} - -static void ggml_backend_metal_get_tensor_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - - memcpy(data, (const char *)tensor->data + offset, size); - - UNUSED(backend); -} - static void ggml_backend_metal_synchronize(ggml_backend_t backend) { UNUSED(backend); } -static void ggml_backend_metal_cpy_tensor_from(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - UNUSED(backend); -} - -static void ggml_backend_metal_cpy_tensor_to(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set_async(dst, src->data, 0, ggml_nbytes(src)); +static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_metal_buffer_type(); UNUSED(backend); } @@ -1741,32 +2035,43 @@ static void ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml } static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { - return true; + return ggml_metal_supports_op(op); + UNUSED(backend); - UNUSED(op); } static struct ggml_backend_i metal_backend_i = { - /* .get_name = */ ggml_backend_metal_name, - /* .free = */ ggml_backend_metal_free, - /* .alloc_buffer = */ ggml_backend_metal_alloc_buffer, - /* .get_alignment = */ ggml_backend_metal_get_alignment, - /* .set_tensor_async = */ ggml_backend_metal_set_tensor_async, - /* .get_tensor_async = */ ggml_backend_metal_get_tensor_async, - /* .synchronize = */ ggml_backend_metal_synchronize, - /* .cpy_tensor_from = */ ggml_backend_metal_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_metal_cpy_tensor_to, - /* .graph_plan_create = */ NULL, // the metal implementation does not require creating graph plans atm - /* .graph_plan_free = */ NULL, - /* .graph_plan_compute = */ NULL, - /* .graph_compute = */ ggml_backend_metal_graph_compute, - /* .supports_op = */ ggml_backend_metal_supports_op, + /* .get_name = */ ggml_backend_metal_name, + /* .free = */ ggml_backend_metal_free, + /* .get_default_buffer_type = */ ggml_backend_metal_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_from_async = */ NULL, + /* .cpy_tensor_to_async = */ NULL, + /* .synchronize = */ ggml_backend_metal_synchronize, + /* .graph_plan_create = */ NULL, // the metal implementation does not require creating graph plans atm + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, + /* .graph_compute = */ ggml_backend_metal_graph_compute, + /* .supports_op = */ ggml_backend_metal_supports_op, }; -ggml_backend_t ggml_backend_metal_init(void) { - struct ggml_metal_context * ctx = malloc(sizeof(struct ggml_metal_context)); +// TODO: make a 
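The buffer type allocator above rounds the requested size up to a whole number of pages before handing the host pointer to newBufferWithBytesNoCopy, which works on page-granular mappings. A short sketch of that rounding, assuming page_size comes from sysconf(_SC_PAGESIZE) as in the allocator:

```c
#include <stddef.h>

// Round a requested allocation size up to the next multiple of the page size,
// matching the size_aligned computation in the Metal buffer type allocator.
static size_t align_to_page(size_t size, size_t page_size) {
    const size_t rem = size % page_size;
    return rem == 0 ? size : size + (page_size - rem);
}
```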
common log callback for all backends in ggml-backend +static void ggml_backend_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { + fprintf(stderr, "%s", msg); - ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); + UNUSED(level); + UNUSED(user_data); +} + +ggml_backend_t ggml_backend_metal_init(void) { + ggml_metal_log_set_callback(ggml_backend_log_callback, NULL); + + struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); + + if (ctx == NULL) { + return NULL; + } ggml_backend_t metal_backend = malloc(sizeof(struct ggml_backend)); @@ -1783,7 +2088,26 @@ bool ggml_backend_is_metal(ggml_backend_t backend) { } void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; ggml_metal_set_n_cb(ctx, n_cb); } + +bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + + struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; + + return [ctx->device supportsFamily:(MTLGPUFamilyApple1 + family - 1)]; +} + +ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning + +ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { + return ggml_backend_metal_init(); + + GGML_UNUSED(params); + GGML_UNUSED(user_data); +} diff --git a/ggml-metal.metal b/ggml-metal.metal index 9f5ffcbaf..2f8ea22d6 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -4,6 +4,7 @@ using namespace metal; #define MAX(x, y) ((x) > (y) ? (x) : (y)) #define MIN(x, y) ((x) < (y) ? (x) : (y)) +#define SWAP(x, y) { auto tmp = (x); (x) = (y); (y) = tmp; } #define QK4_0 32 #define QR4_0 2 @@ -42,8 +43,13 @@ typedef struct { #define N_SIMDWIDTH 32 // assuming SIMD group size is 32 -// general-purpose kernel for addition of two tensors -// pros: works for non-contiguous tensors, supports broadcast across dims 1, 2 and 3 +enum ggml_sort_order { + GGML_SORT_ASC, + GGML_SORT_DESC, +}; + +// general-purpose kernel for addition, multiplication and division of two tensors +// pros: works for non-contiguous tensors, supports broadcast across all dims // cons: not very efficient kernel void kernel_add( device const char * src0, @@ -84,16 +90,111 @@ kernel void kernel_add( const int64_t i12 = i02 % ne12; const int64_t i11 = i01 % ne11; - device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + tpitg.x*nb00; - device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11 + tpitg.x*nb10; - device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + tpitg.x*nb0; + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { - ((device float *)dst_ptr)[0] = ((device float *)src0_ptr)[0] + ((device float *)src1_ptr)[0]; + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) + *((device float *)(src1_ptr + i10*nb10)); + } +} - src0_ptr += ntg.x*nb00; - src1_ptr += ntg.x*nb10; - dst_ptr += ntg.x*nb0; +kernel void kernel_mul( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant int64_t & nb00, + constant int64_t & 
nb01, + constant int64_t & nb02, + constant int64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant int64_t & nb0, + constant int64_t & nb1, + constant int64_t & nb2, + constant int64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) * *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_div( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant int64_t & nb00, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant int64_t & nb0, + constant int64_t & nb1, + constant int64_t & nb2, + constant int64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) / *((device float *)(src1_ptr + i10*nb10)); } } @@ -108,25 +209,24 @@ kernel void kernel_add_row( dst[tpig] = src0[tpig] + src1[tpig % nb]; } -kernel void kernel_mul( - device const float4 * src0, - device const float4 * src1, - device float4 * dst, - uint tpig[[thread_position_in_grid]]) { - dst[tpig] = src0[tpig] * src1[tpig]; -} - -// assumption: src1 is a row -// broadcast src1 into src0 kernel void kernel_mul_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb, + constant int64_t & nb [[buffer(27)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] * src1[tpig % nb]; } +kernel void kernel_div_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant int64_t & nb [[buffer(27)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = 
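kernel_add, kernel_mul and kernel_div above now share the same broadcast rule: each src1 coordinate is the dst coordinate reduced modulo src1's extent in that dimension, so src1 may be smaller than src0 along any axis. A contiguous-layout C sketch of that rule (the helper is illustrative; the kernels work on raw byte strides):

```c
#include <stdint.h>

// Broadcast lookup matching the generalized add/mul/div kernels: index src1 at
// (i0 % ne10, i1 % ne11, i2 % ne12, i3 % ne13). A contiguous layout in ggml
// order (dim 0 fastest) is assumed for the illustration.
static float src1_broadcast_f32(const float * src1, const int64_t ne1[4],
                                int64_t i0, int64_t i1, int64_t i2, int64_t i3) {
    const int64_t i10 = i0 % ne1[0];
    const int64_t i11 = i1 % ne1[1];
    const int64_t i12 = i2 % ne1[2];
    const int64_t i13 = i3 % ne1[3];
    return src1[((i13*ne1[2] + i12)*ne1[1] + i11)*ne1[0] + i10];
}
```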
src0[tpig] / src1[tpig % nb]; +} + kernel void kernel_scale( device const float * src0, device float * dst, @@ -165,6 +265,54 @@ kernel void kernel_sqr( dst[tpig] = src0[tpig] * src0[tpig]; } +kernel void kernel_sum_rows( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant int64_t & nb00, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant int64_t & nb0, + constant int64_t & nb1, + constant int64_t & nb2, + constant int64_t & nb3, + uint3 tpig[[thread_position_in_grid]]) { + int64_t i3 = tpig.z; + int64_t i2 = tpig.y; + int64_t i1 = tpig.x; + + if (i3 >= ne03 || i2 >= ne02 || i1 >= ne01) { + return; + } + + device const float * src_row = (device const float *) ((device const char *) src0 + i1*nb01 + i2*nb02 + i3*nb03); + device float * dst_row = (device float *) ((device char *) dst + i1*nb1 + i2*nb2 + i3*nb3); + + float row_sum = 0; + + for (int64_t i0 = 0; i0 < ne00; i0++) { + row_sum += src_row[i0]; + } + + dst_row[0] = row_sum; +} + constant float GELU_COEF_A = 0.044715f; constant float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; @@ -583,9 +731,20 @@ inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thre // giard against the number of rows not being divisible by // N_DST, so this is another explicit assumption of the implementation. template -void mul_vec_q_n_f32(device const void * src0, device const float * src1, device float * dst, - int64_t ne00, int64_t ne01, int64_t ne02, int64_t ne10, int64_t ne12, int64_t ne0, int64_t ne1, uint gqa, - uint3 tgpig, uint tiisg, uint sgitg) { +void mul_vec_q_n_f32( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + uint3 tgpig, uint tiisg, uint sgitg) { const int nb = ne00/QK4_0; const int r0 = tgpig.x; @@ -594,7 +753,10 @@ void mul_vec_q_n_f32(device const void * src0, device const float * src1, device const int first_row = (r0 * nsg + sgitg) * nr; - const uint offset0 = first_row * nb + im/gqa*(nb*ne0); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); device const block_q_type * x = (device const block_q_type *) src0 + offset0; device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; @@ -644,13 +806,14 @@ kernel void kernel_mul_mv_q4_0_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,gqa,tgpig,tiisg,sgitg); + 
mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q4_1_f32( @@ -662,13 +825,14 @@ kernel void kernel_mul_mv_q4_1_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,gqa,tgpig,tiisg,sgitg); + mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q5_0_f32( @@ -680,13 +844,14 @@ kernel void kernel_mul_mv_q5_0_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,gqa,tgpig,tiisg,sgitg); + mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q5_1_f32( @@ -698,13 +863,14 @@ kernel void kernel_mul_mv_q5_1_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,gqa,tgpig,tiisg,sgitg); + mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } @@ -719,9 +885,10 @@ kernel void kernel_mul_mv_q8_0_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -733,8 +900,14 @@ kernel void kernel_mul_mv_q8_0_f32( const int r0 = tgpig.x; const int r1 = tgpig.y; const int im = tgpig.z; + const int first_row = (r0 * nsg + sgitg) * nr; - const uint offset0 = first_row * nb + im/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q8_0 * x = (device const block_q8_0 *) src0 + offset0; device const float * y = (device 
const float *) src1 + r1*ne10 + im*ne00*ne1; @@ -792,6 +965,8 @@ kernel void kernel_mul_mv_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -799,7 +974,12 @@ kernel void kernel_mul_mv_f32_f32( const int64_t rb = tgpig.y*N_F32_F32; const int64_t im = tgpig.z; - device const float * x = (device const float *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const float * x = (device const float *) (src0 + offset0); if (ne00 < 128) { for (int row = 0; row < N_F32_F32; ++row) { @@ -865,6 +1045,8 @@ kernel void kernel_mul_mv_f16_f16( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -872,7 +1054,12 @@ kernel void kernel_mul_mv_f16_f16( const int64_t rb = tgpig.y*N_F16_F16; const int64_t im = tgpig.z; - device const half * x = (device const half *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const half * x = (device const half *) (src0 + offset0); if (ne00 < 128) { for (int row = 0; row < N_F16_F16; ++row) { @@ -936,6 +1123,8 @@ kernel void kernel_mul_mv_f16_f32_1row( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -943,7 +1132,12 @@ kernel void kernel_mul_mv_f16_f32_1row( const int64_t r1 = tgpig.y; const int64_t im = tgpig.z; - device const half * x = (device const half *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const half * x = (device const half *) (src0 + offset0); device const float * y = (device const float *) (src1 + r1*nb11 + im*nb12); float sumf = 0; @@ -990,6 +1184,8 @@ kernel void kernel_mul_mv_f16_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -997,7 +1193,12 @@ kernel void kernel_mul_mv_f16_f32( const int64_t rb = tgpig.y*N_F16_F32; const int64_t im = tgpig.z; - device const half * x = (device const half *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const half * x = (device const half *) (src0 + offset0); if (ne00 < 128) { for (int row = 0; row < N_F16_F32; ++row) { @@ -1062,6 +1263,8 @@ kernel void kernel_mul_mv_f16_f32_l4( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1069,7 +1272,12 @@ kernel void kernel_mul_mv_f16_f32_l4( const int64_t r0 = tgpig.x; const int64_t im = tgpig.z; - device const half4 * 
x4 = (device const half4 *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const half4 * x4 = (device const half4 *) (src0 + offset0); for (int r1 = 0; r1 < nrows; ++r1) { device const float4 * y4 = (device const float4 *) (src1 + r1*nb11 + im*nb12); @@ -1121,17 +1329,21 @@ kernel void kernel_alibi_f32( const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + const int64_t k = i3*ne3 + i2; - device float * dst_data = (device float *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); float m_k; - if (i2 < n_heads_log2_floor) { - m_k = pow(m0, i2 + 1); + if (k < n_heads_log2_floor) { + m_k = pow(m0, k + 1); } else { - m_k = pow(m1, 2 * (i2 - n_heads_log2_floor) + 1); + m_k = pow(m1, 2 * (k - n_heads_log2_floor) + 1); } + + device char * dst_row = (device char *) dst + i3*nb3 + i2*nb2 + i1*nb1; + device const char * src_row = (device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01; for (int64_t i00 = tpitg.x; i00 < ne00; i00 += ntg.x) { - device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); - dst_data[i00] = src[0] + m_k * (i00 - ne00 + 1); + const float src_v = *(device float *)(src_row + i00*nb00); + device float * dst_v = (device float *)(dst_row + i00*nb0); + *dst_v = i00 * m_k + src_v; } } @@ -1336,6 +1548,58 @@ kernel void kernel_im2col_f16( } } +// bitonic sort implementation following the CUDA kernels as reference +typedef void (argsort_t)( + device const float * x, + device int32_t * dst, + constant int64_t & ncols, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]]); + +template +kernel void kernel_argsort_f32_i32( + device const float * x, + device int32_t * dst, + constant int64_t & ncols, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]]) { + // bitonic sort + int col = tpitg[0]; + int row = tgpig[1]; + + if (col >= ncols) return; + + device const float * x_row = x + row * ncols; + device int32_t * dst_row = dst + row * ncols; + + // initialize indices + if (col < ncols) { + dst_row[col] = col; + } + threadgroup_barrier(mem_flags::mem_threadgroup); + + for (int k = 2; k <= ncols; k *= 2) { + for (int j = k / 2; j > 0; j /= 2) { + int ixj = col ^ j; + if (ixj > col) { + if ((col & k) == 0) { + if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + SWAP(dst_row[col], dst_row[ixj]); + } + } else { + if (order == GGML_SORT_ASC ? 
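The argsort kernel follows the classic bitonic network, sorting an index array by the values it points at. A serial C reference of the same network for cross-checking, assuming, as the kernel does, that the row length is a power of two:

```c
#include <stdint.h>

// Serial reference of the bitonic argsort network used by the kernel: dst ends
// up holding the indices of x in ascending value order. n must be a power of 2.
static void argsort_bitonic_asc(const float * x, int32_t * dst, int n) {
    for (int i = 0; i < n; ++i) {
        dst[i] = i;
    }
    for (int k = 2; k <= n; k *= 2) {          // size of the bitonic sequences
        for (int j = k / 2; j > 0; j /= 2) {   // compare-exchange distance
            for (int col = 0; col < n; ++col) {
                const int ixj = col ^ j;
                if (ixj > col) {
                    const float a = x[dst[col]];
                    const float b = x[dst[ixj]];
                    // first half of each k-block sorts ascending, second half descending
                    const int swap = ((col & k) == 0) ? (a > b) : (a < b);
                    if (swap) {
                        const int32_t tmp = dst[col];
                        dst[col] = dst[ixj];
                        dst[ixj] = tmp;
                    }
                }
            }
        }
    }
}
```

In the Metal version each row gets one threadgroup, and a threadgroup barrier after every j step replaces the serial inner loop.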
x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + SWAP(dst_row[col], dst_row[ixj]); + } + } + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + } +} + +template [[host_name("kernel_argsort_f32_i32_asc")]] kernel argsort_t kernel_argsort_f32_i32; +template [[host_name("kernel_argsort_f32_i32_desc")]] kernel argsort_t kernel_argsort_f32_i32; + kernel void kernel_cpy_f16_f16( device const half * src0, device half * dst, @@ -1809,23 +2073,30 @@ kernel void kernel_mul_mv_q2_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int nb = ne00/QK_K; const int r0 = tgpig.x; const int r1 = tgpig.y; - const int r2 = tgpig.z; + const int im = tgpig.z; const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; const int ib_row = first_row * nb; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q2_K * x = (device const block_q2_K *) src0 + ib_row + offset0; - device const float * y = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + float yl[32]; float sumf[N_DST]={0.f}, all_sum; @@ -1834,11 +2105,11 @@ kernel void kernel_mul_mv_q2_K_f32( #if QK_K == 256 const int ix = tiisg/8; // 0...3 const int it = tiisg%8; // 0...7 - const int im = it/4; // 0 or 1 + const int iq = it/4; // 0 or 1 const int ir = it%4; // 0...3 const int is = (8*ir)/16;// 0 or 1 - device const float * y4 = y + ix * QK_K + 128 * im + 8 * ir; + device const float * y4 = y + ix * QK_K + 128 * iq + 8 * ir; for (int ib = ix; ib < nb; ib += 4) { @@ -1850,8 +2121,8 @@ kernel void kernel_mul_mv_q2_K_f32( yl[i+24] = y4[i+96]; sumy[3] += yl[i+24]; } - device const uint8_t * sc = (device const uint8_t *)x[ib].scales + 8*im + is; - device const uint16_t * qs = (device const uint16_t *)x[ib].qs + 16 * im + 4 * ir; + device const uint8_t * sc = (device const uint8_t *)x[ib].scales + 8*iq + is; + device const uint16_t * qs = (device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; device const half * dh = &x[ib].d; for (int row = 0; row < N_DST; row++) { @@ -1938,7 +2209,7 @@ kernel void kernel_mul_mv_q2_K_f32( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0 + r2*ne0*ne1 + first_row + row] = all_sum; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -1953,9 +2224,10 @@ kernel void kernel_mul_mv_q3_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint 
tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -1964,12 +2236,17 @@ kernel void kernel_mul_mv_q3_K_f32( const int64_t r0 = tgpig.x; const int64_t r1 = tgpig.y; - const int64_t r2 = tgpig.z; + const int64_t im = tgpig.z; const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q3_K * x = (device const block_q3_K *) src0 + first_row*nb + offset0; - device const float * yy = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; float yl[32]; @@ -2091,7 +2368,7 @@ kernel void kernel_mul_mv_q3_K_f32( } if (tiisg == 0) { for (int row = 0; row < 2; ++row) { - dst[r1*ne0 + r2*ne0*ne1 + first_row + row] = sumf1[row]; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = sumf1[row]; } } } @@ -2105,26 +2382,33 @@ kernel void kernel_mul_mv_q3_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int nb = ne00/QK_K; const int64_t r0 = tgpig.x; const int64_t r1 = tgpig.y; - const int64_t r2 = tgpig.z; + const int64_t im = tgpig.z; const int row = 2 * r0 + sgitg; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q3_K * x = (device const block_q3_K *) src0 + row*nb + offset0; - device const float * yy = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + const int ix = tiisg/4; const int il = 4 * (tiisg%4);// 0, 4, 8, 12 - const int im = il/8; // 0, 0, 1, 1 + const int iq = il/8; // 0, 0, 1, 1 const int in = il%8; // 0, 4, 0, 4 float2 sum = {0.f, 0.f}; @@ -2144,7 +2428,7 @@ kernel void kernel_mul_mv_q3_K_f32( const float d4 = d_all * ((int32_t)(s[0] & 0xF000) - 32768) * 1.f/262144.f; for (int l = 0; l < 4; l += 2) { - const uint16_t hm = h[l/2] >> im; + const uint16_t hm = h[l/2] >> iq; sum[0] += y[l+ 0] * d1 * ((int32_t)(q[l/2] & 0x0003) - ((hm & 0x0001) ? 0 : 4)) + y[l+16] * d2 * ((int32_t)(q[l/2] & 0x000c) - ((hm & 0x0004) ? 0 : 16)) + y[l+32] * d3 * ((int32_t)(q[l/2] & 0x0030) - ((hm & 0x0010) ? 
0 : 64)) @@ -2160,7 +2444,7 @@ kernel void kernel_mul_mv_q3_K_f32( const float tot = simd_sum(sumf); if (tiisg == 0) { - dst[r1*ne0 + r2*ne0*ne1 + row] = tot; + dst[r1*ne0 + im*ne0*ne1 + row] = tot; } } @@ -2178,10 +2462,11 @@ kernel void kernel_mul_mv_q4_K_f32( constant int64_t & ne12 [[buffer(11)]], constant int64_t & ne0 [[buffer(15)]], constant int64_t & ne1 [[buffer(16)]], - constant uint & gqa [[buffer(17)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const uint16_t kmask1 = 0x3f3f; const uint16_t kmask2 = 0x0f0f; @@ -2189,26 +2474,32 @@ kernel void kernel_mul_mv_q4_K_f32( const int ix = tiisg/8; // 0...3 const int it = tiisg%8; // 0...7 - const int im = it/4; // 0 or 1 + const int iq = it/4; // 0 or 1 const int ir = it%4; // 0...3 const int nb = ne00/QK_K; const int r0 = tgpig.x; const int r1 = tgpig.y; - const int r2 = tgpig.z; + const int im = tgpig.z; //const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; const int first_row = r0 * N_DST; const int ib_row = first_row * nb; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q4_K * x = (device const block_q4_K *) src0 + ib_row + offset0; - device const float * y = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + float yl[16]; float yh[16]; float sumf[N_DST]={0.f}, all_sum; const int step = sizeof(block_q4_K) * nb / 2; - device const float * y4 = y + ix * QK_K + 64 * im + 8 * ir; + device const float * y4 = y + ix * QK_K + 64 * iq + 8 * ir; uint16_t sc16[4]; thread const uint8_t * sc8 = (thread const uint8_t *)sc16; @@ -2223,8 +2514,8 @@ kernel void kernel_mul_mv_q4_K_f32( yh[i+8] = y4[i+160]; sumy[3] += yh[i+8]; } - device const uint16_t * sc = (device const uint16_t *)x[ib].scales + im; - device const uint16_t * q1 = (device const uint16_t *)x[ib].qs + 16 * im + 4 * ir; + device const uint16_t * sc = (device const uint16_t *)x[ib].scales + iq; + device const uint16_t * q1 = (device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; device const half * dh = &x[ib].d; for (int row = 0; row < N_DST; row++) { @@ -2268,7 +2559,7 @@ kernel void kernel_mul_mv_q4_K_f32( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0 + r2*ne0*ne1 + first_row + row] = all_sum; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -2282,9 +2573,10 @@ kernel void kernel_mul_mv_q4_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2295,12 +2587,18 @@ kernel void kernel_mul_mv_q4_K_f32( const int nb = ne00/QK_K; const int r0 = tgpig.x; const int r1 = tgpig.y; - const int r2 = tgpig.z; + const int im = tgpig.z; const int first_row = (r0 * 
N_SIMDGROUP + sgitg) * N_DST; const int ib_row = first_row * nb; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q4_K * x = (device const block_q4_K *) src0 + ib_row + offset0; - device const float * y = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + float yl[8]; float yh[8]; float sumf[N_DST]={0.f}, all_sum; @@ -2356,7 +2654,7 @@ kernel void kernel_mul_mv_q4_K_f32( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0+ r2*ne0*ne1 + first_row + row] = all_sum; + dst[r1*ne0+ im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -2371,9 +2669,10 @@ kernel void kernel_mul_mv_q5_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2382,12 +2681,17 @@ kernel void kernel_mul_mv_q5_K_f32( const int64_t r0 = tgpig.x; const int64_t r1 = tgpig.y; - const int r2 = tgpig.z; + const int im = tgpig.z; const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q5_K * x = (device const block_q5_K *) src0 + first_row*nb + offset0; - device const float * yy = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; float sumf[2]={0.f}; @@ -2403,15 +2707,15 @@ kernel void kernel_mul_mv_q5_K_f32( const int tid = tiisg/4; const int ix = tiisg%4; - const int im = tid/4; + const int iq = tid/4; const int ir = tid%4; const int n = 8; const int l0 = n*ir; - const int q_offset = 32*im + l0; - const int y_offset = 64*im + l0; + const int q_offset = 32*iq + l0; + const int y_offset = 64*iq + l0; - const uint8_t hm1 = 1u << (2*im); + const uint8_t hm1 = 1u << (2*iq); const uint8_t hm2 = hm1 << 1; const uint8_t hm3 = hm1 << 4; const uint8_t hm4 = hm2 << 4; @@ -2426,7 +2730,7 @@ kernel void kernel_mul_mv_q5_K_f32( device const uint8_t * q1 = x[i].qs + q_offset; device const uint8_t * qh = x[i].qh + l0; device const half * dh = &x[i].d; - device const uint16_t * a = (device const uint16_t *)x[i].scales + im; + device const uint16_t * a = (device const uint16_t *)x[i].scales + iq; device const float * y2 = y1 + 128; float4 sumy = {0.f, 0.f, 0.f, 0.f}; @@ -2482,7 +2786,7 @@ kernel void kernel_mul_mv_q5_K_f32( const int il = 4 * (tiisg/8); // 0, 4, 8, 12 const int ix = tiisg%8; - const int im = il/8; // 0, 0, 1, 1 + const int iq = il/8; // 0, 0, 1, 1 const int in = il%8; // 0, 4, 0, 4 device const float * y = yy + ix*QK_K + il; @@ -2507,7 +2811,7 @@ kernel void kernel_mul_mv_q5_K_f32( float2 acc = {0.f, 0.f}; for (int l = 0; l < 4; ++l) { - const uint8_t hl = h[l] >> im; + const uint8_t hl = h[l] >> iq; acc[0] += yl[l+0] * s[0] * ((int16_t)(q[l+ 0] & 0x0F) - (hl & 0x01 ? 
0 : 16)) + yl[l+4] * s[1] * ((int16_t)(q[l+16] & 0x0F) - (hl & 0x04 ? 0 : 16)); acc[1] += yh[l+0] * s[2] * ((int16_t)(q[l+ 0] & 0xF0) - (hl & 0x10 ? 0 : 256)) @@ -2529,7 +2833,7 @@ kernel void kernel_mul_mv_q5_K_f32( for (int row = 0; row < 2; ++row) { const float tot = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0 + r2*ne0*ne1 + first_row + row] = tot; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot; } } @@ -2544,9 +2848,10 @@ kernel void kernel_mul_mv_q6_K_f32( constant int64_t & ne02[[buffer(5)]], constant int64_t & ne10[[buffer(9)]], constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0[[buffer(15)]], - constant int64_t & ne1[[buffer(16)]], - constant uint & gqa[[buffer(17)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2560,12 +2865,17 @@ kernel void kernel_mul_mv_q6_K_f32( const int64_t r0 = tgpig.x; const int64_t r1 = tgpig.y; - const int r2 = tgpig.z; + const int im = tgpig.z; const int row = 2 * r0 + sgitg; - const uint offset0 = r2/gqa*(nb*ne0); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_q6_K * x = (device const block_q6_K *) src0 + row * nb + offset0; - device const float * yy = (device const float *) src1 + r1*ne10 + r2*ne00*ne1; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; float sumf = 0; @@ -2631,7 +2941,7 @@ kernel void kernel_mul_mv_q6_K_f32( const float tot = simd_sum(sumf); if (tiisg == 0) { - dst[r1*ne0 + r2*ne0*ne1 + row] = tot; + dst[r1*ne0 + im*ne0*ne1 + row] = tot; } } @@ -2941,24 +3251,25 @@ kernel void kernel_get_rows( // each block_q contains 16*nl weights template -kernel void kernel_mul_mm(device const uchar * src0, - device const uchar * src1, - device float * dst, - constant int64_t & ne00, - constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & ne0, - constant int64_t & ne1, - constant uint & gqa, - threadgroup uchar * shared_memory [[threadgroup(0)]], - uint3 tgpig[[threadgroup_position_in_grid]], - uint tiitg[[thread_index_in_threadgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { +void kernel_mul_mm_impl(device const uchar * src0, + device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & ne12, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { threadgroup half * sa = (threadgroup half *)(shared_memory); threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); @@ -2984,7 +3295,10 @@ kernel void kernel_mul_mm(device const uchar * src0, short il = (tiitg % THREAD_PER_ROW); - uint offset0 = im/gqa*nb02; + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + uint offset0 = (i12/r2)*nb02 + (i13/r3)*(nb02*ne02); ushort offset1 = il/nl; device 
const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01 + offset0) + offset1; @@ -3068,14 +3382,116 @@ kernel void kernel_mul_mm(device const uchar * src0, } } +template +kernel void kernel_mul_mm(device const uchar * src0, + device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & ne12, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + kernel_mul_mm_impl( + src0, + src1, + dst, + ne00, + ne02, + nb01, + nb02, + ne12, + nb10, + nb11, + nb12, + ne0, + ne1, + r2, + r3, + shared_memory, + tgpig, + tiitg, + sgitg); +} + +template +kernel void kernel_mul_mm_id( + device const int32_t * ids, + device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & ne12, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const uchar * src00, + device const uchar * src01, + device const uchar * src02, + device const uchar * src03, + device const uchar * src04, + device const uchar * src05, + device const uchar * src06, + device const uchar * src07, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const uchar * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + kernel_mul_mm_impl( + src0[ids[idx]], + src1, + dst, + ne00, + ne02, + nb01, + nb02, + ne12, + nb10, + nb11, + nb12, + ne0, + ne1, + r2, + r3, + shared_memory, + tgpig, + tiitg, + sgitg); +} + #if QK_K == 256 #define QK_NL 16 #else #define QK_NL 4 #endif -typedef void (get_rows_t)(device const void *, device const int *, device float *, constant int64_t &, \ - constant uint64_t &, constant uint64_t &, uint, uint, uint); +typedef void (get_rows_t)( + device const void * src0, + device const int * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb1, + uint, uint, uint); template [[host_name("kernel_get_rows_f32")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_f16")]] kernel get_rows_t kernel_get_rows; @@ -3104,8 +3520,10 @@ typedef void (mat_mm_t)( constant int64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & gqa, - threadgroup uchar *, uint3, uint, uint); + constant uint & r2, + constant uint & r3, + threadgroup uchar *, + uint3, uint, uint); template [[host_name("kernel_mul_mm_f32_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_f16_f32")]] kernel mat_mm_t kernel_mul_mm; @@ -3119,3 +3537,44 @@ template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; + +typedef void (mat_mm_id_t)( + device const int32_t * ids, + 
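`kernel_mul_mm_id` above is a thin dispatcher: it binds up to eight candidate src0 buffers plus an `ids` array and an `idx`, selects one buffer, and forwards everything else unchanged to the shared `kernel_mul_mm_impl`. A minimal C++ restatement of the selection step (illustrative only; the real code runs on the GPU with Metal address spaces and does not range-check the index):

```cpp
#include <cstdint>

// Pick the matrix the same way kernel_mul_mm_id does before delegating to the
// shared mul_mm implementation; the caller must keep ids[idx] inside [0, 8).
static const void * select_src0(const void * const srcs[8], const int32_t * ids, int idx) {
    return srcs[ids[idx]];
}
```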
device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant int64_t & nb01, + constant int64_t & nb02, + constant int64_t & ne12, + constant int64_t & nb10, + constant int64_t & nb11, + constant int64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const uchar * src00, + device const uchar * src01, + device const uchar * src02, + device const uchar * src03, + device const uchar * src04, + device const uchar * src05, + device const uchar * src06, + device const uchar * src07, + threadgroup uchar *, + uint3, uint, uint); + +template [[host_name("kernel_mul_mm_id_f32_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_f16_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q8_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q2_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; diff --git a/ggml.c b/ggml.c index f743df1f3..ca56f063c 100644 --- a/ggml.c +++ b/ggml.c @@ -233,24 +233,6 @@ inline static void * ggml_aligned_malloc(size_t size) { #define UNUSED GGML_UNUSED #define SWAP(x, y, T) do { T SWAP = x; x = y; y = SWAP; } while (0) -// -// tensor access macros -// - -#define GGML_TENSOR_UNARY_OP_LOCALS \ - GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ - GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ - GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ - GGML_TENSOR_LOCALS(size_t, nb, dst, nb) - -#define GGML_TENSOR_BINARY_OP_LOCALS \ - GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ - GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ - GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) \ - GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) \ - GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ - GGML_TENSOR_LOCALS(size_t, nb, dst, nb) - #if defined(GGML_USE_ACCELERATE) #include #if defined(GGML_USE_CLBLAST) // allow usage of CLBlast alongside Accelerate functions @@ -1613,6 +1595,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "GROUP_NORM", "MUL_MAT", + "MUL_MAT_ID", "OUT_PROD", "SCALE", @@ -1640,6 +1623,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "POOL_1D", "POOL_2D", "UPSCALE", + "ARGSORT", "FLASH_ATTN", "FLASH_FF", @@ -1666,7 +1650,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 68, "GGML_OP_COUNT != 68"); +static_assert(GGML_OP_COUNT == 70, "GGML_OP_COUNT != 70"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1695,6 +1679,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "group_norm(x)", "X*Y", + "X[i]*Y", "X*Y", "x*v", @@ -1722,6 +1707,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "pool_1d(x)", "pool_2d(x)", "upscale(x)", + "argsort(x)", "flash_attn(x)", 
"flash_ff(x)", @@ -1748,10 +1734,28 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 68, "GGML_OP_COUNT != 68"); +static_assert(GGML_OP_COUNT == 70, "GGML_OP_COUNT != 70"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); + +static const char * GGML_UNARY_OP_NAME[GGML_UNARY_OP_COUNT] = { + "ABS", + "SGN", + "NEG", + "STEP", + "TANH", + "ELU", + "RELU", + "GELU", + "GELU_QUICK", + "SILU", + "LEAKY", +}; + +static_assert(GGML_UNARY_OP_COUNT == 11, "GGML_UNARY_OP_COUNT != 11"); + + static_assert(sizeof(struct ggml_object)%GGML_MEM_ALIGN == 0, "ggml_object size must be a multiple of GGML_MEM_ALIGN"); static_assert(sizeof(struct ggml_tensor)%GGML_MEM_ALIGN == 0, "ggml_tensor size must be a multiple of GGML_MEM_ALIGN"); @@ -1771,6 +1775,7 @@ static void ggml_setup_op_has_task_pass(void) { p[GGML_OP_ACC ] = true; p[GGML_OP_MUL_MAT ] = true; + p[GGML_OP_MUL_MAT_ID ] = true; p[GGML_OP_OUT_PROD ] = true; p[GGML_OP_SET ] = true; p[GGML_OP_GET_ROWS_BACK ] = true; @@ -2023,6 +2028,20 @@ const char * ggml_op_symbol(enum ggml_op op) { return GGML_OP_SYMBOL[op]; } +const char * ggml_unary_op_name(enum ggml_unary_op op) { + return GGML_UNARY_OP_NAME[op]; +} + +const char * ggml_op_desc(const struct ggml_tensor * t) { + if (t->op == GGML_OP_UNARY) { + enum ggml_unary_op uop = ggml_get_unary_op(t); + return ggml_unary_op_name(uop); + } + else { + return ggml_op_name(t->op); + } +} + size_t ggml_element_size(const struct ggml_tensor * tensor) { return ggml_type_size(tensor->type); } @@ -3154,9 +3173,7 @@ static struct ggml_tensor * ggml_add_impl( struct ggml_tensor * a, struct ggml_tensor * b, bool inplace) { - // TODO: support less-strict constraint - // GGML_ASSERT(ggml_can_repeat(b, a)); - GGML_ASSERT(ggml_can_repeat_rows(b, a)); + GGML_ASSERT(ggml_can_repeat(b, a)); bool is_node = false; @@ -3371,9 +3388,7 @@ static struct ggml_tensor * ggml_mul_impl( struct ggml_tensor * a, struct ggml_tensor * b, bool inplace) { - // TODO: support less-strict constraint - // GGML_ASSERT(ggml_can_repeat(b, a)); - GGML_ASSERT(ggml_can_repeat_rows(b, a)); + GGML_ASSERT(ggml_can_repeat(b, a)); bool is_node = false; @@ -3418,7 +3433,7 @@ static struct ggml_tensor * ggml_div_impl( struct ggml_tensor * a, struct ggml_tensor * b, bool inplace) { - GGML_ASSERT(ggml_are_same_shape(a, b)); + GGML_ASSERT(ggml_can_repeat(b, a)); bool is_node = false; @@ -4056,6 +4071,49 @@ struct ggml_tensor * ggml_mul_mat( return result; } +// ggml_mul_mat_id + +struct ggml_tensor * ggml_mul_mat_id( + struct ggml_context * ctx, + struct ggml_tensor * as[], + struct ggml_tensor * ids, + int id, + struct ggml_tensor * b) { + + int64_t n_as = ids->ne[0]; + + GGML_ASSERT(ids->type == GGML_TYPE_I32); + GGML_ASSERT(ggml_is_vector(ids)); + GGML_ASSERT(n_as > 0 && n_as <= GGML_MAX_SRC - 2); + GGML_ASSERT(id >= 0 && id < n_as); + + bool is_node = false; + + if (as[0]->grad || b->grad) { + is_node = true; + } + + const int64_t ne[4] = { as[0]->ne[1], b->ne[1], b->ne[2], b->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, MAX(as[0]->n_dims, b->n_dims), ne); + + ggml_set_op_params_i32(result, 0, id); + + result->op = GGML_OP_MUL_MAT_ID; + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = ids; + result->src[1] = b; + + for (int64_t i = 0; i < n_as; i++) { + struct ggml_tensor * a = as[i]; + GGML_ASSERT(ggml_are_same_shape(as[0], a)); + GGML_ASSERT(ggml_can_mul_mat(a, b)); + GGML_ASSERT(!ggml_is_transposed(a)); + result->src[i + 2] = a; + } + + return result; +} + // ggml_out_prod struct ggml_tensor * ggml_out_prod( @@ -4209,7 +4267,7 @@ struct ggml_tensor * ggml_set_2d_inplace( struct ggml_tensor * b, size_t nb1, size_t offset) { - return ggml_set_impl(ctx, a, b, nb1, a->nb[2], a->nb[3], offset, false); + return ggml_set_impl(ctx, a, b, nb1, a->nb[2], a->nb[3], offset, true); } // ggml_cpy @@ -5468,6 +5526,43 @@ struct ggml_tensor * ggml_upscale( return ggml_upscale_impl(ctx, a, scale_factor); } +// ggml_argsort + +struct ggml_tensor * ggml_argsort( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_sort_order order) { + bool is_node = false; + + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_I32, a->n_dims, a->ne); + + ggml_set_op_params_i32(result, 0, (int32_t) order); + + result->op = GGML_OP_ARGSORT; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + + return result; +} + +// ggml_top_k + +struct ggml_tensor * ggml_top_k( + struct ggml_context * ctx, + struct ggml_tensor * a, + int k) { + GGML_ASSERT(a->ne[0] >= k); + + struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_DESC); + + result = ggml_view_4d(ctx, result, + k, result->ne[1], result->ne[2], result->ne[3], + result->nb[1], result->nb[2], result->nb[3], + 0); + + return result; +} + // ggml_flash_attn struct ggml_tensor * ggml_flash_attn( @@ -6827,7 +6922,7 @@ static void ggml_compute_forward_add_f32( const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - GGML_ASSERT(ggml_can_repeat_rows(src1, src0) && ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; @@ -6860,16 +6955,19 @@ static void ggml_compute_forward_add_f32( const int64_t i13 = i03 % ne13; const int64_t i12 = i02 % ne12; const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + for (int64_t r = 0; r < nr0; ++r) { #ifdef GGML_USE_ACCELERATE - vDSP_vadd(src0_ptr, 1, src1_ptr, 1, dst_ptr, 1, ne00); + vDSP_vadd(src0_ptr + r*ne10, 1, src1_ptr, 1, dst_ptr + r*ne10, 1, ne10); #else - ggml_vec_add_f32(ne00, dst_ptr, src0_ptr, src1_ptr); + ggml_vec_add_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); #endif + } } } else { // src1 is not contiguous @@ -6886,8 +6984,9 @@ static void ggml_compute_forward_add_f32( float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); - for (int i0 = 0; i0 < ne0; i0++) { - float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i0*nb10); + for (int64_t i0 = 0; i0 < ne0; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); dst_ptr[i0] = src0_ptr[i0] + *src1_ptr; } @@ -7607,7 +7706,7 @@ static void ggml_compute_forward_mul_f32( const 
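`ggml_mul_mat_id` added above evaluates `as[ids[id]] · b`: the candidate matrices become `src[2..]` of the node and the selected index is read from the I32 `ids` tensor at graph-compute time. A hedged usage sketch (helper name, `n_expert`, and the surrounding graph are placeholders, not code from this patch):

```cpp
#include "ggml.h"

// Build an indirect matmul node equivalent to ggml_mul_mat(experts[ids[0]], cur).
// All experts must share one shape, and `ids` must be filled with valid expert
// indices before the graph is computed.
static struct ggml_tensor * build_expert_matmul(struct ggml_context * ctx,
                                                struct ggml_tensor  * experts[],
                                                int                   n_expert,
                                                struct ggml_tensor  * cur) {
    struct ggml_tensor * ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_expert);
    // slot 0 of ids is the entry consulted here (the `id` argument)
    return ggml_mul_mat_id(ctx, experts, ids, /*id=*/0, cur);
}
```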
struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - GGML_ASSERT(ggml_can_repeat_rows(src1, src0) && ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; @@ -7630,7 +7729,6 @@ static void ggml_compute_forward_mul_f32( GGML_ASSERT( nb0 == sizeof(float)); GGML_ASSERT(nb00 == sizeof(float)); - GGML_ASSERT(ne00 == ne10); if (nb10 == sizeof(float)) { for (int64_t ir = ith; ir < nr; ir += nth) { @@ -7642,20 +7740,21 @@ static void ggml_compute_forward_mul_f32( const int64_t i13 = i03 % ne13; const int64_t i12 = i02 % ne12; const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + for (int64_t r = 0 ; r < nr0; ++r) { #ifdef GGML_USE_ACCELERATE - UNUSED(ggml_vec_mul_f32); + UNUSED(ggml_vec_mul_f32); - vDSP_vmul( src0_ptr, 1, src1_ptr, 1, dst_ptr, 1, ne00); + vDSP_vmul(src0_ptr + r*ne10, 1, src1_ptr, 1, dst_ptr + r*ne10, 1, ne10); #else - ggml_vec_mul_f32(ne00, dst_ptr, src0_ptr, src1_ptr); + ggml_vec_mul_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); #endif - // } - // } + } } } else { // src1 is not contiguous @@ -7673,8 +7772,9 @@ static void ggml_compute_forward_mul_f32( float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); - for (int64_t i0 = 0; i0 < ne00; i0++) { - float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i0*nb10); + for (int64_t i0 = 0; i0 < ne00; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); dst_ptr[i0] = src0_ptr[i0] * (*src1_ptr); } @@ -7708,14 +7808,16 @@ static void ggml_compute_forward_div_f32( const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - assert(params->ith == 0); - assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } - const int nr = ggml_nrows(src0); + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nr = ggml_nrows(src0); GGML_TENSOR_BINARY_OP_LOCALS @@ -7723,41 +7825,50 @@ static void ggml_compute_forward_div_f32( GGML_ASSERT(nb00 == sizeof(float)); if (nb10 == sizeof(float)) { - for (int ir = 0; ir < nr; ++ir) { - // src0, src1 and dst are same shape => same indices - const int i3 = ir/(ne2*ne1); - const int i2 = (ir - i3*ne2*ne1)/ne1; - const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + float * 
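With `ggml_can_repeat_rows` relaxed to `ggml_can_repeat` in `ggml_add`/`ggml_mul` (and `ggml_div` just below), src1 may now also repeat along dim 0: the inner loops tile src1 `nr0 = ne00/ne10` times across each row instead of requiring matching row lengths. A small sketch of a shape pairing that the stricter check used to reject (shapes are illustrative only):

```cpp
#include "ggml.h"

// src1 repeats along dim 0 (4096 = 32 * 128) and along dim 1 (one row broadcast
// to every row of src0).
static struct ggml_tensor * scale_head_slices(struct ggml_context * ctx) {
    struct ggml_tensor * x = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 4096, 8); // 8 rows
    struct ggml_tensor * s = ggml_new_tensor_2d(ctx, GGML_TYPE_F32,  128, 1); // per-slice scales
    return ggml_mul(ctx, x, s);                                               // s tiled 32x per row
}
```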
src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + + for (int64_t r = 0; r < nr0; ++r) { #ifdef GGML_USE_ACCELERATE - UNUSED(ggml_vec_div_f32); + UNUSED(ggml_vec_div_f32); - vDSP_vdiv( - (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11), 1, - (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01), 1, - (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ), 1, - ne0); + vDSP_vdiv(src1_ptr, 1, src0_ptr + r*ne10, 1, dst_ptr + r*ne10, 1, ne10); #else - ggml_vec_div_f32(ne0, - (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ), - (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01), - (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11)); + ggml_vec_div_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); #endif - // } - // } + } } } else { // src1 is not contiguous - for (int ir = 0; ir < nr; ++ir) { - // src0, src1 and dst are same shape => same indices - const int i3 = ir/(ne2*ne1); - const int i2 = (ir - i3*ne2*ne1)/ne1; - const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); - float * dst_ptr = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ); - float * src0_ptr = (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); - for (int i0 = 0; i0 < ne0; i0++) { - float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11 + i0*nb10); + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + + for (int64_t i0 = 0; i0 < ne00; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); dst_ptr[i0] = src0_ptr[i0] / (*src1_ptr); } @@ -8203,7 +8314,7 @@ static void ggml_compute_forward_repeat_f16( return; } - GGML_TENSOR_UNARY_OP_LOCALS; + GGML_TENSOR_UNARY_OP_LOCALS // guaranteed to be an integer due to the check in ggml_can_repeat const int nr0 = (int)(ne0/ne00); @@ -8348,6 +8459,7 @@ static void ggml_compute_forward_concat_f32( GGML_ASSERT(src0->nb[0] == sizeof(float)); const int ith = params->ith; + const int nth = params->nth; GGML_TENSOR_BINARY_OP_LOCALS @@ -8357,7 +8469,7 @@ static void ggml_compute_forward_concat_f32( GGML_ASSERT(nb10 == sizeof(float)); for (int i3 = 0; i3 < ne3; i3++) { - for (int i2 = ith; i2 < ne2; i2++) { + for (int i2 = ith; i2 < ne2; i2 += nth) { if (i2 < ne02) { // src0 for (int i1 = 0; i1 < ne1; i1++) { for (int i0 = 0; i0 < ne0; i0++) { @@ -9517,6 +9629,8 @@ static void ggml_compute_forward_mul_mat( char * wdata = params->wdata; const size_t row_size = ne10*ggml_type_size(vec_dot_type)/ggml_blck_size(vec_dot_type); + assert(params->wsize >= ne11*ne12*ne13*row_size); + for (int64_t i13 = 0; i13 < ne13; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { for (int64_t i11 = 0; i11 < ne11; ++i11) { @@ -9618,6 +9732,26 @@ static void ggml_compute_forward_mul_mat( } } +// ggml_compute_forward_mul_mat_id + +static void ggml_compute_forward_mul_mat_id( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * ids = dst->src[0]; + const 
struct ggml_tensor * src1 = dst->src[1]; + + const int id = ggml_get_op_params_i32(dst, 0); + + const int a_id = ((int32_t *)ids->data)[id]; + + GGML_ASSERT(a_id >= 0 && a_id < ids->ne[0]); + + const struct ggml_tensor * src0 = dst->src[a_id + 2]; + + ggml_compute_forward_mul_mat(params, src0, src1, dst); +} + // ggml_compute_forward_out_prod static void ggml_compute_forward_out_prod_f32( @@ -12021,6 +12155,67 @@ static void ggml_compute_forward_upscale( } } +// ggml_compute_forward_argsort + +static void ggml_compute_forward_argsort_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + return; + } + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(nb0 == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nr = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) ggml_get_op_params_i32(dst, 0); + + for (int64_t i = ith; i < nr; i += nth) { + int32_t * dst_data = (int32_t *)((char *) dst->data + i*nb1); + const float * src_data = (float *)((char *) src0->data + i*nb01); + + for (int64_t j = 0; j < ne0; j++) { + dst_data[j] = j; + } + + // C doesn't have a functional sort, so we do a bubble sort instead + for (int64_t j = 0; j < ne0; j++) { + for (int64_t k = j + 1; k < ne0; k++) { + if ((order == GGML_SORT_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || + (order == GGML_SORT_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { + int32_t tmp = dst_data[j]; + dst_data[j] = dst_data[k]; + dst_data[k] = tmp; + } + } + } + } +} + +static void ggml_compute_forward_argsort( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_argsort_f32(params, src0, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_flash_attn static void ggml_compute_forward_flash_attn_f32( @@ -13844,6 +14039,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor); } break; + case GGML_OP_MUL_MAT_ID: + { + ggml_compute_forward_mul_mat_id(params, tensor); + } break; case GGML_OP_OUT_PROD: { ggml_compute_forward_out_prod(params, tensor->src[0], tensor->src[1], tensor); @@ -13948,6 +14147,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_upscale(params, tensor->src[0], tensor); } break; + case GGML_OP_ARGSORT: + { + ggml_compute_forward_argsort(params, tensor->src[0], tensor); + } break; case GGML_OP_FLASH_ATTN: { const int32_t t = ggml_get_op_params_i32(tensor, 0); @@ -14598,6 +14801,10 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor zero_table); } } break; + case GGML_OP_MUL_MAT_ID: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_OUT_PROD: { GGML_ASSERT(false); // TODO: not implemented @@ -14936,6 +15143,10 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // TODO: not implemented } break; + case GGML_OP_ARGSORT: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_FLASH_ATTN: { struct ggml_tensor * flash_grad = NULL; @@ -15296,12 +15507,8 @@ struct ggml_cgraph * ggml_new_graph(struct ggml_context * ctx) { return 
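`ggml_compute_forward_argsort_f32` above fills each output row with 0..ne0-1 and then orders those indices with an O(n²) exchange sort (the in-code comment calls it a bubble sort; strictly it is a swap-on-compare selection loop). The contract it implements, restated with `std::sort` for clarity: dst[j] holds the index of the j-th smallest (ASC) or largest (DESC) element of the row, and `ggml_top_k` simply views the first k entries of the DESC ordering, so it returns indices, not values. Sketch only; tie ordering may differ from the kernel:

```cpp
#include <algorithm>
#include <cstdint>
#include <numeric>
#include <vector>

// dst[j] = index of the j-th element of `row` in the requested order.
static std::vector<int32_t> argsort_row(const std::vector<float> & row, bool ascending) {
    std::vector<int32_t> idx(row.size());
    std::iota(idx.begin(), idx.end(), 0);
    std::sort(idx.begin(), idx.end(), [&](int32_t a, int32_t b) {
        return ascending ? row[a] < row[b] : row[a] > row[b];
    });
    return idx;
}
```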
ggml_new_graph_custom(ctx, GGML_DEFAULT_GRAPH_SIZE, false); } -struct ggml_cgraph * ggml_graph_view(struct ggml_context * ctx, struct ggml_cgraph * cgraph0, int i0, int i1) { - const size_t obj_size = sizeof(struct ggml_cgraph); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, obj_size); - struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs); - - *cgraph = (struct ggml_cgraph) { +struct ggml_cgraph ggml_graph_view(struct ggml_cgraph * cgraph0, int i0, int i1) { + struct ggml_cgraph cgraph = { /*.size =*/ 0, /*.n_nodes =*/ i1 - i0, /*.n_leafs =*/ 0, @@ -15536,7 +15743,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { n_tasks = n_threads; } break; case GGML_OP_SUB: - case GGML_OP_DIV: case GGML_OP_SQR: case GGML_OP_SQRT: case GGML_OP_LOG: @@ -15569,10 +15775,13 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = n_threads; } break; + default: + GGML_ASSERT(false); } break; case GGML_OP_SILU_BACK: case GGML_OP_MUL: + case GGML_OP_DIV: case GGML_OP_NORM: case GGML_OP_RMS_NORM: case GGML_OP_RMS_NORM_BACK: @@ -15610,6 +15819,11 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } #endif } break; + case GGML_OP_MUL_MAT_ID: + { + // FIXME: blas + n_tasks = n_threads; + } break; case GGML_OP_OUT_PROD: { n_tasks = n_threads; @@ -15669,6 +15883,10 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = n_threads; } break; + case GGML_OP_ARGSORT: + { + n_tasks = n_threads; + } break; case GGML_OP_FLASH_ATTN: { n_tasks = n_threads; @@ -15731,6 +15949,10 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = 1; } break; + case GGML_OP_COUNT: + { + GGML_ASSERT(false); + } break; default: { fprintf(stderr, "%s: op not implemented: ", __func__); @@ -15927,6 +16149,23 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { cur = ggml_type_size(vec_dot_type)*ggml_nelements(node->src[1])/ggml_blck_size(vec_dot_type); } } break; + case GGML_OP_MUL_MAT_ID: + { + const struct ggml_tensor * a = node->src[2]; + const struct ggml_tensor * b = node->src[1]; + const enum ggml_type vec_dot_type = type_traits[a->type].vec_dot_type; +#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) + if (ggml_compute_forward_mul_mat_use_blas(a, b, node)) { + if (a->type != GGML_TYPE_F32) { + // here we need memory just for single 2D matrix from src0 + cur = ggml_type_size(GGML_TYPE_F32)*(a->ne[0]*a->ne[1]); + } + } else +#endif + if (b->type != vec_dot_type) { + cur = ggml_type_size(vec_dot_type)*ggml_nelements(b)/ggml_blck_size(vec_dot_type); + } + } break; case GGML_OP_OUT_PROD: { if (ggml_is_quantized(node->src[0]->type)) { @@ -15962,9 +16201,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { GGML_ASSERT(false); } } break; - case GGML_OP_IM2COL: - { - } break; case GGML_OP_CONV_TRANSPOSE_2D: { const int64_t ne00 = node->src[0]->ne[0]; // W @@ -17803,8 +18039,8 @@ size_t ggml_quantize_q5_0(const float * src, void * dst, int n, int k, int64_t * memcpy(&qh, &y[i].qh, sizeof(qh)); for (int j = 0; j < QK5_0; j += 2) { - const uint8_t vh0 = ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; - const uint8_t vh1 = ((qh & (1u << (j + 16))) >> (j + 12)); + const uint8_t vh0 = ((qh & (1u << (j/2 + 0 ))) >> (j/2 + 0 )) << 4; + const uint8_t vh1 = ((qh & (1u << (j/2 + 16))) >> (j/2 + 12)); // cast to 16 bins const uint8_t vi0 = ((y[i].qs[j/2] & 0x0F) | vh0) / 2; @@ -17833,8 
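`ggml_graph_view` now returns a `struct ggml_cgraph` by value (size 0, borrowing the parent graph's node array) instead of allocating a view object inside the context. A sketch of the resulting call-site shape, assuming an already-built graph `gf` and an initialized backend; not taken from this patch:

```cpp
#include "ggml.h"
#include "ggml-backend.h"

// Compute only nodes [i0, i1) of an existing graph; the view borrows gf's node
// array, so gf must outlive gv.
static void compute_subgraph(ggml_backend_t backend, struct ggml_cgraph * gf, int i0, int i1) {
    struct ggml_cgraph gv = ggml_graph_view(gf, i0, i1);
    ggml_backend_graph_compute(backend, &gv);
}
```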
+18069,8 @@ size_t ggml_quantize_q5_1(const float * src, void * dst, int n, int k, int64_t * memcpy(&qh, &y[i].qh, sizeof(qh)); for (int j = 0; j < QK5_1; j += 2) { - const uint8_t vh0 = ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; - const uint8_t vh1 = ((qh & (1u << (j + 16))) >> (j + 12)); + const uint8_t vh0 = ((qh & (1u << (j/2 + 0 ))) >> (j/2 + 0 )) << 4; + const uint8_t vh1 = ((qh & (1u << (j/2 + 16))) >> (j/2 + 12)); // cast to 16 bins const uint8_t vi0 = ((y[i].qs[j/2] & 0x0F) | vh0) / 2; @@ -18024,6 +18260,7 @@ struct gguf_kv { struct gguf_header { char magic[4]; + uint32_t version; uint64_t n_tensors; // GGUFv2 uint64_t n_kv; // GGUFv2 @@ -18113,7 +18350,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p for (uint32_t i = 0; i < sizeof(magic); i++) { if (magic[i] != GGUF_MAGIC[i]) { - fprintf(stderr, "%s: invalid magic characters %s.\n", __func__, magic); + fprintf(stderr, "%s: invalid magic characters '%c%c%c%c'\n", __func__, magic[0], magic[1], magic[2], magic[3]); fclose(file); return NULL; } @@ -18128,7 +18365,6 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p { strncpy(ctx->header.magic, magic, 4); - ctx->kv = NULL; ctx->infos = NULL; ctx->data = NULL; diff --git a/ggml.h b/ggml.h index 2f6787d4e..a8f10cbd5 100644 --- a/ggml.h +++ b/ggml.h @@ -283,6 +283,20 @@ const type prefix##3 = (pointer)->array[3]; \ GGML_UNUSED(prefix##3); +#define GGML_TENSOR_UNARY_OP_LOCALS \ + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + +#define GGML_TENSOR_BINARY_OP_LOCALS \ + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) \ + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + #ifdef __cplusplus extern "C" { #endif @@ -381,6 +395,7 @@ extern "C" { GGML_OP_GROUP_NORM, GGML_OP_MUL_MAT, + GGML_OP_MUL_MAT_ID, GGML_OP_OUT_PROD, GGML_OP_SCALE, @@ -407,8 +422,8 @@ extern "C" { GGML_OP_CONV_TRANSPOSE_2D, GGML_OP_POOL_1D, GGML_OP_POOL_2D, - GGML_OP_UPSCALE, // nearest interpolate + GGML_OP_ARGSORT, GGML_OP_FLASH_ATTN, GGML_OP_FLASH_FF, @@ -448,7 +463,9 @@ extern "C" { GGML_UNARY_OP_GELU, GGML_UNARY_OP_GELU_QUICK, GGML_UNARY_OP_SILU, - GGML_UNARY_OP_LEAKY + GGML_UNARY_OP_LEAKY, + + GGML_UNARY_OP_COUNT, }; enum ggml_object_type { @@ -631,6 +648,9 @@ extern "C" { GGML_API const char * ggml_op_name (enum ggml_op op); GGML_API const char * ggml_op_symbol(enum ggml_op op); + GGML_API const char * ggml_unary_op_name(enum ggml_unary_op op); + GGML_API const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name + GGML_API size_t ggml_element_size(const struct ggml_tensor * tensor); GGML_API bool ggml_is_quantized(enum ggml_type type); @@ -1027,6 +1047,15 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + // indirect matrix multiplication + // ggml_mul_mat_id(ctx, as, ids, id, b) ~= ggml_mul_mat(as[ids[id]], b) + GGML_API struct ggml_tensor * ggml_mul_mat_id( + struct ggml_context * ctx, + struct ggml_tensor * as[], + struct ggml_tensor * ids, + int id, + struct ggml_tensor * b); + // A: m columns, n rows, // B: p columns, n rows, // result is m columns, p rows @@ -1520,6 +1549,23 @@ extern "C" { struct ggml_tensor * a, int scale_factor); + // sort rows + enum ggml_sort_order { + GGML_SORT_ASC, + GGML_SORT_DESC, + 
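The q5_0/q5_1 hunks above fix the high-bit indexing in the histogram path: byte `qs[j/2]` packs elements `j/2` and `j/2 + 16`, whose fifth bits sit at bits `j/2` and `j/2 + 16` of `qh`, so the shift amounts must use `j/2`, not `j`. For reference, the layout the corrected arithmetic assumes (sketch; QK5_0 = 32):

```cpp
#include <cstdint>

// Reconstruct the two 5-bit values packed in qs[i] of a q5_0 block: qs[i] holds
// the low nibbles of elements i and i+16, and bits i and i+16 of qh carry their
// fifth bits.
static void unpack_q5_0_pair(const uint8_t qs[16], uint32_t qh, int i,
                             uint8_t * v_lo, uint8_t * v_hi) {
    const uint8_t h0 = ((qh >> (i +  0)) & 1u) << 4;
    const uint8_t h1 = ((qh >> (i + 16)) & 1u) << 4;
    *v_lo = (uint8_t)((qs[i] & 0x0F) | h0);   // element i      in [0, 32)
    *v_hi = (uint8_t)((qs[i] >>   4) | h1);   // element i + 16 in [0, 32)
}
```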
}; + + GGML_API struct ggml_tensor * ggml_argsort( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_sort_order order); + + // top k elements per row + GGML_API struct ggml_tensor * ggml_top_k( + struct ggml_context * ctx, + struct ggml_tensor * a, + int k); + GGML_API struct ggml_tensor * ggml_flash_attn( struct ggml_context * ctx, struct ggml_tensor * q, @@ -1581,7 +1627,6 @@ extern "C" { int kh); // used in sam - GGML_API struct ggml_tensor * ggml_add_rel_pos( struct ggml_context * ctx, struct ggml_tensor * a, @@ -1756,7 +1801,7 @@ extern "C" { GGML_API struct ggml_cgraph * ggml_new_graph (struct ggml_context * ctx); // size = GGML_DEFAULT_GRAPH_SIZE, grads = false GGML_API struct ggml_cgraph * ggml_new_graph_custom (struct ggml_context * ctx, size_t size, bool grads); GGML_API struct ggml_cgraph * ggml_graph_dup (struct ggml_context * ctx, struct ggml_cgraph * cgraph); - GGML_API struct ggml_cgraph * ggml_graph_view (struct ggml_context * ctx, struct ggml_cgraph * cgraph, int i0, int i1); + GGML_API struct ggml_cgraph ggml_graph_view (struct ggml_cgraph * cgraph, int i0, int i1); GGML_API void ggml_graph_cpy (struct ggml_cgraph * src, struct ggml_cgraph * dst); GGML_API void ggml_graph_reset (struct ggml_cgraph * cgraph); // zero grads GGML_API void ggml_graph_clear (struct ggml_cgraph * cgraph); diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 4024531b1..0097db435 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -20,5 +20,6 @@ cp -rpv ../ggml/include/ggml/ggml.h ./ggml.h cp -rpv ../ggml/include/ggml/ggml-alloc.h ./ggml-alloc.h cp -rpv ../ggml/include/ggml/ggml-backend.h ./ggml-backend.h -cp -rpv ../ggml/tests/test-opt.cpp ./tests/test-opt.cpp -cp -rpv ../ggml/tests/test-grad0.cpp ./tests/test-grad0.cpp +cp -rpv ../ggml/tests/test-opt.cpp ./tests/test-opt.cpp +cp -rpv ../ggml/tests/test-grad0.cpp ./tests/test-grad0.cpp +cp -rpv ../ggml/tests/test-backend-ops.cpp ./tests/test-backend-ops.cpp diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index c8b4bc254..e42237c7a 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -22,26 +22,32 @@ endfunction() llama_build_and_test_executable(test-quantize-fns.cpp) llama_build_and_test_executable(test-quantize-perf.cpp) llama_build_and_test_executable(test-sampling.cpp) + llama_build_executable(test-tokenizer-0-llama.cpp) llama_test_executable (test-tokenizer-0-llama test-tokenizer-0-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf) + llama_build_executable(test-tokenizer-0-falcon.cpp) llama_test_executable (test-tokenizer-0-falcon test-tokenizer-0-falcon.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) + llama_build_executable(test-tokenizer-1-llama.cpp) -llama_test_executable (test-tokenizer-1-llama test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf) -llama_test_executable(test-tokenizer-1-baichuan test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-baichuan.gguf) +llama_test_executable (test-tokenizer-1-llama test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf) +llama_test_executable (test-tokenizer-1-baichuan test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-baichuan.gguf) + llama_build_executable(test-tokenizer-1-bpe.cpp) -llama_test_executable (test-tokenizer-1-falcon test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) -llama_test_executable(test-tokenizer-1-aquila test-tokenizer-1-bpe.cpp 
${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-aquila.gguf) -llama_test_executable(test-tokenizer-1-mpt test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf) -llama_test_executable(test-tokenizer-1-stablelm-3b-4e1t test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-stablelm-3b-4e1t.gguf) -llama_test_executable(test-tokenizer-1-gpt-neox test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-neox.gguf) -llama_test_executable(test-tokenizer-1-refact test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-refact.gguf) -llama_test_executable(test-tokenizer-1-starcoder test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-starcoder.gguf) -# llama_test_executable(test-tokenizer-1-bloom test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-bloom.gguf) # BIG +llama_test_executable (test-tokenizer-1-falcon test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) +llama_test_executable (test-tokenizer-1-aquila test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-aquila.gguf) +llama_test_executable (test-tokenizer-1-mpt test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf) +llama_test_executable (test-tokenizer-1-stablelm-3b-4e1t test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-stablelm-3b-4e1t.gguf) +llama_test_executable (test-tokenizer-1-gpt-neox test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-neox.gguf) +llama_test_executable (test-tokenizer-1-refact test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-refact.gguf) +llama_test_executable (test-tokenizer-1-starcoder test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-starcoder.gguf) +# llama_test_executable (test-tokenizer-1-bloom test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-bloom.gguf) # BIG + llama_build_and_test_executable(test-grammar-parser.cpp) llama_build_and_test_executable(test-llama-grammar.cpp) -llama_build_and_test_executable(test-grad0.cpp) # SLOW +llama_build_and_test_executable(test-grad0.cpp) # llama_build_and_test_executable(test-opt.cpp) # SLOW +llama_build_and_test_executable(test-backend-ops.cpp) llama_build_and_test_executable(test-rope.cpp) diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp new file mode 100644 index 000000000..e0155ac1c --- /dev/null +++ b/tests/test-backend-ops.cpp @@ -0,0 +1,1357 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float max = 1.0f) { + size_t size = ggml_nelements(tensor); + std::vector data(size); + + std::random_device rd; + +#if 0 + std::default_random_engine generator(rd()); + std::uniform_real_distribution distribution(min, max); + + for (size_t i = 0; i < size; i++) { + data[i] = distribution(generator); + } +#endif + auto init_thread = [&](size_t start, size_t end) { + std::default_random_engine generator(rd()); + std::uniform_real_distribution distribution(min, max); + + for (size_t i = start; i < end; i++) { + data[i] = distribution(generator); + } + }; + + size_t n_threads = std::thread::hardware_concurrency(); + std::vector threads; + threads.reserve(n_threads); + for (size_t i = 0; i < n_threads; i++) { + size_t start = i*size/n_threads; 
+ size_t end = (i+1)*size/n_threads; + threads.emplace_back(init_thread, start, end); + } + for (auto & t : threads) { + t.join(); + } + + if (tensor->type == GGML_TYPE_F32) { + ggml_backend_tensor_set(tensor, data.data(), 0, size * sizeof(float)); + } else if (ggml_is_quantized(tensor->type) || tensor->type == GGML_TYPE_F16) { + GGML_ASSERT(size % ggml_blck_size(tensor->type) == 0); + std::vector dataq(ggml_type_size(tensor->type)*size/ggml_blck_size(tensor->type)); + int64_t hist[16]; + ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist); + ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size()); + } else { + GGML_ASSERT(false); + } +} + +static std::vector tensor_to_float(const ggml_tensor * t) { + std::vector tv; + tv.reserve(ggml_nelements(t)); + + std::vector buf(ggml_nbytes(t)); + ggml_backend_tensor_get(t, buf.data(), 0, ggml_nbytes(t)); + + // access elements by index to avoid gaps in views + for (int64_t i3 = 0; i3 < t->ne[3]; i3++) { + for (int64_t i2 = 0; i2 < t->ne[2]; i2++) { + for (int64_t i1 = 0; i1 < t->ne[1]; i1++) { + for (int64_t i0 = 0; i0 < t->ne[0]; i0++) { + size_t i = i3*t->nb[3] + i2*t->nb[2] + i1*t->nb[1] + i0*t->nb[0]; + float v; + if (t->type == GGML_TYPE_F16) { + v = (float) ggml_fp16_to_fp32(*(ggml_fp16_t*)&buf[i]); + } else if (t->type == GGML_TYPE_F32) { + v = *(float *) &buf[i]; + } else if (t->type == GGML_TYPE_I32) { + v = *(int32_t *) &buf[i]; + } else { + GGML_ASSERT(false); + } + tv.push_back(v); + } + } + } + } + + return tv; +} + +/* +static double cosine_similarity(const float * v1, const float * v2, size_t n) { + double dot = 0.0; + double mag1 = 0.0; + double mag2 = 0.0; + + for (size_t i = 0; i < n; i++) { + if (std::isnan(v1[i]) || std::isnan(v2[i])) { + return -1.0f; + } + if (std::isinf(v1[i]) && std::isinf(v2[i])) { + continue; + } + dot += v1[i]*v2[i]; + mag1 += v1[i]*v1[i]; + mag2 += v2[i]*v2[i]; + } + + return dot/sqrt(mag1*mag2); +} + +static float distance(const float * v1, const float * v2, size_t n) { + double d = 0.0; + + for (size_t i = 0; i < n; i++) { + if (std::isnan(v1[i]) || std::isnan(v2[i])) { + return INFINITY; + } + if (std::isinf(v1[i]) && std::isinf(v2[i])) { + continue; + } + d += (v1[i] - v2[i])*(v1[i] - v2[i]); + } + + return sqrt(d); +} + +static float vec_len(const float * v, size_t n) { + double d = 0.0; + + for (size_t i = 0; i < n; i++) { + if (std::isnan(v[i])) { + return INFINITY; + } + if (std::isinf(v[i])) { + continue; + } + d += v[i]*v[i]; + } + + return sqrt(d); +} +*/ + +// normalized mean squared error = mse(a, b) / mse(a, 0) +static double nmse(const float * a, const float * b, size_t n) { + double mse_a_b = 0.0; + double mse_a_0 = 0.0; + + for (size_t i = 0; i < n; i++) { + float a_i = a[i]; + float b_i = b[i]; + + mse_a_b += (a_i - b_i) * (a_i - b_i); + mse_a_0 += a_i * a_i; + } + + return mse_a_b / mse_a_0; +} + +// utils for printing the variables of the test cases +#define VAR_TO_STR(x) (#x "=" + var_to_str(x)) + +template +static std::string var_to_str(const T & x) { + return std::to_string(x); +} + +template +static std::string var_to_str(const T (&x)[N]) { + std::string s = "["; + for (size_t i = 0; i < N; i++) { + if (i > 0) { + s += ","; + } + s += var_to_str(x[i]); + } + s += "]"; + return s; +} + +template +static std::string var_to_str(const std::array & x) { + std::string s = "["; + for (size_t i = 0; i < N; i++) { + if (i > 0) { + s += ","; + } + s += var_to_str(x[i]); + } + s += "]"; + return s; +} + +//static std::string var_to_str(ggml_unary_op unary_op) { 
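The `nmse` helper above is the acceptance metric for the whole test file: a node passes when the normalized MSE between the two backends' outputs stays below the per-case `max_nmse_err` threshold (1e-6 by default, looser for matmuls). A standalone restatement of that check (sketch; multiplication avoids dividing by a zero reference energy):

```cpp
#include <cstddef>

// Equivalent to comparing nmse(ref, out, n) against max_nmse.
static bool outputs_close(const float * ref, const float * out, size_t n, double max_nmse) {
    double mse = 0.0, ref2 = 0.0;
    for (size_t i = 0; i < n; i++) {
        const double d = (double) ref[i] - (double) out[i];
        mse  += d * d;
        ref2 += (double) ref[i] * (double) ref[i];
    }
    return mse <= max_nmse * ref2;
}
```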
+// return ggml_unary_op_name(unary_op); +//} + +static std::string var_to_str(ggml_type type) { + return ggml_type_name(type); +} + +#define VARS_TO_STR1(a) VAR_TO_STR(a) +#define VARS_TO_STR2(a, b) VAR_TO_STR(a) + "," + VAR_TO_STR(b) +#define VARS_TO_STR3(a, b, c) VAR_TO_STR(a) + "," + VARS_TO_STR2(b, c) +#define VARS_TO_STR4(a, b, c, d) VAR_TO_STR(a) + "," + VARS_TO_STR3(b, c, d) +#define VARS_TO_STR5(a, b, c, d, e) VAR_TO_STR(a) + "," + VARS_TO_STR4(b, c, d, e) +#define VARS_TO_STR6(a, b, c, d, e, f) VAR_TO_STR(a) + "," + VARS_TO_STR5(b, c, d, e, f) +#define VARS_TO_STR7(a, b, c, d, e, f, g) VAR_TO_STR(a) + "," + VARS_TO_STR6(b, c, d, e, f, g) +#define VARS_TO_STR8(a, b, c, d, e, f, g, h) VAR_TO_STR(a) + "," + VARS_TO_STR7(b, c, d, e, f, g, h) +#define VARS_TO_STR9(a, b, c, d, e, f, g, h, i) VAR_TO_STR(a) + "," + VARS_TO_STR8(b, c, d, e, f, g, h, i) +#define VARS_TO_STR10(a, b, c, d, e, f, g, h, i, j) VAR_TO_STR(a) + "," + VARS_TO_STR9(b, c, d, e, f, g, h, i, j) +#define VARS_TO_STR11(a, b, c, d, e, f, g, h, i, j, k) VAR_TO_STR(a) + "," + VARS_TO_STR10(b, c, d, e, f, g, h, i, j, k) + + +// accept FLT_MAX as infinity +static bool isinf_or_max(float f) { + return std::isinf(f) || f == FLT_MAX || f == -FLT_MAX; +} + +static bool ggml_is_view_op(enum ggml_op op) { + return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; +} + +struct test_case { + virtual ~test_case() {} + + virtual std::string vars() { + return ""; + } + + virtual ggml_tensor * build_graph(ggml_context * ctx) = 0; + + virtual double max_nmse_err() { + return 1e-6; + } + + virtual void initialize_tensors(ggml_context * ctx) { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { + init_tensor_uniform(t); + } + } + + virtual size_t op_size(ggml_tensor * t) { + size_t size = ggml_nbytes(t); + // add source tensors + for (int i = 0; i < GGML_MAX_SRC; i++) { + if (t->src[i] != NULL) { + size += ggml_nbytes(t->src[i]); + } + } + return size; + } + + bool eval(ggml_backend_t backend1, ggml_backend_t backend2, const char * op_name) { + ggml_init_params params = { + /* .mem_size = */ ggml_tensor_overhead()*128 + ggml_graph_overhead(), + /* .mem_base = */ NULL, + /* .no_alloc = */ true, + }; + ggml_context * ctx = ggml_init(params); + + ggml_tensor * out = build_graph(ctx); + + if (op_name != nullptr && strcmp(ggml_op_desc(out), op_name) != 0) { + //printf(" %s: skipping\n", ggml_op_desc(out)); + ggml_free(ctx); + return true; + } + + printf(" %s(%s): ", ggml_op_desc(out), vars().c_str()); + fflush(stdout); + + // check if backends support op + for (ggml_backend_t backend : {backend1, backend2}) { + if (!ggml_backend_supports_op(backend, out)) { + printf("not supported\n"); + ggml_free(ctx); + return true; + } + } + + // allocate + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend1); + + // build graph + ggml_cgraph * gf = ggml_new_graph(ctx); + ggml_build_forward_expand(gf, out); + + // randomize tensors + initialize_tensors(ctx); + + // compare + struct callback_userdata { + bool ok; + double max_err; + }; + + callback_userdata ud { + true, + max_nmse_err(), + }; + + auto callback = [](int index, ggml_tensor * t1, ggml_tensor * t2, void * user_data) -> bool { + std::vector f1 = tensor_to_float(t1); + std::vector f2 = tensor_to_float(t2); + callback_userdata * ud = (callback_userdata *) user_data; + + for (size_t i = 0; i < f1.size(); i++) { + // check for nans + if (std::isnan(f1[i]) || std::isnan(f2[i])) { + 
printf("NaN at index %zu ", i); + ud->ok = false; + return true; + } + // check for infs: both must be inf of the same sign, or both must be finite + if (isinf_or_max(f1[i]) || isinf_or_max(f2[i])) { + if (isinf_or_max(f1[i]) && isinf_or_max(f2[i])) { + if (std::signbit(f1[i]) != std::signbit(f2[i])) { + printf("inf sign mismatch: %f %f ", f1[i], f2[i]); + ud->ok = false; + return true; + } + } else { + printf("inf mismatch: %f %f ", f1[i], f2[i]); + ud->ok = false; + return true; + } + } + } + + double err = nmse(f1.data(), f2.data(), f1.size()); + if (err > ud->max_err) { + printf("NMSE = %f ", err); + ud->ok = false; + } + return true; + }; + + ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); + + if (ud.ok) { + printf("\033[1;32mOK\033[0m\n"); + } else { + printf("\033[1;31mFAIL\033[0m\n"); + } + + ggml_backend_buffer_free(buf); + + ggml_free(ctx); + + return ud.ok; + } + + bool eval_perf(ggml_backend_t backend, const char * op_name) { + static const size_t graph_nodes = 8192; + + ggml_init_params params = { + /* .mem_size = */ ggml_tensor_overhead()*128 + ggml_graph_overhead_custom(graph_nodes, false), + /* .mem_base = */ NULL, + /* .no_alloc = */ true, + }; + ggml_context * ctx = ggml_init(params); + + ggml_tensor * out = build_graph(ctx); + + if (op_name != nullptr && strcmp(ggml_op_desc(out), op_name) != 0) { + //printf(" %s: skipping\n", ggml_op_desc(out)); + ggml_free(ctx); + return true; + } + + int len = printf(" %s(%s): ", ggml_op_desc(out), vars().c_str()); + fflush(stdout); + + // check if backends support op + if (!ggml_backend_supports_op(backend, out)) { + printf("not supported\n"); + ggml_free(ctx); + return true; + } + + // align while also leaving some margin for variations in parameters + int align = 20; + int last = (len + align - 1) / align * align; + if (last - len < 5) { + last += align; + } + last = std::max(last, 60); + printf("%*s", last - len, ""); + + // allocate + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend); + + // randomize tensors + initialize_tensors(ctx); + + // build graph + ggml_cgraph * gf = ggml_new_graph_custom(ctx, graph_nodes, false); + ggml_build_forward_expand(gf, out); + + // warmup run + ggml_backend_graph_compute(backend, gf); + + // duplicate the op + size_t target_size = ggml_backend_is_cpu(backend) ? 
1ULL << 33 : 1ULL << 35; // 8 GB CPU, 32 GB GPU + int n_runs = std::min((size_t)gf->size - gf->n_nodes, target_size / op_size(out)) + 1; + for (int i = 1; i < n_runs; i++) { + gf->nodes[gf->n_nodes++] = out; + } + + // calculate memory + size_t mem = n_runs * op_size(out); + auto tensor_op_size = [](ggml_tensor * t) { + size_t size = ggml_nbytes(t); + // add source tensors + for (int i = 0; i < GGML_MAX_SRC; i++) { + if (t->src[i] != NULL) { + size += ggml_nbytes(t->src[i]); + } + } + return size; + }; + for (int i = 0; i < gf->n_nodes; i++) { + if (ggml_is_view_op(gf->nodes[i]->op) || gf->nodes[i] == out) + continue; + mem += tensor_op_size(gf->nodes[i]); + } + + // run + ggml_backend_synchronize(backend); + + int64_t start_time = ggml_time_us(); + ggml_backend_graph_compute(backend, gf); + ggml_backend_synchronize(backend); + int64_t end_time = ggml_time_us(); + double time_us = end_time - start_time; + + printf(" %5d runs - %8.2f us/run - %8zu kB/run - \033[1;34m%7.2f GB/s\033[0m\n", + n_runs, + time_us / n_runs, + op_size(out) / 1024, + mem / (time_us/1e6) / 1024.0 / 1024.0 / 1024.0); + + ggml_backend_buffer_free(buf); + + ggml_free(ctx); + + return true; + } +}; + +// GGML_OP_UNARY +struct test_unary : public test_case { + const ggml_unary_op op; + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_unary(ggml_unary_op op, + ggml_type type = GGML_TYPE_F32, + std::array ne = {128, 10, 10, 10}) + : op(op), type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * in = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_unary(ctx, in, op); + return out; + } +}; + +// GGML_OP_GET_ROWS +struct test_get_rows : public test_case { + const ggml_type type; + const int n; // cols + const int m; // rows + const int r; // rows to get + + std::string vars() override { + return VARS_TO_STR4(type, n, m, r); + } + + test_get_rows(ggml_type type = GGML_TYPE_F32, int n = 10, int m = 5, int r = 3) + : type(type), n(n), m(m), r(r) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * in = ggml_new_tensor_2d(ctx, type, n, m); + ggml_tensor * rows = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, r); + ggml_tensor * out = ggml_get_rows(ctx, in, rows); + return out; + } + + void initialize_tensors(ggml_context * ctx) override { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (t->type == GGML_TYPE_I32) { + // rows + std::vector data(r); + for (int i = 0; i < r; i++) { + data[i] = rand() % m; + } + ggml_backend_tensor_set(t, data.data(), 0, r * sizeof(int)); + } else { + init_tensor_uniform(t); + } + } + } +}; + +// GGML_OP_REPEAT +struct test_repeat : public test_case { + const ggml_type type; + const std::array ne; + const std::array nr; + + std::string vars() override { + return VARS_TO_STR3(type, ne, nr); + } + + size_t op_size(ggml_tensor * t) override { + return ggml_nbytes(t) * 2; + } + + test_repeat(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}, + std::array nr = {2, 2, 2, 2}) + : type(type), ne(ne), nr(nr) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * target = ggml_new_tensor_4d(ctx, type, ne[0]*nr[0], ne[1]*nr[1], ne[2]*nr[2], ne[3]*nr[3]); + ggml_tensor * src = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_repeat(ctx, src, target); + return out; + } +}; + +// GGML_OP_DUP +struct test_dup : public test_case { + const 
ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_dup(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 1}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * src = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_dup(ctx, src); + return out; + } +}; + +// GGML_OP_CPY +struct test_cpy : public test_case { + const ggml_type type_src; + const ggml_type type_dst; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR3(type_src, type_dst, ne); + } + + size_t op_size(ggml_tensor * t) override { + return ggml_nbytes(t) + ggml_nbytes(t->src[0]); + } + + test_cpy(ggml_type type_src = GGML_TYPE_F32, ggml_type type_dst = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 1}) + : type_src(type_src), type_dst(type_dst), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * src = ggml_new_tensor(ctx, type_src, 4, ne.data()); + ggml_tensor * dst = ggml_new_tensor(ctx, type_dst, 4, ne.data()); + ggml_tensor * out = ggml_cpy(ctx, src, dst); + return out; + } +}; + +// GGML_OP_CONT +struct test_cont : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_cont(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 1}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * src = ggml_new_tensor(ctx, type, 4, ne.data()); + src = ggml_transpose(ctx, src); + ggml_tensor * out = ggml_cont(ctx, src); + + return out; + } +}; + +// GGML_OP_ADD +// GGML_OP_MUL +// GGML_OP_DIV +struct test_bin_bcast : public test_case { + using op_t = ggml_tensor * (*) (ggml_context *, ggml_tensor *, ggml_tensor *); + op_t op; + const ggml_type type; + const std::array ne; + const std::array nr; + + std::string vars() override { + return VARS_TO_STR3(type, ne, nr); + } + + size_t op_size(ggml_tensor * t) override { + return ggml_nbytes(t) * 3; + } + + test_bin_bcast(op_t op, ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 1, 1}, + std::array nr = {1, 2, 1, 1}) + : op(op), type(type), ne(ne), nr(nr) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor_4d(ctx, type, ne[0]*nr[0], ne[1]*nr[1], ne[2]*nr[2], ne[3]*nr[3]); + ggml_tensor * b = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = op(ctx, a, b); + return out; + } + + void initialize_tensors(ggml_context * ctx) override { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (op == ggml_div) { + // avoid division by zero + init_tensor_uniform(t, 1.0f, 2.0f); + } else { + init_tensor_uniform(t); + } + } + } +}; + +// GGML_OP_SCALE +struct test_scale : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_scale(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * scale = ggml_new_tensor_1d(ctx, type, 1); + ggml_tensor * out = ggml_scale(ctx, a, scale); + return out; + } +}; + +// GGML_OP_NORM +struct test_norm : public test_case { + const ggml_type type; + const std::array ne; + float eps; + + std::string vars() override { + return 
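Each of these structs describes one op configuration; a case is exercised by constructing it and calling `eval()` with two backends to compare, or `eval_perf()` for throughput. A hedged instantiation sketch (the backend handles and the driver loop that builds the full case list are assumed to live further down in this file):

```cpp
// Compare a broadcasting f32 add between two initialized backends; passing
// nullptr as op_name means "do not filter by op name".
static bool check_add_broadcast(ggml_backend_t backend, ggml_backend_t ref_backend) {
    test_bin_bcast tc(ggml_add, GGML_TYPE_F32, {128, 10, 2, 1}, {1, 1, 2, 2});
    return tc.eval(backend, ref_backend, /*op_name=*/nullptr);
}
```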
VARS_TO_STR3(type, ne, eps); + } + + test_norm(ggml_type type = GGML_TYPE_F32, + std::array ne = {64, 10, 10, 10}, + float eps = 1e-6f) + : type(type), ne(ne), eps(eps) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_norm(ctx, a, eps); + return out; + } +}; + +// GGML_OP_RMS_NORM +struct test_rms_norm : public test_case { + const ggml_type type; + const std::array ne; + float eps; + + std::string vars() override { + return VARS_TO_STR3(type, ne, eps); + } + + test_rms_norm(ggml_type type = GGML_TYPE_F32, + std::array ne = {64, 10, 10, 10}, + float eps = 1e-6f) + : type(type), ne(ne), eps(eps) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_rms_norm(ctx, a, eps); + return out; + } +}; + +// GGML_OP_MUL_MAT +struct test_mul_mat : public test_case { + const ggml_type type_a; + const ggml_type type_b; + const int64_t m; + const int64_t n; + const int64_t k; + const std::array bs; // dims 3 and 4 + const std::array nr; // repeat in dims 3 and 4 + + std::string vars() override { + return VARS_TO_STR7(type_a, type_b, m, n, k, bs, nr); + } + + double max_nmse_err() override { + return 5e-4; + } + + size_t op_size(ggml_tensor * t) override { + size_t a = ggml_nbytes(t->src[0]) * n * nr[0] * nr[1]; + size_t b = ggml_nbytes(t->src[1]) * m; + size_t c = ggml_nbytes(t); + return a + b + c; + + GGML_UNUSED(t); + } + + test_mul_mat(ggml_type type_a = GGML_TYPE_F32, ggml_type type_b = GGML_TYPE_F32, + int64_t m = 32, int64_t n = 32, int64_t k = 32, + std::array bs = {10, 10}, + std::array nr = {2, 2}) + : type_a(type_a), type_b(type_b), m(m), n(n), k(k), bs(bs), nr(nr) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + // C^T = A * B^T: (k, m) * (k, n) => (m, n) + ggml_tensor * a = ggml_new_tensor_4d(ctx, type_a, k, m, bs[0] , bs[1]); + ggml_tensor * b = ggml_new_tensor_4d(ctx, type_b, k, n, bs[0]*nr[0], bs[1]*nr[1]); + ggml_tensor * out = ggml_mul_mat(ctx, a, b); + return out; + } +}; + +// GGML_OP_MUL_MAT_ID +struct test_mul_mat_id : public test_case { + const ggml_type type_a; + const ggml_type type_b; + const int n_mats; + const int id; + const int64_t m; + const int64_t n; + const int64_t k; + const std::array bs; // dims 3 and 4 + const std::array nr; // repeat in dims 3 and 4 + + std::string vars() override { + return VARS_TO_STR9(type_a, type_b, n_mats, id, m, n, k, bs, nr); + } + + double max_nmse_err() override { + return 5e-4; + } + + size_t op_size(ggml_tensor * t) override { + size_t a = ggml_nbytes(t->src[2]) * n * nr[0] * nr[1]; + size_t b = ggml_nbytes(t->src[1]) * m; + size_t c = ggml_nbytes(t); + return a + b + c; + + GGML_UNUSED(t); + } + + test_mul_mat_id(ggml_type type_a = GGML_TYPE_F32, ggml_type type_b = GGML_TYPE_F32, + int n_mats = 2, int id = 0, + int64_t m = 32, int64_t n = 32, int64_t k = 32, + std::array bs = {10, 10}, + std::array nr = {2, 2}) + : type_a(type_a), type_b(type_b), n_mats(n_mats), id(id), + m(m), n(n), k(k), bs(bs), nr(nr) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + // C^T = A * B^T: (k, m) * (k, n) => (m, n) + std::vector mats; + for (int i = 0; i < n_mats; i++) { + ggml_tensor * a = ggml_new_tensor_4d(ctx, type_a, k, m, bs[0], bs[1]); + mats.push_back(a); + } + ggml_tensor * ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_mats); + ggml_tensor * b = ggml_new_tensor_4d(ctx, type_b, k, n, bs[0]*nr[0], bs[1]*nr[1]); + 
ggml_tensor * out = ggml_mul_mat_id(ctx, mats.data(), ids, id, b); + return out; + } + + void initialize_tensors(ggml_context * ctx) override { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (t->type == GGML_TYPE_I32) { + // ids + std::vector data(n_mats); + for (int i = 0; i < n_mats; i++) { + data[i] = i; + } + std::shuffle(data.begin(), data.end(), std::default_random_engine(std::random_device()())); + ggml_backend_tensor_set(t, data.data(), 0, n_mats * sizeof(int)); + } else { + init_tensor_uniform(t); + } + } + } +}; + +// GGML_OP_SQR +struct test_sqr : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_sqr(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_sqr(ctx, a); + return out; + } +}; + +// GGML_OP_CLAMP +struct test_clamp : public test_case { + const ggml_type type; + const std::array ne; + float min; + float max; + + std::string vars() override { + return VARS_TO_STR4(type, ne, min, max); + } + + test_clamp(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}, + float min = -0.5f, float max = 0.5f) + : type(type), ne(ne), min(min), max(max) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_clamp(ctx, a, min, max); + return out; + } +}; + +// GGML_OP_DIAG_MASK_INF +struct test_diag_mask_inf : public test_case { + const ggml_type type; + const std::array ne; + const int n_past; + + std::string vars() override { + return VARS_TO_STR3(type, ne, n_past); + } + + test_diag_mask_inf(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}, + int n_past = 5) + : type(type), ne(ne), n_past(n_past) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_diag_mask_inf(ctx, a, n_past); + return out; + } +}; + +// GGML_OP_SOFT_MAX +struct test_soft_max : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_soft_max(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_soft_max(ctx, a); + return out; + } +}; + +// GGML_OP_ROPE +struct test_rope : public test_case { + const ggml_type type; + const std::array ne; + int n_dims; + int mode; + int n_ctx; + + std::string vars() override { + return VARS_TO_STR5(type, ne, n_dims, mode, n_ctx); + } + + test_rope(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 1}, + int n_dims = 10, int mode = 0, int n_ctx = 512) + : type(type), ne(ne), n_dims(n_dims), mode(mode), n_ctx(n_ctx) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, ne[2]); + ggml_tensor * out = ggml_rope(ctx, a, pos, n_dims, mode, n_ctx); + return out; + } + + void initialize_tensors(ggml_context * ctx) override { + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = 
ggml_get_next_tensor(ctx, t)) { + if (t->type == GGML_TYPE_I32) { + // pos + std::vector data(ne[2]); + for (int i = 0; i < ne[2]; i++) { + data[i] = rand() % n_ctx; + } + ggml_backend_tensor_set(t, data.data(), 0, ne[2] * sizeof(int)); + } else { + init_tensor_uniform(t); + } + } + } +}; + +// GGML_OP_ALIBI +struct test_alibi : public test_case { + const ggml_type type; + const std::array ne; + int n_past; + int n_head; + float bias_max; + + std::string vars() override { + return VARS_TO_STR5(type, ne, n_past, n_head, bias_max); + } + + test_alibi(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}, + int n_past = 512, int n_head = 10, float bias_max = 0.5f) + : type(type), ne(ne), n_past(n_past), n_head(n_head), bias_max(bias_max) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_alibi(ctx, a, n_past, n_head, bias_max); + return out; + } +}; + +// GGML_OP_IM2COL +struct test_im2col : public test_case { + const ggml_type type_input; + const ggml_type type_kernel; + const std::array ne_input; + const std::array ne_kernel; + // stride + const int s0; + const int s1; + // padding + const int p0; + const int p1; + // dilatation + const int d0; + const int d1; + // mode + const bool is_2D; + + std::string vars() override { + return VARS_TO_STR11(type_input, type_kernel, ne_input, ne_kernel, s0, s1, p0, p1, d0, d1, is_2D); + } + + test_im2col(ggml_type type_input = GGML_TYPE_F32, ggml_type type_kernel = GGML_TYPE_F16, + std::array ne_input = {10, 10, 3, 1}, // [input_width, input_height, input_channels, 1] + std::array ne_kernel = {3, 3, 3, 1}, // [kernel_width, kernel_height, input_channels, 1] + int s0 = 1, int s1 = 1, + int p0 = 1, int p1 = 1, + int d0 = 1, int d1 = 1, + bool is_2D = true) + : type_input(type_input), type_kernel(type_kernel), ne_input(ne_input), ne_kernel(ne_kernel), s0(s0), s1(s1), p0(p0), p1(p1), d0(d0), d1(d1), is_2D(is_2D) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * input = ggml_new_tensor(ctx, type_input, 4, ne_input.data()); + ggml_tensor * kernel = ggml_new_tensor(ctx, type_kernel, 4, ne_kernel.data()); + ggml_tensor * out = ggml_im2col(ctx, kernel, input, s0, s1, p0, p1, d0, d1, is_2D); + return out; + } +}; + +// GGML_OP_CONCAT +struct test_concat : public test_case { + const ggml_type type; + const std::array ne; + const int64_t b_ne2; + + std::string vars() override { + return VARS_TO_STR3(type, ne, b_ne2); + } + + test_concat(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}, + int64_t b_ne2 = 10) + : type(type), ne(ne), b_ne2(b_ne2) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * b = ggml_new_tensor_4d(ctx, type, ne[0], ne[1], b_ne2, ne[3]); + ggml_tensor * out = ggml_concat(ctx, a, b); + return out; + } +}; + +// GGML_OP_ARGSORT +struct test_argsort : public test_case { + const ggml_type type; + const std::array ne; + ggml_sort_order order; + + std::string vars() override { + return VARS_TO_STR3(type, ne, order); + } + + test_argsort(ggml_type type = GGML_TYPE_F32, + std::array ne = {16, 10, 10, 10}, + ggml_sort_order order = GGML_SORT_ASC) + : type(type), ne(ne), order(order) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_argsort(ctx, a, order); + return out; + } + + void 
initialize_tensors(ggml_context * ctx) override { + std::random_device rd; + std::default_random_engine rng(rd()); + for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { + if (t->type == GGML_TYPE_I32) { + // indices + std::vector data(ggml_nelements(t)); + for (int i = 0; i < ggml_nelements(t); i++) { + data[i] = rand(); + } + std::shuffle(data.begin(), data.end(), rng); + ggml_backend_tensor_set(t, data.data(), 0, ne[0]*ne[1]*ne[2]*ne[3] * sizeof(int)); + } else if (t->type == GGML_TYPE_F32) { + // initialize with unique values to avoid ties + for (int64_t r = 0; r < ggml_nrows(t); r++) { + std::vector data(t->ne[0]); + for (int i = 0; i < t->ne[0]; i++) { + data[i] = i; + } + std::shuffle(data.begin(), data.end(), rng); + ggml_backend_tensor_set(t, data.data(), r * t->nb[1], t->ne[0] * sizeof(float)); + } + } else { + GGML_ASSERT(false); + } + } + } +}; + +// GGML_OP_SUM_ROWS +struct test_sum_rows : public test_case { + const ggml_type type; + const std::array ne; + + std::string vars() override { + return VARS_TO_STR2(type, ne); + } + + test_sum_rows(ggml_type type = GGML_TYPE_F32, + std::array ne = {10, 10, 10, 10}) + : type(type), ne(ne) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_sum_rows(ctx, a); + return out; + } +}; + +enum test_mode { + MODE_TEST, + MODE_PERF, +}; + +static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op_name) { + std::vector> test_cases; + + // unary ops + for (int op = 0; op < GGML_UNARY_OP_COUNT; op++) { + test_cases.emplace_back(new test_unary((ggml_unary_op) op)); + } + + for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { + test_cases.emplace_back(new test_get_rows(type, 10, 5, 3)); + test_cases.emplace_back(new test_get_rows(type, 16, 5, 3)); + } + + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {2, 1, 1, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 2, 1, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 2, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 2})); + + test_cases.emplace_back(new test_dup()); + test_cases.emplace_back(new test_cpy()); + test_cases.emplace_back(new test_cont()); + + auto add_test_bin_bcast = [&](ggml_type type, std::array ne, std::array nr) { + for (auto op : {ggml_add, ggml_mul, ggml_div}) { + test_cases.emplace_back(new test_bin_bcast(op, type, ne, nr)); + } + }; + + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 8, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 320, 320}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 1, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {2, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 2, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 1, 2, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 1, 1, 2}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 1, 2, 2}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {1, 2, 2, 2}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 10}, {2, 2, 2, 2}); + + // stable diffusion + 
add_test_bin_bcast(GGML_TYPE_F32, {1280, 1, 1, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1280, 1, 1, 1}, {1, 16, 16, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1280, 16, 16, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1280, 1, 1, 1}, {1, 256, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 1280, 1}, {16, 16, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {16, 16, 1280, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 1920, 1}, {16, 16, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 2560, 1}, {16, 16, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 1280, 1}, {32, 32, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 1920, 1}, {32, 32, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 640, 1}, {32, 32, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {5120, 1, 1, 1}, {1, 256, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {640, 1, 1, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {2, 1, 1, 1}); + + test_cases.emplace_back(new test_scale()); + + for (float eps : {1e-6f, 1e-5f, 1e-3f, 1e-1f}) { + test_cases.emplace_back(new test_norm(GGML_TYPE_F32, {64, 10, 10, 10}, eps)); + test_cases.emplace_back(new test_rms_norm(GGML_TYPE_F32, {64, 10, 10, 10}, eps)); + } + + const ggml_type all_types[] = { + GGML_TYPE_F32, GGML_TYPE_F16, + GGML_TYPE_Q4_0, GGML_TYPE_Q4_1, + GGML_TYPE_Q5_0, GGML_TYPE_Q5_1, + GGML_TYPE_Q8_0, + GGML_TYPE_Q2_K, GGML_TYPE_Q3_K, + GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, + GGML_TYPE_Q6_K + }; + + for (ggml_type type_a : all_types) { + for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { + // FIXME: CPU crashes on f16xf16 + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {2, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 10}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 10}, {2, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 10}, {1, 2})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 10}, {2, 2})); + + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, { 1, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 1}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 1}, {2, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 10}, {1, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 10}, {2, 1})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 10}, {1, 2})); + test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 16, 256, {10, 10}, {2, 2})); + } + } + + for (ggml_type type_a : all_types) { + for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { + for (int n_mats : {1, 2, 4}) { + for (int id = 0; id < n_mats; id++) { + test_cases.emplace_back(new test_mul_mat_id(type_a, type_b, n_mats, id, 16, 16, 256, {1, 1}, {1, 1})); + } + } + } + } + + test_cases.emplace_back(new test_sqr()); + test_cases.emplace_back(new test_clamp()); + + test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 1, 1}, 5)); + test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 1}, 5)); + 
test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 10}, 5)); + + test_cases.emplace_back(new test_soft_max()); + + for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { + test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512)); // llama 7B + test_cases.emplace_back(new test_rope(type, {128, 40, 10, 1}, 128, 0, 512)); // llama 13B + test_cases.emplace_back(new test_rope(type, {128, 52, 10, 1}, 128, 0, 512)); // llama 30B + test_cases.emplace_back(new test_rope(type, {128, 64, 10, 1}, 128, 0, 512)); // llama 65B + test_cases.emplace_back(new test_rope(type, { 64, 1, 10, 1}, 64, 2, 512)); // neox (falcon 7B) + test_cases.emplace_back(new test_rope(type, { 64, 71, 10, 1}, 64, 2, 512)); // neox (falcon 7B) + test_cases.emplace_back(new test_rope(type, { 64, 8, 10, 1}, 64, 2, 512)); // neox (falcon 40B) + test_cases.emplace_back(new test_rope(type, { 64, 128, 10, 1}, 64, 2, 512)); // neox (falcon 40B) + test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 20, 2, 512)); // neox (stablelm) + } + + test_cases.emplace_back(new test_alibi()); + test_cases.emplace_back(new test_im2col()); + test_cases.emplace_back(new test_concat()); + + for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { + test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); + } + + test_cases.emplace_back(new test_sum_rows()); + + // run tests + if (mode == MODE_TEST) { + ggml_backend_t backend_cpu = ggml_backend_cpu_init(); + + size_t n_ok = 0; + for (auto & test : test_cases) { + if (test->eval(backend, backend_cpu, op_name)) { + n_ok++; + } + } + printf(" %zu/%zu tests passed\n", n_ok, test_cases.size()); + + ggml_backend_free(backend_cpu); + + return n_ok == test_cases.size(); + } else if (mode == MODE_PERF) { + for (auto & test : test_cases) { + test->eval_perf(backend, op_name); + } + return true; + } else { + GGML_ASSERT(false); + } +} + +static void usage(char ** argv) { + printf("Usage: %s [mode] [-o op] [-b backend]\n", argv[0]); + printf(" valid modes are: test (compare with CPU backend for correctness) or perf (performance evaluation)\n"); + printf(" op names are as given by ggml_op_desc()\n"); +} + +int main(int argc, char ** argv) { + test_mode mode = MODE_TEST; + const char * op_name = NULL; + const char * backend = NULL; + + for (int i = 1; i < argc; i++) { + if (strcmp(argv[i], "test") == 0) { + mode = MODE_TEST; + } else if (strcmp(argv[i], "perf") == 0) { + mode = MODE_PERF; + } else if (strcmp(argv[i], "-o") == 0) { + if (i + 1 < argc) { + op_name = argv[++i]; + } else { + usage(argv); + return 1; + } + } else if (strcmp(argv[i], "-b") == 0) { + if (i + 1 < argc) { + backend = argv[++i]; + } else { + usage(argv); + return 1; + } + } else { + usage(argv); + return 1; + } + } + + // enumerate backends + printf("Testing %zu backends\n\n", ggml_backend_reg_get_count()); + + size_t n_ok = 0; + + for (size_t i = 0; i < ggml_backend_reg_get_count(); i++) { + printf("Backend %zu/%zu (%s)\n", i + 1, ggml_backend_reg_get_count(), ggml_backend_reg_get_name(i)); + + if (backend != NULL && strcmp(backend, ggml_backend_reg_get_name(i)) != 0) { + printf(" Skipping\n"); + n_ok++; + continue; + } + + ggml_backend_t backend = ggml_backend_reg_init_backend(i, NULL); + GGML_ASSERT(backend != NULL); + printf(" Backend name: %s\n", ggml_backend_name(backend)); + + bool ok = test_backend(backend, mode, op_name); + + printf(" Backend %s: ", ggml_backend_name(backend)); + if (ok) { + printf("\033[1;32mOK\033[0m\n"); + n_ok++; + } else { + 
printf("\033[1;31mFAIL\033[0m\n"); + } + + printf("\n"); + + ggml_backend_free(backend); + } + + printf("%zu/%zu backends passed\n", n_ok, ggml_backend_reg_get_count()); + if (n_ok != ggml_backend_reg_get_count()) { + printf("\033[1;31mFAIL\033[0m\n"); + return 1; + } else { + printf("\033[1;32mOK\033[0m\n"); + return 0; + } +} From e18f7345a300920e234f732077bda660cc6cda9c Mon Sep 17 00:00:00 2001 From: "Xiang (Kevin) Li" Date: Sat, 9 Dec 2023 16:29:27 -0500 Subject: [PATCH 066/811] grammar : revert the replacement of llama_token_to_piece with id_to_token (#4396) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index b12bbd1b0..93d8f3e16 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7503,7 +7503,7 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c for (size_t i = 0; i < candidates->size; ++i) { const llama_token id = candidates->data[i].id; - const std::string & piece = ctx->model.vocab.id_to_token[id].text; + const std::string piece = llama_token_to_piece(ctx, id); if (id == eos) { if (!allow_eos) { candidates->data[i].logit = -INFINITY; @@ -7715,7 +7715,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar GGML_ASSERT(false); } - const std::string & piece = ctx->model.vocab.id_to_token[token].text; + const std::string piece = llama_token_to_piece(ctx, token); // Note terminating 0 in decoded string const auto decoded = decode_utf8(piece, grammar->partial_utf8); From 8a7b2fa528f130631a5f43648481596ab320ed5a Mon Sep 17 00:00:00 2001 From: Yueh-Po Peng <94939112+y10ab1@users.noreply.github.com> Date: Mon, 11 Dec 2023 06:27:38 +0800 Subject: [PATCH 067/811] Update README.md (#4388) Fix small typo. --- examples/server/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index cfc220f58..0751b9612 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -222,7 +222,7 @@ node index.js `content`: Set the text to process. - **POST** `/infill`: For code infilling. Takes a prefix and a suffix and returns the predicted completion as stream. +- **POST** `/infill`: For code infilling. Takes a prefix and a suffix and returns the predicted completion as stream. 
*Options:* From 41a11aaf99feff4901e4c8dc48ad00766c5da4e9 Mon Sep 17 00:00:00 2001 From: Taikono-Himazin Date: Tue, 12 Dec 2023 18:24:32 +0900 Subject: [PATCH 068/811] ggml : increased GGML_MAX_PARAMS to allow finetuning of 70b models (#4424) --- ggml.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.h b/ggml.h index a8f10cbd5..41a075e92 100644 --- a/ggml.h +++ b/ggml.h @@ -215,7 +215,7 @@ #define GGML_QNT_VERSION_FACTOR 1000 // do not change this #define GGML_MAX_DIMS 4 -#define GGML_MAX_PARAMS 1024 +#define GGML_MAX_PARAMS 2048 #define GGML_MAX_CONTEXTS 64 #define GGML_MAX_SRC 6 #define GGML_MAX_NAME 64 From d9d4cfef64ea416dd66632173787d03ffb180cc7 Mon Sep 17 00:00:00 2001 From: Vladimir Zorin Date: Tue, 12 Dec 2023 11:25:29 +0200 Subject: [PATCH 069/811] server : fix local model name in server (#4420) --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 895f751c9..d0cd8e1cd 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2382,6 +2382,7 @@ json oaicompat_completion_params_parse( llama_params["__oaicompat"] = true; // Map OpenAI parameters to llama.cpp parameters + llama_params["model"] = json_value(body, "model", std::string("uknown")); llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.8); From 6391817cd19a4507c6c941a1fd08756268662b2d Mon Sep 17 00:00:00 2001 From: crasm Date: Tue, 12 Dec 2023 04:25:57 -0500 Subject: [PATCH 070/811] llama : document logits_all deprecation (#4418) llama_context_params.logits_all is a parameter for controlling llama_eval. This documents that logits_all should not be used with llama_decode and llama_batch. --- llama.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.h b/llama.h index b1f5fca62..45a65cacb 100644 --- a/llama.h +++ b/llama.h @@ -216,7 +216,7 @@ extern "C" { // Keep the booleans together to avoid misalignment during copy-by-value. bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true) - bool logits_all; // the llama_eval() call computes all logits, not just the last one + bool logits_all; // the llama_eval() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU }; From 6138963fb232cbae70c9d181db0ba125708f473d Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 12 Dec 2023 04:27:26 -0500 Subject: [PATCH 071/811] build : target Windows 8 for standard mingw-w64 (#4405) * build : target Windows 8 for standard mingw-w64 * make : fix missing console.o deps This was causing a link error with `make all` on Windows. 
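For context, a minimal standalone sketch of the Windows 8+ API that this build change unlocks. None of the code below is taken from the tree; the helper name and structure are purely illustrative. With the standard mingw-w64 headers, PrefetchVirtualMemory() is only declared when _WIN32_WINNT is at least 0x0602 (Windows 8), which is why the build now defines that target version.

#if defined(_WIN32) && defined(_WIN32_WINNT) && _WIN32_WINNT >= 0x0602
#include <stddef.h>
#include <windows.h>

// hypothetical helper (not part of this patch): ask the OS to page in a
// memory-mapped region up front instead of faulting pages in lazily
static void prefetch_mapping(void * addr, size_t len) {
    WIN32_MEMORY_RANGE_ENTRY range;
    range.VirtualAddress = addr;
    range.NumberOfBytes  = (SIZE_T) len;
    // flags must be 0; prefetching is only a hint, so failure is non-fatal
    if (!PrefetchVirtualMemory(GetCurrentProcess(), 1, &range, 0)) {
        // fall back silently - the pages will simply fault in on demand
    }
}
#endif

Because the call sits behind the same _WIN32_WINNT guard, builds that target older Windows versions still compile; they just skip the prefetch hint.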
--- CMakeLists.txt | 5 +++++ Makefile | 17 ++++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 78de2dd1a..eea4673d1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -593,6 +593,11 @@ else() message(STATUS "Unknown architecture") endif() +if (MINGW) + # Target Windows 8 for PrefetchVirtualMemory + add_compile_definitions(_WIN32_WINNT=0x602) +endif() + # # POSIX conformance # diff --git a/Makefile b/Makefile index a1a6cae54..e77595952 100644 --- a/Makefile +++ b/Makefile @@ -306,12 +306,15 @@ ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64)) #MK_CXXFLAGS += -mssse3 endif -# The stack is only 16-byte aligned on Windows, so don't let gcc emit aligned moves. -# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412 -# https://github.com/ggerganov/llama.cpp/issues/2922 ifneq '' '$(findstring mingw,$(shell $(CC) -dumpmachine))' + # The stack is only 16-byte aligned on Windows, so don't let gcc emit aligned moves. + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412 + # https://github.com/ggerganov/llama.cpp/issues/2922 MK_CFLAGS += -Xassembler -muse-unaligned-vector-move MK_CXXFLAGS += -Xassembler -muse-unaligned-vector-move + + # Target Windows 8 for PrefetchVirtualMemory + MK_CPPFLAGS += -D_WIN32_WINNT=0x602 endif ifneq ($(filter aarch64%,$(UNAME_M)),) @@ -730,16 +733,16 @@ tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o $(OBJS) tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) tests/test-rope: tests/test-rope.cpp ggml.o $(OBJS) From 9494d7c4774ab745490b5a19570ff7747a194143 Mon Sep 17 00:00:00 2001 From: Richard Kiss Date: Tue, 12 Dec 2023 01:53:36 -0800 Subject: [PATCH 072/811] english : use `typos` to fix comments and logs (#4354) --- common/log.h | 8 ++++---- convert.py | 4 ++-- examples/llava/clip.cpp | 2 +- examples/llava/convert-image-encoder-to-gguf.py | 2 +- examples/lookahead/README.md | 2 +- examples/server/json.hpp | 2 +- examples/server/public/completion.js | 2 +- examples/server/public/index.html | 6 +++--- examples/speculative/README.md | 2 +- examples/speculative/speculative.cpp | 2 +- ggml-alloc.h | 2 +- ggml-quants.c | 4 ++-- ggml.c | 12 ++++++------ gguf-py/README.md | 2 +- llama.cpp | 10 +++++----- tests/test-grad0.cpp | 2 +- tests/test-quantize-perf.cpp | 4 ++-- 17 files changed, 34 insertions(+), 34 deletions(-) diff --git a/common/log.h b/common/log.h 
index c0e814861..e4e1b9f4f 100644 --- a/common/log.h +++ b/common/log.h @@ -61,13 +61,13 @@ // #define LOG_TARGET stderr // #include "log.h" // -// The log target can also be redirected to a diffrent function +// The log target can also be redirected to a different function // like so: // -// #define LOG_TARGET log_handler_diffrent() +// #define LOG_TARGET log_handler_different() // #include "log.h" // -// FILE* log_handler_diffrent() +// FILE* log_handler_different() // { // return stderr; // } @@ -421,7 +421,7 @@ inline FILE *log_handler2_impl(bool change = false, LogTriState append = LogTriS // Disables logs entirely at runtime. // Makes LOG() and LOG_TEE() produce no output, -// untill enabled back. +// until enabled back. #define log_disable() log_disable_impl() // INTERNAL, DO NOT USE diff --git a/convert.py b/convert.py index 6e95d6cb3..a6fc6b8ea 100755 --- a/convert.py +++ b/convert.py @@ -585,7 +585,7 @@ def merge_multifile_models(models_plus: list[ModelPlus]) -> ModelPlus: if any("model.embed_tokens.weight" in mp.model for mp in models_plus): # Transformers models put different tensors in different files, but - # don't split indivdual tensors between files. + # don't split individual tensors between files. model: LazyModel = {} for mp in models_plus: model.update(mp.model) @@ -678,7 +678,7 @@ class LazyUnpickler(pickle.Unpickler): return func(*args) CLASSES: dict[tuple[str, str], Any] = { - # getattr used here as a workaround for mypy not being smart enough to detrmine + # getattr used here as a workaround for mypy not being smart enough to determine # the staticmethods have a __func__ attribute. ('torch._tensor', '_rebuild_from_type_v2'): getattr(rebuild_from_type_v2, '__func__'), ('torch._utils', '_rebuild_tensor_v2'): getattr(lazy_rebuild_tensor_v2, '__func__'), diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index fc0656c23..4bb7b93b6 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -739,7 +739,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip temp->ny = longer_side; temp->size = 3 * longer_side * longer_side; temp->data = new uint8_t[temp->size](); - uint8_t bc[3] = {122, 116, 104}; // bakground color in RGB from LLaVA + uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA // fill with background color for (size_t i = 0; i < temp->size; i++) { diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index 729aaef8f..03688e0ea 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -51,7 +51,7 @@ def bytes_to_unicode(): The reversible bpe codes work on unicode strings. This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. - This is a signficant percentage of your normal, say, 32K bpe vocab. + This is a significant percentage of your normal, say, 32K bpe vocab. To avoid that, we want lookup tables between utf-8 bytes and unicode strings. And avoids mapping to whitespace/control characters the bpe code barfs on. 
""" diff --git a/examples/lookahead/README.md b/examples/lookahead/README.md index 252a6689e..a69a471b4 100644 --- a/examples/lookahead/README.md +++ b/examples/lookahead/README.md @@ -1,6 +1,6 @@ # llama.cpp/examples/lookahead -Demonstartion of lookahead decoding technique: +Demonstration of lookahead decoding technique: https://lmsys.org/blog/2023-11-21-lookahead-decoding/ diff --git a/examples/server/json.hpp b/examples/server/json.hpp index 4d1a37ad7..ea945f346 100644 --- a/examples/server/json.hpp +++ b/examples/server/json.hpp @@ -11227,7 +11227,7 @@ class binary_reader } if (is_ndarray) // ndarray dimensional vector can only contain integers, and can not embed another array { - return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimentional vector is not allowed", "size"), nullptr)); + return sax->parse_error(chars_read, get_token_string(), parse_error::create(113, chars_read, exception_message(input_format, "ndarray dimensional vector is not allowed", "size"), nullptr)); } std::vector dim; if (JSON_HEDLEY_UNLIKELY(!get_ubjson_ndarray_size(dim))) diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index b9c442509..c281f0fbd 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -114,7 +114,7 @@ export async function* llama(prompt, params = {}, config = {}) { return content; } -// Call llama, return an event target that you can subcribe to +// Call llama, return an event target that you can subscribe to // // Example: // diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 175c52478..18a6ccf0f 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -238,7 +238,7 @@ cache_prompt: true }) - /* START: Support for storing prompt templates and parameters in borwser LocalStorage */ + /* START: Support for storing prompt templates and parameters in browsers LocalStorage */ const local_storage_storageKey = "llamacpp_server_local_storage"; @@ -282,7 +282,7 @@ let importedTemplates = local_storage_getDataAsObject('user_templates') if (importedTemplates) { - // saved templates were successfuly imported. + // saved templates were successfully imported. 
console.log('Processing saved templates and updating default template') params.value = { ...params.value, image_data: [] }; @@ -303,7 +303,7 @@ } function userTemplateResetToDefault() { - console.log('Reseting themplate to default') + console.log('Resetting template to default') selectedUserTemplate.value.name = 'default'; selectedUserTemplate.value.data = savedUserTemplates.value['default']; } diff --git a/examples/speculative/README.md b/examples/speculative/README.md index d88fd3790..814efa592 100644 --- a/examples/speculative/README.md +++ b/examples/speculative/README.md @@ -1,6 +1,6 @@ # llama.cpp/examples/speculative -Demonstartion of speculative decoding and tree-based speculative decoding techniques +Demonstration of speculative decoding and tree-based speculative decoding techniques More info: diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index dca3f84a5..20f1fb5bf 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -428,7 +428,7 @@ int main(int argc, char ** argv) { ++n_past_tgt; } - // the first token is always proposed by the traget model before the speculation loop so we erase it here + // the first token is always proposed by the target model before the speculation loop so we erase it here for (int s = 0; s < n_seq_dft; ++s) { if (!drafts[s].active) { continue; diff --git a/ggml-alloc.h b/ggml-alloc.h index ad87cebc8..64a412468 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -43,7 +43,7 @@ GGML_API size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph // ggml-backend v2 API // -// Seperate tensor and graph allocator objects +// Separate tensor and graph allocator objects // This is necessary for multi-backend allocation because the graph allocator needs to use multiple tensor allocators // The original API is kept as a wrapper around the new API diff --git a/ggml-quants.c b/ggml-quants.c index 7285d5f7f..0e8163a16 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3114,7 +3114,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri size_t vl = __riscv_vsetvl_e8m1(qk/2); - // These tempory registers are for masking and shift operations + // These temporary registers are for masking and shift operations vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); vuint32m2_t vt_2 = __riscv_vsll_vv_u32m2(__riscv_vmv_v_x_u32m2(1, vl), vt_1, vl); @@ -4757,7 +4757,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri vl = 16; - // retreive lane to multiply with scale + // retrieve lane to multiply with scale vint32m2_t aux0_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a0, 0), (scale[0]), vl); vint32m2_t aux0_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a0, 1), (scale[1]), vl); vint32m2_t aux1_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a1, 0), (scale[2]), vl); diff --git a/ggml.c b/ggml.c index ca56f063c..eb7989dc4 100644 --- a/ggml.c +++ b/ggml.c @@ -1,4 +1,4 @@ -#define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnigns on Windows +#define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnings on Windows #define _USE_MATH_DEFINES // For M_PI on MSVC #include "ggml-impl.h" @@ -33,7 +33,7 @@ // we should just be careful :) #pragma warning(disable: 4244 4267) -// disable POSIX deprecation warnigns +// disable POSIX deprecation warnings // these functions are never going away, anyway #pragma warning(disable: 4996) #endif @@ -1760,7 +1760,7 @@ static_assert(sizeof(struct 
ggml_object)%GGML_MEM_ALIGN == 0, "ggml_object size static_assert(sizeof(struct ggml_tensor)%GGML_MEM_ALIGN == 0, "ggml_tensor size must be a multiple of GGML_MEM_ALIGN"); // WARN: -// Mis-confguration can lead to problem that's hard to reason about: +// Mis-configuration can lead to problem that's hard to reason about: // * At best it crash or talks nosense. // * At worst it talks slightly difference but hard to perceive. // @@ -7520,7 +7520,7 @@ static void ggml_compute_forward_acc_f32( GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); // view src0 and dst with these strides and data offset inbytes during acc - // nb0 is implicitely element_size because src0 and dst are contiguous + // nb0 is implicitly element_size because src0 and dst are contiguous size_t nb1 = ((int32_t *) dst->op_params)[0]; size_t nb2 = ((int32_t *) dst->op_params)[1]; size_t nb3 = ((int32_t *) dst->op_params)[2]; @@ -10161,7 +10161,7 @@ static void ggml_compute_forward_set_f32( GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); // view src0 and dst with these strides and data offset inbytes during set - // nb0 is implicitely element_size because src0 and dst are contiguous + // nb0 is implicitly element_size because src0 and dst are contiguous size_t nb1 = ((int32_t *) dst->op_params)[0]; size_t nb2 = ((int32_t *) dst->op_params)[1]; size_t nb3 = ((int32_t *) dst->op_params)[2]; @@ -14475,7 +14475,7 @@ void ggml_build_backward_gradient_checkpointing( // insert new tensors recomputing src, reusing already made replacements, // remember replacements: remember new tensors with mapping from corresponding gf nodes // recurse for input tensors, - // unless (i.e. terminating when) input tensors are replacments (like checkpoints) + // unless (i.e. terminating when) input tensors are replacements (like checkpoints) node->src[k] = ggml_recompute_graph_node(ctx, gf, replacements, node->src[k]); } // insert rewritten backward node with replacements made into resulting backward graph gb diff --git a/gguf-py/README.md b/gguf-py/README.md index 502b6a510..a27d2fc0e 100644 --- a/gguf-py/README.md +++ b/gguf-py/README.md @@ -61,7 +61,7 @@ If you want to publish the package manually for any reason, you need to have `tw pip install build twine ``` -Then, folow these steps to release a new version: +Then, follow these steps to release a new version: 1. Bump the version in `pyproject.toml`. 2. Build the package: diff --git a/llama.cpp b/llama.cpp index 93d8f3e16..54fa9e43e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2758,7 +2758,7 @@ static void llm_load_vocab( // The assumption is, since special tokens aren't meant to be exposed to end user, they are designed // to be unmatchable by the tokenizer, therefore tokens from the vocab, which are unmatchable by the tokenizer // are special tokens. - // From testing, this appears to corelate 1:1 with special tokens. + // From testing, this appears to correlate 1:1 with special tokens. 
// // Counting special tokens and verifying in only one direction @@ -5846,7 +5846,7 @@ static int llama_decode_internal( const int64_t n_embd = hparams.n_embd; const int64_t n_vocab = hparams.n_vocab; - // helpers for smoother batch API transistion + // helpers for smoother batch API transition // after deprecating the llama_eval calls, these will be removed std::vector pos; @@ -6625,12 +6625,12 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< // loop over the text while (true) { - // find the first occurence of a given special token in this fragment + // find the first occurrence of a given special token in this fragment // passing offset argument only limit the "search area" but match coordinates // are still relative to the source full raw_text auto match = raw_text->find(special_token, raw_text_base_offset); - // no occurences found, stop processing this fragment for a given special token + // no occurrences found, stop processing this fragment for a given special token if (match == std::string::npos) break; // check if match is within bounds of offset <-> length @@ -7829,7 +7829,7 @@ struct llama_beam_search_data { } // Min-heaps are used to efficiently collect the top-k elements (k=n_beams). - // The repetative patterns below reflect the 2 stages of heaps: + // The repetitive patterns below reflect the 2 stages of heaps: // * Gather elements until the vector is full, then call std::make_heap() on it. // * If the heap is full and a new element is found that should be included, pop the // least element to the back(), replace it with the new, then push it into the heap. diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 7fe9154dd..81c20a89c 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -1,4 +1,4 @@ -#define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnigns on Windows +#define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnings on Windows #include "ggml.h" #include diff --git a/tests/test-quantize-perf.cpp b/tests/test-quantize-perf.cpp index 88fac0e23..62d0190f9 100644 --- a/tests/test-quantize-perf.cpp +++ b/tests/test-quantize-perf.cpp @@ -117,7 +117,7 @@ static void usage(char * argv[]) { printf(" --size SIZE set test size, divisible by 32 (L1_SIZE:%d)\n", L1_SIZE); printf(" -3 use size as L1, L2, L3 sizes (L1:%d L2:%d L3:%d)\n", L1_SIZE, L2_SIZE, L3_SIZE); printf(" -4 use size as L1, L2, L3, MEM sizes (L1:%d L2:%d L3:%d MEM:%d)\n", L1_SIZE, L2_SIZE, L3_SIZE, MEM_SIZE); - printf(" --op OP set test opration as quantize_row_q_reference, quantize_row_q, dequantize_row_q,\n"); + printf(" --op OP set test operation as quantize_row_q_reference, quantize_row_q, dequantize_row_q,\n"); printf(" quantize_row_q_dot, vec_dot_q (all)\n"); printf(" --type TYPE set test type as"); for (int i = 0; i < GGML_TYPE_COUNT; i++) { @@ -202,7 +202,7 @@ int main(int argc, char * argv[]) { } int alignment = std::stoi(argv[i]); if (alignment < 0 || alignment > MAX_ALIGNMENT) { - fprintf(stderr, "error: aligment-offset must be less than %d\n", MAX_ALIGNMENT); + fprintf(stderr, "error: alignment-offset must be less than %d\n", MAX_ALIGNMENT); invalid_param = true; break; } From fecac45658a99eddc4d6e36ba0310ca8f87a77f0 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Tue, 12 Dec 2023 04:12:35 -0600 Subject: [PATCH 073/811] server : tweak default sampling parameters (#4367) * Set a more typical Top P setting as the default * Update temp max --- examples/server/public/index.html 
| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 18a6ccf0f..451fd4a3b 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -223,7 +223,7 @@ repeat_last_n: 256, // 0 = disable penalty, -1 = context size repeat_penalty: 1.18, // 1.0 = disabled top_k: 40, // <= 0 to use vocab size - top_p: 0.5, // 1.0 = disabled + top_p: 0.95, // 1.0 = disabled min_p: 0.05, // 0 = disabled tfs_z: 1.0, // 1.0 = disabled typical_p: 1.0, // 1.0 = disabled @@ -762,7 +762,7 @@
${IntField({ label: "Predictions", max: 2048, min: -1, name: "n_predict", value: params.value.n_predict })} - ${FloatField({ label: "Temperature", max: 1.5, min: 0.0, name: "temperature", step: 0.01, value: params.value.temperature })} + ${FloatField({ label: "Temperature", max: 2.0, min: 0.0, name: "temperature", step: 0.01, value: params.value.temperature })} ${FloatField({ label: "Penalize repeat sequence", max: 2.0, min: 0.0, name: "repeat_penalty", step: 0.01, value: params.value.repeat_penalty })} ${IntField({ label: "Consider N tokens for penalize", max: 2048, min: 0, name: "repeat_last_n", value: params.value.repeat_last_n })} ${IntField({ label: "Top-K sampling", max: 100, min: -1, name: "top_k", value: params.value.top_k })} From 799a1cb13b0b1b560ab0ceff485caed68faa8f1f Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 13 Dec 2023 13:04:25 +0100 Subject: [PATCH 074/811] llama : add Mixtral support (#4406) * convert : support Mixtral as LLAMA arch * convert : fix n_ff typo * llama : model loading * ggml : sync latest ggml_mul_mat_id * llama : update graph to support MoE * llama : fix cur -> cur_expert * llama : first working version * llama : fix expert weighting in the FFN * ggml : ggml_get_rows support 2D indexing [n_tokens, n_experts] (cpu only) * ggml : add n_as argument to ggml_mul_mat_id * ggml : fix ggml_get_rows to take into account ne02 / ne11 * metal : add more general support for ggml_get_rows + tests * llama : add basic support for offloading moe with CUDA * metal : add/mul/div use general kernel when src1 not cont * metal : reduce the kernel launches for ggml_mul_mat_id * ggml : get_rows : support non-contiguos tensors with gaps, generalize up to 3D * ggml : update get_rows f16 and q * cuda : support non-contiguous src1 in get_rows * llama : offload missing ffn_moe_silu * metal : fix ggml_get_rows to work with non-cont src1 * metal : add indirect mat-vec kernels for all quantization types * llama : do not quantize expert gating tensors * llama : add n_expert and n_expert_used to hparams + change quants * test-backend-ops : add moe test * cuda : fix get_rows when ncols is odd * convert : determine n_ctx correctly * metal : fix ggml_mul_mat_id for F32 * test-backend-ops : make experts more evenly probable (test_moe) * test-backend-ops : cleanup, add moe test for batches * test-backend-ops : add cpy from f32 -> all types test * test-backend-ops : fix dequantize block offset * llama : fix hard-coded number of experts * test-backend-ops : simplify and disable slow tests to avoid CI timeout * test-backend-ops : disable MOE test with thread sanitizer * cuda : fix mul_mat_id with multi gpu * convert : use 1e6 rope_freq_base for mixtral * convert : fix style * convert : support safetensors format * gguf-py : bump version * metal : add cpy f16 -> f32 kernel * metal : fix binary ops for ne10 % 4 != 0 * test-backend-ops : add one more sum_rows test * ggml : do not use BLAS with ggml_mul_mat_id * convert-hf : support for mixtral-instruct (#4428) * convert : typo fix, add additional hyperparameters, use LLaMA arch for Mixtral-instruct * convert : use sentencepiece tokenizer for Mixtral-instruct * convert : make flake8 happy * metal : fix soft_max kernels ref: https://github.com/ggerganov/ggml/pull/621/commits/1914017863d2f9ab8ecc0281cc2a56d683668b92 * metal : limit kernels to not use more than the allowed threads --------- Co-authored-by: Georgi Gerganov Co-authored-by: Radek Pilar --- Makefile | 5 + convert-hf-to-gguf.py | 21 +- convert.py | 74 +- ggml-cuda.cu | 297 +++++-- 
ggml-metal.m | 332 ++++++-- ggml-metal.metal | 1320 +++++++++++++++++++++++++++++--- ggml.c | 168 ++-- ggml.h | 6 +- gguf-py/gguf/constants.py | 16 +- gguf-py/gguf/gguf_writer.py | 6 + gguf-py/gguf/tensor_mapping.py | 39 +- gguf-py/pyproject.toml | 2 +- llama.cpp | 202 ++++- tests/test-backend-ops.cpp | 277 +++++-- 14 files changed, 2370 insertions(+), 395 deletions(-) diff --git a/Makefile b/Makefile index e77595952..b7afda2b5 100644 --- a/Makefile +++ b/Makefile @@ -399,6 +399,11 @@ ifdef LLAMA_CUBLAS MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib OBJS += ggml-cuda.o NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math + +ifdef LLAMA_DEBUG + NVCCFLAGS += -lineinfo +endif + ifdef LLAMA_CUDA_NVCC NVCC = $(LLAMA_CUDA_NVCC) else diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index bced1f561..e46a7813a 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -77,8 +77,18 @@ class Model: self.gguf_writer.add_embedding_length(n_embd) if (n_ff := self.hparams.get("intermediate_size")) is not None: self.gguf_writer.add_feed_forward_length(n_ff) - if (n_head := self.hparams.get("num_attention_head")) is not None: + if (n_head := self.hparams.get("num_attention_heads")) is not None: self.gguf_writer.add_head_count(n_head) + if (n_head_kv := self.hparams.get("num_key_value_heads")) is not None: + self.gguf_writer.add_head_count_kv(n_head_kv) + + if (n_rms_eps := self.hparams.get("rms_norm_eps")) is not None: + self.gguf_writer.add_layer_norm_rms_eps(n_rms_eps) + if (n_experts := self.hparams.get("num_local_experts")) is not None: + self.gguf_writer.add_expert_count(n_experts) + if (n_experts_used := self.hparams.get("num_experts_per_tok")) is not None: + self.gguf_writer.add_expert_used_count(n_experts_used) + self.gguf_writer.add_parallel_residual(self.hparams.get("use_parallel_residual", True)) def write_tensors(self): @@ -170,6 +180,8 @@ class Model: return StableLMModel if model_architecture == "QWenLMHeadModel": return QwenModel + if model_architecture == "MixtralForCausalLM": + return MixtralModel return Model def _is_model_safetensors(self) -> bool: @@ -207,6 +219,8 @@ class Model: return gguf.MODEL_ARCH.STABLELM if arch == "QWenLMHeadModel": return gguf.MODEL_ARCH.QWEN + if arch == "MixtralForCausalLM": + return gguf.MODEL_ARCH.LLAMA raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -837,6 +851,11 @@ class StableLMModel(Model): self.gguf_writer.add_layer_norm_eps(1e-5) +class MixtralModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + class QwenModel(Model): @staticmethod def token_bytes_to_string(b): diff --git a/convert.py b/convert.py index a6fc6b8ea..e4b69d172 100755 --- a/convert.py +++ b/convert.py @@ -42,6 +42,7 @@ NDArray: TypeAlias = 'np.ndarray[Any, Any]' ARCH = gguf.MODEL_ARCH.LLAMA DEFAULT_CONCURRENCY = 8 + # # data types # @@ -62,10 +63,10 @@ class UnquantizedDataType(DataType): pass -DT_F16 = UnquantizedDataType('F16', dtype = np.dtype(np.float16), valid_conversions = ['F32', 'Q8_0']) -DT_F32 = UnquantizedDataType('F32', dtype = np.dtype(np.float32), valid_conversions = ['F16', 'Q8_0']) -DT_I32 = UnquantizedDataType('I32', dtype = np.dtype(np.int16), valid_conversions = []) -DT_BF16 = UnquantizedDataType('BF16', dtype = np.dtype(np.uint16), valid_conversions = ['F32', 'F16', 'Q8_0']) +DT_F16 = UnquantizedDataType('F16', dtype = np.dtype(np.float16), valid_conversions = ['F32', 'Q8_0']) +DT_F32 = 
UnquantizedDataType('F32', dtype = np.dtype(np.float32), valid_conversions = ['F16', 'Q8_0']) +DT_I32 = UnquantizedDataType('I32', dtype = np.dtype(np.int16), valid_conversions = []) +DT_BF16 = UnquantizedDataType('BF16', dtype = np.dtype(np.uint16), valid_conversions = ['F32', 'F16', 'Q8_0']) @dataclass(frozen=True) @@ -151,14 +152,16 @@ GGML_FILE_TYPE_TO_DATA_TYPE: dict[GGMLFileType, DataType] = { @dataclass class Params: - n_vocab: int - n_embd: int - n_layer: int - n_ctx: int - n_ff: int - n_head: int - n_head_kv: int - f_norm_eps: float + n_vocab: int + n_embd: int + n_layer: int + n_ctx: int + n_ff: int + n_head: int + n_head_kv: int + n_experts: int | None = None + n_experts_used: int | None = None + f_norm_eps: float | None = None rope_scaling_type: gguf.RopeScalingType | None = None f_rope_freq_base: float | None = None @@ -233,6 +236,13 @@ class Params: raise Exception("failed to guess 'n_ctx'. This model is unknown or unsupported.\n" "Suggestion: provide 'config.json' of the model in the same directory containing model files.") + n_experts = None + n_experts_used = None + + if "num_local_experts" in config: + n_experts = config["num_local_experts"] + n_experts_used = config["num_experts_per_tok"] + return Params( n_vocab = config["vocab_size"], n_embd = config["hidden_size"], @@ -241,6 +251,8 @@ class Params: n_ff = config["intermediate_size"], n_head = (n_head := config["num_attention_heads"]), n_head_kv = config.get("num_key_value_heads", n_head), + n_experts = n_experts, + n_experts_used = n_experts_used, f_norm_eps = config["rms_norm_eps"], f_rope_freq_base = config.get("rope_theta"), rope_scaling_type = rope_scaling_type, @@ -255,8 +267,15 @@ class Params: def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params: config = json.load(open(config_path)) + n_experts = None + n_experts_used = None + f_rope_freq_base = None + # hack to determine LLaMA v1 vs v2 vs CodeLlama - if config.get("rope_theta") == 1000000: + if config.get("moe"): + # Mixtral + n_ctx = 32768 + elif config.get("rope_theta") == 1000000: # CodeLlama n_ctx = 16384 elif config["norm_eps"] == 1e-05: @@ -266,16 +285,27 @@ class Params: # LLaMA v1 n_ctx = 2048 + if "layers.0.feed_forward.w1.weight" in model: + n_ff = model["layers.0.feed_forward.w1.weight"].shape[0] + + if config.get("moe"): + n_ff = model["layers.0.feed_forward.experts.0.w1.weight"].shape[0] + n_experts = config["moe"]["num_experts"] + n_experts_used = config["moe"]["num_experts_per_tok"] + f_rope_freq_base = 1e6 + return Params( n_vocab = model["tok_embeddings.weight"].shape[0], n_embd = config["dim"], n_layer = config["n_layers"], n_ctx = n_ctx, - n_ff = model["layers.0.feed_forward.w1.weight"].shape[0], + n_ff = n_ff, n_head = (n_head := config["n_heads"]), n_head_kv = config.get("n_kv_heads", n_head), + n_experts = n_experts, + n_experts_used = n_experts_used, f_norm_eps = config["norm_eps"], - f_rope_freq_base = config.get("rope_theta"), + f_rope_freq_base = config.get("rope_theta", f_rope_freq_base), ) @staticmethod @@ -832,7 +862,17 @@ class OutputFile: self.gguf.add_rope_dimension_count(params.n_embd // params.n_head) self.gguf.add_head_count (params.n_head) self.gguf.add_head_count_kv (params.n_head_kv) - self.gguf.add_layer_norm_rms_eps (params.f_norm_eps) + + if params.n_experts: + self.gguf.add_expert_count(params.n_experts) + + if params.n_experts_used: + self.gguf.add_expert_used_count(params.n_experts_used) + + if params.f_norm_eps: + self.gguf.add_layer_norm_rms_eps(params.f_norm_eps) + else: + raise 
ValueError('f_norm_eps is None') if params.f_rope_freq_base is not None: self.gguf.add_rope_freq_base(params.f_rope_freq_base) @@ -956,7 +996,7 @@ class OutputFile: def pick_output_type(model: LazyModel, output_type_str: str | None) -> GGMLFileType: - wq_type = model[gguf.TENSOR_NAMES[gguf.MODEL_TENSOR.ATTN_Q].format(bid=0) +".weight"].data_type + wq_type = model[gguf.TENSOR_NAMES[gguf.MODEL_TENSOR.ATTN_Q].format(bid=0) + ".weight"].data_type if output_type_str == "f32" or (output_type_str is None and wq_type == DT_F32): return GGMLFileType.AllF32 diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 85f7a2937..9e1acd3f1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1,13 +1,15 @@ #include +#include +#include +#include #include #include -#include #include #include #include #include -#include -#include +#include + #if defined(GGML_USE_HIPBLAS) #include @@ -1684,31 +1686,65 @@ static __global__ void quantize_q8_1(const float * __restrict__ x, void * __rest } template -static __global__ void k_get_rows(const void * x, const int32_t * y, dst_t * dst, const int ncols) { - const int col = (blockIdx.x*blockDim.x + threadIdx.x)*2; - const int row = blockDim.y*blockIdx.y + threadIdx.y; +static __global__ void k_get_rows( + const void * src0, const int32_t * src1, dst_t * dst, + int64_t ne00, /*int64_t ne01, int64_t ne02, int64_t ne03,*/ + /*int64_t ne10, int64_t ne11,*/ int64_t ne12, /*int64_t ne13,*/ + /*size_t s0,*/ size_t s1, size_t s2, size_t s3, + /*size_t nb00,*/ size_t nb01, size_t nb02, size_t nb03, + size_t s10, size_t s11, size_t s12/*, size_t s13*/) { - if (col >= ncols) { + const int i00 = (blockIdx.x*blockDim.x + threadIdx.x)*2; + const int i10 = blockDim.y*blockIdx.y + threadIdx.y; + const int i11 = (blockIdx.z*blockDim.z + threadIdx.z)/ne12; + const int i12 = (blockIdx.z*blockDim.z + threadIdx.z)%ne12; + + if (i00 >= ne00) { return; } - const int r = y[row]; + const int i01 = src1[i10*s10 + i11*s11 + i12*s12]; - // copy x[r*ncols + col] to dst[row*ncols + col] - const int xi = r*ncols + col; - const int di = row*ncols + col; + dst_t * dst_row = dst + i10*s1 + i11*s2 + i12*s3; + const void * src0_row = (const char *)src0 + i01*nb01 + i11*nb02 + i12*nb03; - const int ib = xi/qk; // block index - const int iqs = (xi%qk)/qr; // quant index - const int iybs = di - di%qk; // y block start index + const int ib = i00/qk; // block index + const int iqs = (i00%qk)/qr; // quant index + const int iybs = i00 - i00%qk; // dst block start index const int y_offset = qr == 1 ? 
1 : qk/2; // dequantize dfloat2 v; - dequantize_kernel(x, ib, iqs, v); + dequantize_kernel(src0_row, ib, iqs, v); - dst[iybs + iqs + 0] = v.x; - dst[iybs + iqs + y_offset] = v.y; + dst_row[iybs + iqs + 0] = v.x; + dst_row[iybs + iqs + y_offset] = v.y; +} + +template +static __global__ void k_get_rows_float( + const src0_t * src0, const int32_t * src1, dst_t * dst, + int64_t ne00, /*int64_t ne01, int64_t ne02, int64_t ne03,*/ + /*int64_t ne10, int64_t ne11,*/ int64_t ne12, /*int64_t ne13,*/ + /*size_t s0,*/ size_t s1, size_t s2, size_t s3, + /*size_t nb00,*/ size_t nb01, size_t nb02, size_t nb03, + size_t s10, size_t s11, size_t s12/*, size_t s13*/) { + + const int i00 = blockIdx.x*blockDim.x + threadIdx.x; + const int i10 = blockDim.y*blockIdx.y + threadIdx.y; + const int i11 = (blockIdx.z*blockDim.z + threadIdx.z)/ne12; + const int i12 = (blockIdx.z*blockDim.z + threadIdx.z)%ne12; + + if (i00 >= ne00) { + return; + } + + const int i01 = src1[i10*s10 + i11*s11 + i12*s12]; + + dst_t * dst_row = dst + i10*s1 + i11*s2 + i12*s3; + const src0_t * src0_row = (const src0_t *)((const char *)src0 + i01*nb01 + i11*nb02 + i12*nb03); + + dst_row[i00] = src0_row[i00]; } template @@ -5053,11 +5089,69 @@ static __global__ void im2col_f32_f16( } template -static void get_rows_cuda(const void * x, const int32_t * y, float * dst, const int nrows, const int ncols, cudaStream_t stream) { +static void get_rows_cuda(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const void * src0_dd, const int32_t * src1_dd, float * dst_dd, cudaStream_t stream) { + + GGML_TENSOR_BINARY_OP_LOCALS + const dim3 block_dims(CUDA_GET_ROWS_BLOCK_SIZE, 1, 1); - const int block_num_x = (ncols + 2*CUDA_GET_ROWS_BLOCK_SIZE - 1) / (2*CUDA_GET_ROWS_BLOCK_SIZE); - const dim3 block_nums(block_num_x, nrows, 1); - k_get_rows<<>>(x, y, dst, ncols); + const int block_num_x = (ne00 + 2*CUDA_GET_ROWS_BLOCK_SIZE - 1) / (2*CUDA_GET_ROWS_BLOCK_SIZE); + const dim3 block_nums(block_num_x, ne10, ne11*ne12); + + // strides in elements + //const size_t s0 = nb0 / ggml_element_size(dst); + const size_t s1 = nb1 / ggml_element_size(dst); + const size_t s2 = nb2 / ggml_element_size(dst); + const size_t s3 = nb3 / ggml_element_size(dst); + + const size_t s10 = nb10 / ggml_element_size(src1); + const size_t s11 = nb11 / ggml_element_size(src1); + const size_t s12 = nb12 / ggml_element_size(src1); + //const size_t s13 = nb13 / ggml_element_size(src1); + + GGML_ASSERT(ne00 % 2 == 0); + + k_get_rows<<>>( + src0_dd, src1_dd, dst_dd, + ne00, /*ne01, ne02, ne03,*/ + /*ne10, ne11,*/ ne12, /*ne13,*/ + /* s0,*/ s1, s2, s3, + /* nb00,*/ nb01, nb02, nb03, + s10, s11, s12/*, s13*/); + + (void) dst; +} + +template +static void get_rows_cuda_float(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const src0_t * src0_dd, const int32_t * src1_dd, float * dst_dd, cudaStream_t stream) { + + GGML_TENSOR_BINARY_OP_LOCALS + + const dim3 block_dims(CUDA_GET_ROWS_BLOCK_SIZE, 1, 1); + const int block_num_x = (ne00 + CUDA_GET_ROWS_BLOCK_SIZE - 1) / CUDA_GET_ROWS_BLOCK_SIZE; + const dim3 block_nums(block_num_x, ne10, ne11*ne12); + + // strides in elements + //const size_t s0 = nb0 / ggml_element_size(dst); + const size_t s1 = nb1 / ggml_element_size(dst); + const size_t s2 = nb2 / ggml_element_size(dst); + const size_t s3 = nb3 / ggml_element_size(dst); + + const size_t s10 = nb10 / ggml_element_size(src1); + const size_t s11 = nb11 / ggml_element_size(src1); + const size_t s12 = nb12 / ggml_element_size(src1); + //const size_t s13 = nb13 
/ ggml_element_size(src1); + + k_get_rows_float<<>>( + src0_dd, src1_dd, dst_dd, + ne00, /*ne01, ne02, ne03,*/ + /*ne10, ne11,*/ ne12, /*ne13,*/ + /* s0,*/ s1, s2, s3, + /* nb00,*/ nb01, nb02, nb03, + s10, s11, s12/*, s13*/); + + (void) dst; } template @@ -5069,7 +5163,6 @@ struct bin_bcast_cuda { GGML_TENSOR_BINARY_OP_LOCALS - int nr0 = ne10/ne0; int nr1 = ne11/ne1; int nr2 = ne12/ne2; @@ -5117,26 +5210,28 @@ struct bin_bcast_cuda { int64_t ne12 = cne1[2]; int64_t ne13 = cne1[3]; - //size_t nb0 = cnb0[0]; + size_t nb0 = cnb0[0]; size_t nb1 = cnb0[1]; size_t nb2 = cnb0[2]; size_t nb3 = cnb0[3]; - //size_t nb10 = cnb1[0]; + size_t nb10 = cnb1[0]; size_t nb11 = cnb1[1]; size_t nb12 = cnb1[2]; size_t nb13 = cnb1[3]; - //size_t s0 = nb0 / sizeof(src1_t); + size_t s0 = nb0 / sizeof(src1_t); size_t s1 = nb1 / sizeof(src1_t); size_t s2 = nb2 / sizeof(src1_t); size_t s3 = nb3 / sizeof(src1_t); - //size_t s10 = nb10 / sizeof(src1_t); + size_t s10 = nb10 / sizeof(src1_t); size_t s11 = nb11 / sizeof(src1_t); size_t s12 = nb12 / sizeof(src1_t); size_t s13 = nb13 / sizeof(src1_t); + GGML_ASSERT(s0 == 1); + GGML_ASSERT(s10 == 1); const int block_size = 128; @@ -6447,36 +6542,34 @@ static void ggml_cuda_op_get_rows( GGML_ASSERT(src1->type == GGML_TYPE_I32); GGML_ASSERT(dst->type == GGML_TYPE_F32); - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(src1)); - GGML_ASSERT(ggml_is_contiguous(dst)); - const int ncols = src0->ne[0]; - const int nrows = ggml_nelements(src1); + GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); + GGML_ASSERT(src1->nb[0] == ggml_type_size(src1->type)); + GGML_ASSERT(dst->nb[0] == ggml_type_size(dst->type)); const int32_t * src1_i32 = (const int32_t *) src1_d; switch (src0->type) { case GGML_TYPE_F16: - get_rows_cuda<1, 1, convert_f16>(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda_float(src0, src1, dst, (const half *)src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_F32: - get_rows_cuda<1, 1, convert_f32>(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda_float(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_Q4_0: - get_rows_cuda(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_Q4_1: - get_rows_cuda(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_Q5_0: - get_rows_cuda(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_Q5_1: - get_rows_cuda(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; case GGML_TYPE_Q8_0: - get_rows_cuda(src0_d, src1_i32, dst_d, nrows, ncols, stream); + get_rows_cuda(src0, src1, dst, src0_d, src1_i32, dst_d, stream); break; default: // TODO: k-quants @@ -8234,36 +8327,69 @@ static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { } #endif -static void ggml_cuda_mul_mat_id(const ggml_tensor * _src0, const ggml_tensor * _src1, ggml_tensor * dst) { +static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #if 0 -//#ifdef CUDA_USE_TENSOR_CORES -// const bool use_tensor_cores = true; -//#else -// const bool use_tensor_cores = false; -//#endif - ggml_cuda_mul_mat_id_cublas(dst); - // TODO: mmq/mmv support -#else - const struct ggml_tensor * ids = dst->src[0]; - const struct ggml_tensor * src1 = 
dst->src[1]; - const int id = dst->op_params[0]; - - int32_t * ids_dev = (int32_t *)((ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; - - int32_t a_id; - CUDA_CHECK(cudaMemcpyAsync(&a_id, ids_dev + id, sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); - - GGML_ASSERT(a_id >= 0 && a_id < ids->ne[0]); - const struct ggml_tensor * src0 = dst->src[a_id + 2]; - - ggml_cuda_mul_mat(src0, src1, dst); #endif - (void) _src0; - (void) _src1; + GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + + const struct ggml_tensor * ids = src0; + const int32_t id = ((int32_t *) dst->op_params)[0]; + const int32_t n_as = ((int32_t *) dst->op_params)[1]; + + std::vector ids_host(ggml_nbytes(ids)); + + if (ids->backend == GGML_BACKEND_GPU) { + const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; + CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); + CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + } else { + memcpy(ids_host.data(), ids->data, ggml_nbytes(ids)); + } + + const ggml_tensor_extra_gpu * src1_extra = (const ggml_tensor_extra_gpu *) src1->extra; + const ggml_tensor_extra_gpu * dst_extra = (const ggml_tensor_extra_gpu *) dst->extra; + + ggml_tensor_extra_gpu src1_row_extra; + ggml_tensor_extra_gpu dst_row_extra; + + ggml_tensor src1_row = *src1; + ggml_tensor dst_row = *dst; + + src1_row.ne[1] = 1; + dst_row.ne[1] = 1; + + src1_row.nb[2] = src1_row.nb[1]; + dst_row.nb[2] = dst_row.nb[1]; + + src1_row.nb[3] = src1_row.nb[1]; + dst_row.nb[3] = dst_row.nb[1]; + + src1_row.extra = &src1_row_extra; + dst_row.extra = &dst_row_extra; + + + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + //int32_t row_id; + //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); + //CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + + const int32_t row_id = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + + src1_row_extra.data_device[g_main_device] = (char *) src1_extra->data_device[g_main_device] + i01*src1->nb[1]; + src1_row.data = (char *) src1->data + i01*src1->nb[1]; + + dst_row_extra.data_device[g_main_device] = (char *) dst_extra->data_device[g_main_device] + i01*dst->nb[1]; + dst_row.data = (char *) dst->data + i01*dst->nb[1]; + + ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + } } static void ggml_cuda_scale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -9181,6 +9307,45 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten } return true; } break; + case GGML_OP_GET_ROWS: + { + switch (op->src[0]->type) { + case GGML_TYPE_F16: + case GGML_TYPE_F32: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + return true; + default: + return false; + } + } break; + case GGML_OP_CPY: + { + ggml_type src0_type = op->src[0]->type; + ggml_type src1_type = op->src[1]->type; + if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F32) { + return true; + } + if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F16) { + return true; + } + if (src0_type == GGML_TYPE_F32 && src1_type == 
GGML_TYPE_Q8_0) { + return true; + } + if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_Q4_0) { + return true; + } + if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_Q4_1) { + return true; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return true; + } + return false; + } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: @@ -9188,7 +9353,6 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_TRANSPOSE: case GGML_OP_NORM: case GGML_OP_REPEAT: - case GGML_OP_GET_ROWS: case GGML_OP_DUP: case GGML_OP_ADD: case GGML_OP_MUL: @@ -9197,7 +9361,6 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_SCALE: case GGML_OP_SQR: case GGML_OP_CLAMP: - case GGML_OP_CPY: case GGML_OP_CONT: case GGML_OP_DIAG_MASK_INF: case GGML_OP_SOFT_MAX: @@ -9264,7 +9427,9 @@ static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * use UNUSED(params); } -extern "C" int ggml_backend_cuda_reg_devices() { +extern "C" int ggml_backend_cuda_reg_devices(); + +int ggml_backend_cuda_reg_devices() { int device_count = ggml_cuda_get_device_count(); //int device_count = 1; // DEBUG: some tools require delaying CUDA initialization for (int i = 0; i < device_count; i++) { diff --git a/ggml-metal.m b/ggml-metal.m index f9bd69dc8..1dcfa6edd 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -102,6 +102,21 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); + //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); + GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); + //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_1row); + //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_l4); + GGML_METAL_DECL_KERNEL(mul_mv_id_q4_0_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q4_1_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q5_0_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q5_1_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q8_0_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q2_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q3_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q4_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); @@ -140,6 +155,7 @@ struct ggml_metal_context { //GGML_METAL_DECL_KERNEL(cpy_f32_q5_0); //GGML_METAL_DECL_KERNEL(cpy_f32_q5_1); GGML_METAL_DECL_KERNEL(cpy_f16_f16); + GGML_METAL_DECL_KERNEL(cpy_f16_f32); GGML_METAL_DECL_KERNEL(concat); GGML_METAL_DECL_KERNEL(sqr); GGML_METAL_DECL_KERNEL(sum_rows); @@ -177,6 +193,8 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ ggml_metal_log_callback(level, buffer, ggml_metal_log_user_data); } else { char* buffer2 = malloc(len+1); + va_end(args); + va_start(args, format); vsnprintf(buffer2, len+1, format, args); buffer2[len] = 0; ggml_metal_log_callback(level, buffer2, ggml_metal_log_user_data); @@ -352,6 +370,21 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); + //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); + GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); + //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_1row); + //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_l4); + 
GGML_METAL_ADD_KERNEL(mul_mv_id_q4_0_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q4_1_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q5_0_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q5_1_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q8_0_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q2_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q3_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q4_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); @@ -392,6 +425,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { //GGML_METAL_ADD_KERNEL(cpy_f32_q5_0); //GGML_METAL_ADD_KERNEL(cpy_f32_q5_1); GGML_METAL_ADD_KERNEL(cpy_f16_f16); + GGML_METAL_ADD_KERNEL(cpy_f16_f32); GGML_METAL_ADD_KERNEL(concat); GGML_METAL_ADD_KERNEL(sqr); GGML_METAL_ADD_KERNEL(sum_rows); @@ -452,6 +486,21 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); + //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); + GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); + //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_1row); + //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_l4); + GGML_METAL_DEL_KERNEL(mul_mv_id_q4_0_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q4_1_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q5_0_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q5_1_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q8_0_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q2_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q3_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q4_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); @@ -492,6 +541,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { //GGML_METAL_DEL_KERNEL(cpy_f32_q5_0); //GGML_METAL_DEL_KERNEL(cpy_f32_q5_1); GGML_METAL_DEL_KERNEL(cpy_f16_f16); + GGML_METAL_DEL_KERNEL(cpy_f16_f32); GGML_METAL_DEL_KERNEL(concat); GGML_METAL_DEL_KERNEL(sqr); GGML_METAL_DEL_KERNEL(sum_rows); @@ -803,8 +853,9 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: - case GGML_OP_TRANSPOSE: case GGML_OP_PERMUTE: + case GGML_OP_TRANSPOSE: + case GGML_OP_GET_ROWS: case GGML_OP_CONCAT: case GGML_OP_ADD: case GGML_OP_MUL: @@ -819,14 +870,38 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_ROPE: case GGML_OP_IM2COL: case GGML_OP_ARGSORT: - case GGML_OP_DUP: - case GGML_OP_CPY: - case GGML_OP_CONT: case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: return true; + case GGML_OP_CPY: + case GGML_OP_DUP: + case GGML_OP_CONT: + { + switch (op->src[0]->type) { + case GGML_TYPE_F32: + switch (op->type) { + case GGML_TYPE_F16: + case GGML_TYPE_F32: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + return true; + default: + return false; + } + case GGML_TYPE_F16: + switch (op->type) { + case GGML_TYPE_F16: + case GGML_TYPE_F32: + return true; + default: + return false; + } + default: + return false; + }; + } case GGML_OP_DIAG_MASK_INF: - case GGML_OP_GET_ROWS: { return op->ne[0] % 4 == 0; } @@ -1001,34 +1076,37 @@ void ggml_metal_graph_compute( case GGML_OP_MUL: case GGML_OP_DIV: { - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(src1)); - bool 
bcast_row = false; int64_t nb = ne00; - if (ggml_nelements(src1) == ne10 && ne00 % 4 == 0) { + id pipeline = nil; + + if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { + GGML_ASSERT(ggml_is_contiguous(src0)); + // src1 is a row GGML_ASSERT(ne11 == 1); nb = ne00 / 4; switch (dst->op) { - case GGML_OP_ADD: [encoder setComputePipelineState:ctx->pipeline_add_row]; break; - case GGML_OP_MUL: [encoder setComputePipelineState:ctx->pipeline_mul_row]; break; - case GGML_OP_DIV: [encoder setComputePipelineState:ctx->pipeline_div_row]; break; + case GGML_OP_ADD: pipeline = ctx->pipeline_add_row; break; + case GGML_OP_MUL: pipeline = ctx->pipeline_mul_row; break; + case GGML_OP_DIV: pipeline = ctx->pipeline_div_row; break; default: GGML_ASSERT(false); } bcast_row = true; } else { switch (dst->op) { - case GGML_OP_ADD: [encoder setComputePipelineState:ctx->pipeline_add]; break; - case GGML_OP_MUL: [encoder setComputePipelineState:ctx->pipeline_mul]; break; - case GGML_OP_DIV: [encoder setComputePipelineState:ctx->pipeline_div]; break; + case GGML_OP_ADD: pipeline = ctx->pipeline_add; break; + case GGML_OP_MUL: pipeline = ctx->pipeline_mul; break; + case GGML_OP_DIV: pipeline = ctx->pipeline_div; break; default: GGML_ASSERT(false); } } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1063,7 +1141,7 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } else { - const int nth = MIN(1024, ne0); + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } @@ -1193,7 +1271,11 @@ void ggml_metal_graph_compute( const float scale = ((float *) dst->op_params)[0]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + if (id_src1) { + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + } [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; @@ -1444,7 +1526,7 @@ void ggml_metal_graph_compute( else if (src0t == GGML_TYPE_Q6_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else { - int64_t ny = (ne11 + nrows - 1)/nrows; + const int64_t ny = (ne11 + nrows - 1)/nrows; [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } } @@ -1456,7 +1538,7 @@ void ggml_metal_graph_compute( GGML_ASSERT(src0t == GGML_TYPE_I32); - const int n_as = ne00; + const int n_as = ((int32_t *) dst->op_params)[1]; // TODO: make this more general GGML_ASSERT(n_as <= 8); @@ -1488,14 +1570,22 @@ void ggml_metal_graph_compute( // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = 0; + int ne11_mm_min = 1; const int idx = ((int32_t *) dst->op_params)[0]; + // batch size + GGML_ASSERT(ne01 == ne11); + + const int64_t _ne1 = 1; // kernel_mul_mm_impl needs a reference in constant memory + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs // 
AMD GPU and older A-chips will reuse matrix-vector multiplication kernel - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && - ne11 > ne11_mm_min) { + // !!! + // TODO: for now, always use mat-vec kernels until we figure out how to improve the + // indirect matrix multiplication + // !!! + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && _ne1 > ne11_mm_min) { switch (src2->type) { case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; @@ -1514,19 +1604,22 @@ void ggml_metal_graph_compute( [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:3]; - [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:4]; - [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:5]; - [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:6]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; - [encoder setBytes:&idx length:sizeof(idx) atIndex:15]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:5]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:7]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:8]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:9]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:10]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:16]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; // TODO: how to make this an array? 
read Metal docs for (int j = 0; j < n_as; ++j) { struct ggml_tensor * src_cur = dst->src[2 + j]; @@ -1534,11 +1627,157 @@ void ggml_metal_graph_compute( size_t offs_src_cur = 0; id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); - [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:16 + j]; + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:19 + j]; } [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne21 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + + // TODO: processing one row at a time (ne11 -> 1) is not efficient + [encoder dispatchThreadgroups:MTLSizeMake( (_ne1 + 31)/32, (ne21 + 63)/64, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + // use custom matrix x vector kernel + switch (src2t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f32_f32]; + } break; + case GGML_TYPE_F16: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + nth0 = 32; + nth1 = 1; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f16_f32]; + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_0_f32]; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_1_f32]; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_0_f32]; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_1_f32]; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q8_0_f32]; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q2_K_f32]; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q3_K_f32]; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_K_f32]; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_K_f32]; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q6_K_f32]; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_ASSERT(false && "not implemented"); + } + }; + + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; + [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:6]; + [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:7]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:8]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) 
atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:18]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:20]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; + // TODO: how to make this an array? read Metal docs + for (int j = 0; j < n_as; ++j) { + struct ggml_tensor * src_cur = dst->src[2 + j]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; + } + + if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || + src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q3_K) { +#ifdef GGML_QKK_64 + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#else + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#endif + } + else if (src2t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else { + const int64_t ny = (_ne1 + nrows - 1)/nrows; + [encoder dispatchThreadgroups:MTLSizeMake(ne21, ny, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } } } break; case GGML_OP_GET_ROWS: @@ -1559,16 +1798,19 @@ void ggml_metal_graph_compute( default: GGML_ASSERT(false && "not implemented"); } - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; - const int64_t n = ggml_nelements(src1); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) 
threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; } break; case GGML_OP_RMS_NORM: { @@ -1813,7 +2055,7 @@ void ggml_metal_graph_compute( { switch (dstt) { case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f16]; break; - case GGML_TYPE_F32: GGML_ASSERT(false && "cpy_f16_f32 not implemented"); break; + case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f32]; break; default: GGML_ASSERT(false && "not implemented"); }; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index 2f8ea22d6..773fac124 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -347,9 +347,9 @@ kernel void kernel_soft_max( const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); - device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; - device const float * pmask = src1 ? src1 + i01*ne00 : nullptr; - device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + device const float * pmask = src1 != src0 ? src1 + i01*ne00 : nullptr; + device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; // parallel max float lmax = -INFINITY; @@ -385,7 +385,12 @@ kernel void kernel_soft_max( pdst[i00] = exp_psrc0; } + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + float sum = simd_sum(lsum); + if (ntg > N_SIMDWIDTH) { if (sgitg == 0) { buf[tiisg] = 0.0f; @@ -428,9 +433,9 @@ kernel void kernel_soft_max_4( const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); - device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); - device const float4 * pmask = src1 ? (device const float4 *)(src1 + i01*ne00) : nullptr; - device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const float4 * pmask = src1 != src0 ? (device const float4 *)(src1 + i01*ne00) : nullptr; + device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); // parallel max float4 lmax4 = -INFINITY; @@ -468,7 +473,13 @@ kernel void kernel_soft_max_4( } const float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; + + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + float sum = simd_sum(lsum); + if (ntg > N_SIMDWIDTH) { if (sgitg == 0) { buf[tiisg] = 0.0f; @@ -731,7 +742,7 @@ inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thre // giard against the number of rows not being divisible by // N_DST, so this is another explicit assumption of the implementation. 
template -void mul_vec_q_n_f32( +void mul_vec_q_n_f32_impl( device const void * src0, device const float * src1, device float * dst, @@ -813,7 +824,7 @@ kernel void kernel_mul_mv_q4_0_f32( uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); + mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q4_1_f32( @@ -832,7 +843,7 @@ kernel void kernel_mul_mv_q4_1_f32( uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); + mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q5_0_f32( @@ -851,7 +862,7 @@ kernel void kernel_mul_mv_q5_0_f32( uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); + mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } kernel void kernel_mul_mv_q5_1_f32( @@ -870,28 +881,28 @@ kernel void kernel_mul_mv_q5_1_f32( uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { - mul_vec_q_n_f32(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); + mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); } #define NB_Q8_0 8 -kernel void kernel_mul_mv_q8_0_f32( +void kernel_mul_mv_q8_0_f32_impl( device const void * src0, device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int nr = N_DST; const int nsg = N_SIMDGROUP; const int nw = N_SIMDWIDTH; @@ -945,9 +956,29 @@ kernel void kernel_mul_mv_q8_0_f32( } } +[[host_name("kernel_mul_mv_q8_0_f32")]] +kernel void kernel_mul_mv_q8_0_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + kernel_mul_mv_q8_0_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,tgpig,tiisg,sgitg); +} + #define N_F32_F32 4 -kernel void 
kernel_mul_mv_f32_f32( +void kernel_mul_mv_f32_f32_impl( device const char * src0, device const char * src1, device float * dst, @@ -965,8 +996,8 @@ kernel void kernel_mul_mv_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1025,6 +1056,32 @@ kernel void kernel_mul_mv_f32_f32( } } +[[host_name("kernel_mul_mv_f32_f32")]] +kernel void kernel_mul_mv_f32_f32( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + kernel_mul_mv_f32_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); +} + #define N_F16_F16 4 kernel void kernel_mul_mv_f16_f16( @@ -1105,7 +1162,7 @@ kernel void kernel_mul_mv_f16_f16( } } -kernel void kernel_mul_mv_f16_f32_1row( +void kernel_mul_mv_f16_f32_1row_impl( device const char * src0, device const char * src1, device float * dst, @@ -1123,8 +1180,8 @@ kernel void kernel_mul_mv_f16_f32_1row( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1161,12 +1218,10 @@ kernel void kernel_mul_mv_f16_f32_1row( dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; } } - } -#define N_F16_F32 4 - -kernel void kernel_mul_mv_f16_f32( +[[host_name("kernel_mul_mv_f16_f32_1row")]] +kernel void kernel_mul_mv_f16_f32_1row( device const char * src0, device const char * src1, device float * dst, @@ -1187,6 +1242,33 @@ kernel void kernel_mul_mv_f16_f32( constant uint & r2 [[buffer(17)]], constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + kernel_mul_mv_f16_f32_1row_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); +} + +#define N_F16_F32 4 + +void kernel_mul_mv_f16_f32_impl( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { const int64_t r0 = tgpig.x; @@ -1244,6 +1326,32 @@ kernel void kernel_mul_mv_f16_f32( } } +[[host_name("kernel_mul_mv_f16_f32")]] +kernel void kernel_mul_mv_f16_f32( + device const char * src0, + 
device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + kernel_mul_mv_f16_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); +} + // Assumes row size (ne00) is a multiple of 4 kernel void kernel_mul_mv_f16_f32_l4( device const char * src0, @@ -1601,8 +1709,8 @@ template [[host_name("kernel_argsort_f32_i32_asc")]] kernel argsort_t kernel_ar template [[host_name("kernel_argsort_f32_i32_desc")]] kernel argsort_t kernel_argsort_f32_i32; kernel void kernel_cpy_f16_f16( - device const half * src0, - device half * dst, + device const half * src0, + device half * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -1641,6 +1749,47 @@ kernel void kernel_cpy_f16_f16( } } +kernel void kernel_cpy_f16_f32( + device const half * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + + device float * dst_data = (device float *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x; i00 < ne00; i00 += ntg.x) { + device const half * src = (device half *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + dst_data[i00] = src[0]; + } +} + kernel void kernel_cpy_f32_f16( device const float * src0, device half * dst, @@ -2064,19 +2213,19 @@ static inline uchar4 get_scale_min_k4(int j, device const uint8_t * q) { //====================================== dot products ========================= -kernel void kernel_mul_mv_q2_K_f32( +void kernel_mul_mv_q2_K_f32_impl( device const void * src0, device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + 
constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2214,8 +2363,8 @@ kernel void kernel_mul_mv_q2_K_f32( } } -#if QK_K == 256 -kernel void kernel_mul_mv_q3_K_f32( +[[host_name("kernel_mul_mv_q2_K_f32")]] +kernel void kernel_mul_mv_q2_K_f32( device const void * src0, device const float * src1, device float * dst, @@ -2229,8 +2378,29 @@ kernel void kernel_mul_mv_q3_K_f32( constant uint & r2 [[buffer(17)]], constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q2_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + +#if QK_K == 256 +void kernel_mul_mv_q3_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int nb = ne00/QK_K; @@ -2373,19 +2543,19 @@ kernel void kernel_mul_mv_q3_K_f32( } } #else -kernel void kernel_mul_mv_q3_K_f32( +void kernel_mul_mv_q3_K_f32_impl( device const void * src0, device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2450,20 +2620,41 @@ kernel void kernel_mul_mv_q3_K_f32( } #endif -#if QK_K == 256 -kernel void kernel_mul_mv_q4_K_f32( +[[host_name("kernel_mul_mv_q3_K_f32")]] +kernel void kernel_mul_mv_q3_K_f32( device const void * src0, device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01 [[buffer(4)]], - constant int64_t & ne02 [[buffer(5)]], - constant int64_t & ne10 [[buffer(9)]], - constant int64_t & ne12 [[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01[[buffer(4)]], + constant int64_t & ne02[[buffer(5)]], + constant int64_t & ne10[[buffer(9)]], + constant int64_t & ne12[[buffer(11)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q3_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, 
tgpig, tiisg, sgitg); +} + +#if QK_K == 256 +void kernel_mul_mv_q4_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2564,19 +2755,19 @@ kernel void kernel_mul_mv_q4_K_f32( } } #else -kernel void kernel_mul_mv_q4_K_f32( +void kernel_mul_mv_q4_K_f32_impl( device const void * src0, device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2660,7 +2851,8 @@ kernel void kernel_mul_mv_q4_K_f32( } #endif -kernel void kernel_mul_mv_q5_K_f32( +[[host_name("kernel_mul_mv_q4_K_f32")]] +kernel void kernel_mul_mv_q4_K_f32( device const void * src0, device const float * src1, device float * dst, @@ -2677,6 +2869,26 @@ kernel void kernel_mul_mv_q5_K_f32( uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { + kernel_mul_mv_q4_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q5_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + const int nb = ne00/QK_K; const int64_t r0 = tgpig.x; @@ -2836,10 +3048,10 @@ kernel void kernel_mul_mv_q5_K_f32( dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot; } } - } -kernel void kernel_mul_mv_q6_K_f32( +[[host_name("kernel_mul_mv_q5_K_f32")]] +kernel void kernel_mul_mv_q5_K_f32( device const void * src0, device const float * src1, device float * dst, @@ -2853,8 +3065,28 @@ kernel void kernel_mul_mv_q6_K_f32( constant uint & r2 [[buffer(17)]], constant uint & r3 [[buffer(18)]], uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q5_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q6_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + 
constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const uint8_t kmask1 = 0x03; const uint8_t kmask2 = 0x0C; @@ -2945,6 +3177,27 @@ kernel void kernel_mul_mv_q6_K_f32( } } +[[host_name("kernel_mul_mv_q6_K_f32")]] +kernel void kernel_mul_mv_q6_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01[[buffer(4)]], + constant int64_t & ne02[[buffer(5)]], + constant int64_t & ne10[[buffer(9)]], + constant int64_t & ne12[[buffer(11)]], + constant int64_t & ne0 [[buffer(15)]], + constant int64_t & ne1 [[buffer(16)]], + constant uint & r2 [[buffer(17)]], + constant uint & r3 [[buffer(18)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q6_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -3219,22 +3472,90 @@ void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg template kernel void kernel_get_rows( device const void * src0, - device const int * src1, + device const char * src1, device float * dst, constant int64_t & ne00, constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, constant uint64_t & nb1, - uint tgpig[[threadgroup_position_in_grid]], + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], uint tiitg[[thread_index_in_threadgroup]], - uint tptg[[threads_per_threadgroup]]) { - const int i = tgpig; - const int r = ((device int32_t *) src1)[i]; + uint3 tptg [[threads_per_threadgroup]]) { + //const int64_t i = tgpig; + //const int64_t r = ((device int32_t *) src1)[i]; - for (int ind = tiitg; ind < ne00/16; ind += tptg) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((device int32_t *) ((device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int64_t ind = tiitg; ind < ne00/16; ind += tptg.x) { float4x4 temp; dequantize_func( - ((device const block_q *) ((device char *) src0 + r*nb01)) + ind/nl, ind%nl, temp); - *(((device float4x4 *) ((device char *) dst + i*nb1)) + ind) = temp; + ((device const block_q *) ((device char *) src0 + r*nb01 + i02*nb02)) + ind/nl, ind%nl, temp); + *(((device float4x4 *) ((device char *) dst + i11*nb2 + i10*nb1)) + ind) = temp; + } +} + +kernel void kernel_get_rows_f32( + device const void * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((device int32_t *) ((device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + ((device float *) ((device char *) dst + i11*nb2 + i10*nb1))[ind] = + 
((device float *) ((device char *) src0 + r*nb01 + i02*nb02))[ind]; + } +} + +kernel void kernel_get_rows_f16( + device const void * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((device int32_t *) ((device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + ((device float *) ((device char *) dst + i11*nb2 + i10*nb1))[ind] = + ((device half *) ((device char *) src0 + r*nb01 + i02*nb02))[ind]; } } @@ -3426,19 +3747,22 @@ kernel void kernel_mul_mm(device const uchar * src0, template kernel void kernel_mul_mm_id( - device const int32_t * ids, + device const uchar * ids, device const uchar * src1, - device float * dst, + device uchar * dst, + constant int64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, constant int64_t & nb01, constant int64_t & nb02, constant int64_t & ne12, + constant int64_t & ne13, constant int64_t & nb10, constant int64_t & nb11, constant int64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant int64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -3456,10 +3780,16 @@ kernel void kernel_mul_mm_id( uint sgitg[[simdgroup_index_in_threadgroup]]) { device const uchar * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + kernel_mul_mm_impl( - src0[ids[idx]], - src1, - dst, + src0[id], + src1 + bid*nb11, + (device float *) (dst + bid*nb1), ne00, ne02, nb01, @@ -3484,17 +3814,26 @@ kernel void kernel_mul_mm_id( #define QK_NL 4 #endif +// +// get rows +// + typedef void (get_rows_t)( device const void * src0, - device const int * src1, + device const char * src1, device float * dst, constant int64_t & ne00, constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, constant uint64_t & nb1, - uint, uint, uint); + constant uint64_t & nb2, + uint3, uint, uint3); -template [[host_name("kernel_get_rows_f32")]] kernel get_rows_t kernel_get_rows; -template [[host_name("kernel_get_rows_f16")]] kernel get_rows_t kernel_get_rows; +//template [[host_name("kernel_get_rows_f32")]] kernel get_rows_t kernel_get_rows; +//template [[host_name("kernel_get_rows_f16")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q4_0")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q4_1")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q5_0")]] kernel get_rows_t kernel_get_rows; @@ -3506,6 +3845,10 @@ template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows; +// +// matrix-matrix multiplication +// + typedef void (mat_mm_t)( device const uchar * src0, device const uchar * src1, @@ -3538,20 +3881,27 @@ template 
[[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; +// +// indirect matrix-matrix multiplication +// + typedef void (mat_mm_id_t)( - device const int32_t * ids, + device const uchar * ids, device const uchar * src1, - device float * dst, + device uchar * dst, + constant int64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, constant int64_t & nb01, constant int64_t & nb02, constant int64_t & ne12, + constant int64_t & ne13, constant int64_t & nb10, constant int64_t & nb11, constant int64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, + constant int64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -3578,3 +3928,775 @@ template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; + +// +// matrix-vector multiplication +// + +[[host_name("kernel_mul_mv_id_f32_f32")]] +kernel void kernel_mul_mv_id_f32_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_f32_f32_impl( + src0[id], + src1 + bid*nb11, + (device float *) (dst + bid*nb1), + ne00, + ne01, + ne02, + nb00, + nb01, + nb02, + ne10, + ne11, + ne12, + nb10, + nb11, + nb12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg); +} + +[[host_name("kernel_mul_mv_id_f16_f32")]] +kernel void kernel_mul_mv_id_f16_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, 
+ device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_f16_f32_impl( + src0[id], + src1 + bid*nb11, + (device float *) (dst + bid*nb1), + ne00, + ne01, + ne02, + nb00, + nb01, + nb02, + ne10, + ne11, + ne12, + nb10, + nb11, + nb12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg); +} + +[[host_name("kernel_mul_mv_id_q8_0_f32")]] +kernel void kernel_mul_mv_id_q8_0_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q8_0_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q4_0_f32")]] +kernel void kernel_mul_mv_id_q4_0_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + 
const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + mul_vec_q_n_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q4_1_f32")]] +kernel void kernel_mul_mv_id_q4_1_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + mul_vec_q_n_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q5_0_f32")]] +kernel void kernel_mul_mv_id_q5_0_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + mul_vec_q_n_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q5_1_f32")]] +kernel void 
kernel_mul_mv_id_q5_1_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + mul_vec_q_n_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q2_K_f32")]] +kernel void kernel_mul_mv_id_q2_K_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q2_K_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q3_K_f32")]] +kernel void kernel_mul_mv_id_q3_K_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & 
nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q3_K_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q4_K_f32")]] +kernel void kernel_mul_mv_id_q4_K_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q4_K_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q5_K_f32")]] +kernel void kernel_mul_mv_id_q5_K_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device 
const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q5_K_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} + +[[host_name("kernel_mul_mv_id_q6_K_f32")]] +kernel void kernel_mul_mv_id_q6_K_f32( + device const char * ids, + device const char * src1, + device uchar * dst, + constant int64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_q6_K_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + (device float *) ( dst + bid*nb1), + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml.c b/ggml.c index eb7989dc4..66658ff4b 100644 --- a/ggml.c +++ b/ggml.c @@ -4075,17 +4075,18 @@ struct ggml_tensor * ggml_mul_mat( struct ggml_tensor * ggml_mul_mat_id( struct ggml_context * ctx, - struct ggml_tensor * as[], + struct ggml_tensor * const as[], + int n_as, struct ggml_tensor * ids, int id, struct ggml_tensor * b) { - int64_t n_as = ids->ne[0]; - GGML_ASSERT(ids->type == GGML_TYPE_I32); - GGML_ASSERT(ggml_is_vector(ids)); + GGML_ASSERT(ids->ne[2] == 1 && ids->ne[3] == 1); + GGML_ASSERT(ids->ne[1] == b->ne[1]); + GGML_ASSERT(ids->ne[2] == b->ne[2] && ids->ne[3] == b->ne[3]); GGML_ASSERT(n_as > 0 && n_as <= GGML_MAX_SRC - 2); - GGML_ASSERT(id >= 0 && id < n_as); + GGML_ASSERT(id >= 0 && id < ids->ne[0]); bool is_node = false; @@ -4097,13 +4098,14 @@ struct ggml_tensor * ggml_mul_mat_id( struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, MAX(as[0]->n_dims, b->n_dims), ne); ggml_set_op_params_i32(result, 0, id); + ggml_set_op_params_i32(result, 1, n_as); result->op = GGML_OP_MUL_MAT_ID; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = ids; result->src[1] = b; - for (int64_t i = 0; i < n_as; i++) { + for (int i = 0; i < n_as; i++) { struct ggml_tensor * a = as[i]; GGML_ASSERT(ggml_are_same_shape(as[0], a)); GGML_ASSERT(ggml_can_mul_mat(a, b)); @@ -4731,7 +4733,9 @@ struct ggml_tensor * ggml_get_rows( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b) { - GGML_ASSERT(ggml_is_matrix(a) && ggml_is_vector(b) && b->type == GGML_TYPE_I32); + GGML_ASSERT(a->ne[2] == b->ne[1]); + GGML_ASSERT(b->ne[3] == 1); + GGML_ASSERT(b->type == GGML_TYPE_I32); bool is_node = false; @@ -4741,7 +4745,7 @@ struct ggml_tensor * ggml_get_rows( // TODO: implement non F32 return //struct ggml_tensor * result = ggml_new_tensor_2d(ctx, a->type, a->ne[0], b->ne[0]); - struct ggml_tensor * result = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, a->ne[0], b->ne[0]); + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); result->op = GGML_OP_GET_ROWS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -9504,8 +9508,11 @@ static bool ggml_compute_forward_mul_mat_use_blas( const int64_t ne0 = dst->ne[0]; const int64_t ne1 = dst->ne[1]; + // NOTE: with GGML_OP_MUL_MAT_ID we don't want to go through the BLAS branch because it will dequantize (to_float) + // all the experts for each batch element and the processing would become incredibly slow // TODO: find the optimal values for these - if (ggml_is_contiguous(src0) && + if (dst->op != GGML_OP_MUL_MAT_ID && + ggml_is_contiguous(src0) && ggml_is_contiguous(src1) && //src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && @@ -9519,11 +9526,16 @@ static bool ggml_compute_forward_mul_mat_use_blas( } #endif +// off1 = offset in i11 and i1 +// cne1 = ne11 and ne1 +// in a normal matrix multiplication, off1 = 0 and cne1 = ne1 +// during GGML_TASK_INIT, the full src1 is converted regardless of off1 and cne1 static void ggml_compute_forward_mul_mat( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, - struct ggml_tensor * dst) { + struct ggml_tensor * dst, + int64_t off1, int64_t cne1) { int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -9591,10 +9603,9 @@ static void ggml_compute_forward_mul_mat( const int64_t i03 = i13/r3; const int64_t i02 = i12/r2; - const void * x = (char *) src0->data + i02*nb02 + i03*nb03; - const float * y = (float *) ((char *) src1->data + i12*nb12 + i13*nb13); - - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + const void * x = (char *) src0->data + i02*nb02 + i03*nb03; + const float * y = (float *) ((char *) src1->data + off1*nb11 + i12*nb12 + i13*nb13); + float * d = (float *) ((char *) dst->data + off1*nb1 + i12*nb2 + i13*nb3); if (type != GGML_TYPE_F32) { float * const wdata = params->wdata; @@ -9611,10 +9622,10 @@ static void ggml_compute_forward_mul_mat( } cblas_sgemm(CblasRowMajor, CblasNoTrans, CblasTrans, - ne11, ne01, ne10, - 1.0f, y, ne10, - x, ne00, - 0.0f, d, ne01); + cne1, ne01, ne10, + 1.0f, y, ne10, + x, ne00, + 0.0f, d, ne01); } } @@ -9630,6 +9641,7 @@ static void ggml_compute_forward_mul_mat( const size_t row_size = ne10*ggml_type_size(vec_dot_type)/ggml_blck_size(vec_dot_type); assert(params->wsize >= ne11*ne12*ne13*row_size); + assert(src1->type == GGML_TYPE_F32); for (int64_t i13 = 0; i13 < ne13; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { @@ -9652,7 +9664,7 @@ static void ggml_compute_forward_mul_mat( const size_t row_size = 
ne10*ggml_type_size(vec_dot_type)/ggml_blck_size(vec_dot_type); const int64_t nr0 = ne01; // src0 rows - const int64_t nr1 = ne11*ne12*ne13; // src1 rows + const int64_t nr1 = cne1*ne12*ne13; // src1 rows //printf("nr0 = %lld, nr1 = %lld\n", nr0, nr1); @@ -9694,9 +9706,9 @@ static void ggml_compute_forward_mul_mat( for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { - const int64_t i13 = (ir1/(ne12*ne11)); - const int64_t i12 = (ir1 - i13*ne12*ne11)/ne11; - const int64_t i11 = (ir1 - i13*ne12*ne11 - i12*ne11); + const int64_t i13 = (ir1/(ne12*cne1)); + const int64_t i12 = (ir1 - i13*ne12*cne1)/cne1; + const int64_t i11 = (ir1 - i13*ne12*cne1 - i12*cne1) + off1; // broadcast src0 into src1 const int64_t i03 = i13/r3; @@ -9736,20 +9748,28 @@ static void ggml_compute_forward_mul_mat( static void ggml_compute_forward_mul_mat_id( const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, struct ggml_tensor * dst) { - const struct ggml_tensor * ids = dst->src[0]; - const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + // during GGML_TASK_INIT the entire src1 is converted to vec_dot_type + ggml_compute_forward_mul_mat(params, dst->src[2], src1, dst, 0, dst->ne[1]); + return; + } - const int id = ggml_get_op_params_i32(dst, 0); + const struct ggml_tensor * ids = src0; + const int id = ggml_get_op_params_i32(dst, 0); + const int n_as = ggml_get_op_params_i32(dst, 1); - const int a_id = ((int32_t *)ids->data)[id]; + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id = *(const int32_t *) ((const char *) ids->data + i01*ids->nb[1] + id*ids->nb[0]); - GGML_ASSERT(a_id >= 0 && a_id < ids->ne[0]); + GGML_ASSERT(row_id >= 0 && row_id < n_as); - const struct ggml_tensor * src0 = dst->src[a_id + 2]; - - ggml_compute_forward_mul_mat(params, src0, src1, dst); + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + ggml_compute_forward_mul_mat(params, src0_row, src1, dst, i01, 1); + } } // ggml_compute_forward_out_prod @@ -10325,21 +10345,30 @@ static void ggml_compute_forward_get_rows_q( return; } - const int nc = src0->ne[0]; - const int nr = ggml_nelements(src1); + GGML_TENSOR_BINARY_OP_LOCALS + + const int64_t nc = ne00; + const int64_t nr = ggml_nelements(src1); GGML_UNUSED(nr); + const enum ggml_type type = src0->type; ggml_to_float_t const dequantize_row_q = type_traits[type].to_float; - assert( dst->ne[0] == nc); - assert( dst->ne[1] == nr); - assert(src0->nb[0] == ggml_type_size(type)); + assert(ne0 == nc); + assert(ne02 == ne11); + assert(nb00 == ggml_type_size(type)); + assert(ggml_nrows(dst) == nr); - for (int i = 0; i < nr; ++i) { - const int r = ((int32_t *) src1->data)[i]; + // TODO: multi-thread + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = 0; i11 < ne11; ++i11) { + for (int64_t i10 = 0; i10 < ne10; ++i10) { + const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12); - dequantize_row_q( - (const void *) ((char *) src0->data + r*src0->nb[1]), - (float *) ((char *) dst->data + i*dst->nb[1]), nc); + dequantize_row_q( + (const void *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03), + (float *) ((char *) dst->data + i10*nb1 + i11*nb2 + i12*nb3), nc); + } + } } } @@ -10354,19 +10383,26 @@ static void ggml_compute_forward_get_rows_f16( return; } - const int 
nc = src0->ne[0]; - const int nr = ggml_nelements(src1); + GGML_TENSOR_BINARY_OP_LOCALS - assert( dst->ne[0] == nc); - assert( dst->ne[1] == nr); - assert(src0->nb[0] == sizeof(ggml_fp16_t)); + const int64_t nc = ne00; + const int64_t nr = ggml_nelements(src1); GGML_UNUSED(nr); - for (int i = 0; i < nr; ++i) { - const int r = ((int32_t *) src1->data)[i]; + assert(ne0 == nc); + assert(ne02 == ne11); + assert(nb00 == sizeof(ggml_fp16_t)); + assert(ggml_nrows(dst) == nr); - for (int j = 0; j < nc; ++j) { - ggml_fp16_t v = ((ggml_fp16_t *) ((char *) src0->data + r*src0->nb[1]))[j]; - ((float *) ((char *) dst->data + i*dst->nb[1]))[j] = GGML_FP16_TO_FP32(v); + // TODO: multi-thread + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = 0; i11 < ne11; ++i11) { + for (int64_t i10 = 0; i10 < ne10; ++i10) { + const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12); + + ggml_fp16_to_fp32_row( + (const void *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03), + (float *) ((char *) dst->data + i10*nb1 + i11*nb2 + i12*nb3), nc); + } } } } @@ -10382,19 +10418,27 @@ static void ggml_compute_forward_get_rows_f32( return; } - const int nc = src0->ne[0]; - const int nr = ggml_nelements(src1); + GGML_TENSOR_BINARY_OP_LOCALS - assert( dst->ne[0] == nc); - assert( dst->ne[1] == nr); - assert(src0->nb[0] == sizeof(float)); + const int64_t nc = ne00; + const int64_t nr = ggml_nelements(src1); GGML_UNUSED(nr); - for (int i = 0; i < nr; ++i) { - const int r = ((int32_t *) src1->data)[i]; + assert(ne0 == nc); + assert(ne02 == ne11); + assert(nb00 == sizeof(float)); + assert(ggml_nrows(dst) == nr); - ggml_vec_cpy_f32(nc, - (float *) ((char *) dst->data + i*dst->nb[1]), - (float *) ((char *) src0->data + r*src0->nb[1])); + // TODO: multi-thread + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = 0; i11 < ne11; ++i11) { + for (int64_t i10 = 0; i10 < ne10; ++i10) { + const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12); + + ggml_vec_cpy_f32(nc, + (float *) ((char *) dst->data + i10*nb1 + i11*nb2 + i12*nb3), + (float *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03)); + } + } } } @@ -14037,11 +14081,11 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_MUL_MAT: { - ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor, 0, tensor->ne[1]); } break; case GGML_OP_MUL_MAT_ID: { - ggml_compute_forward_mul_mat_id(params, tensor); + ggml_compute_forward_mul_mat_id(params, tensor->src[0], tensor->src[1], tensor); } break; case GGML_OP_OUT_PROD: { diff --git a/ggml.h b/ggml.h index 41a075e92..32f256481 100644 --- a/ggml.h +++ b/ggml.h @@ -217,7 +217,7 @@ #define GGML_MAX_DIMS 4 #define GGML_MAX_PARAMS 2048 #define GGML_MAX_CONTEXTS 64 -#define GGML_MAX_SRC 6 +#define GGML_MAX_SRC 10 #define GGML_MAX_NAME 64 #define GGML_MAX_OP_PARAMS 64 #define GGML_DEFAULT_N_THREADS 4 @@ -1051,7 +1051,8 @@ extern "C" { // ggml_mul_mat_id(ctx, as, ids, id, b) ~= ggml_mul_mat(as[ids[id]], b) GGML_API struct ggml_tensor * ggml_mul_mat_id( struct ggml_context * ctx, - struct ggml_tensor * as[], + struct ggml_tensor * const as[], + int n_as, struct ggml_tensor * ids, int id, struct ggml_tensor * b); @@ -1263,6 +1264,7 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); + // supports 3D: a->ne[2] == b->ne[1] GGML_API struct ggml_tensor * ggml_get_rows( struct ggml_context * 
ctx, struct ggml_tensor * a, diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 685c88f1a..12133882b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -38,6 +38,8 @@ class Keys: FEED_FORWARD_LENGTH = "{arch}.feed_forward_length" USE_PARALLEL_RESIDUAL = "{arch}.use_parallel_residual" TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" + EXPERT_COUNT = "{arch}.expert_count" + EXPERT_USED_COUNT = "{arch}.expert_used_count" class Attention: HEAD_COUNT = "{arch}.attention.head_count" @@ -111,10 +113,14 @@ class MODEL_TENSOR(IntEnum): ATTN_NORM = auto() ATTN_NORM_2 = auto() ATTN_ROT_EMBD = auto() + FFN_GATE_INP = auto() + FFN_NORM = auto() FFN_GATE = auto() FFN_DOWN = auto() FFN_UP = auto() - FFN_NORM = auto() + FFN_GATE_EXP = auto() + FFN_DOWN_EXP = auto() + FFN_UP_EXP = auto() ATTN_Q_NORM = auto() ATTN_K_NORM = auto() @@ -154,10 +160,14 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd", MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm", MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm", + MODEL_TENSOR.FFN_GATE_INP: "blk.{bid}.ffn_gate_inp", MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm", MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down", MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up", + MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate.{xid}", + MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", + MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", } MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { @@ -172,10 +182,14 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.ATTN_V, MODEL_TENSOR.ATTN_OUT, MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_GATE_INP, MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_GATE, MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_GATE_EXP, + MODEL_TENSOR.FFN_DOWN_EXP, + MODEL_TENSOR.FFN_UP_EXP, ], MODEL_ARCH.GPTNEOX: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index b8ec977c8..73e021607 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -339,6 +339,12 @@ class GGUFWriter: def add_clamp_kqv(self, value: float) -> None: self.add_float32(Keys.Attention.CLAMP_KQV.format(arch=self.arch), value) + def add_expert_count(self, count: int) -> None: + self.add_uint32(Keys.LLM.EXPERT_COUNT.format(arch=self.arch), count) + + def add_expert_used_count(self, count: int) -> None: + self.add_uint32(Keys.LLM.EXPERT_USED_COUNT.format(arch=self.arch), count) + def add_layer_norm_eps(self, value: float) -> None: self.add_float32(Keys.Attention.LAYERNORM_EPS.format(arch=self.arch), value) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index cc6236014..0115ea1c6 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -149,6 +149,11 @@ class TensorNameMap: "model.layers.{bid}.ln2", # yi ), + MODEL_TENSOR.FFN_GATE_INP: ( + "layers.{bid}.feed_forward.gate", # mixtral + "model.layers.{bid}.block_sparse_moe.gate", # mixtral + ), + # Feed-forward up MODEL_TENSOR.FFN_UP: ( "gpt_neox.layers.{bid}.mlp.dense_h_to_4h", # gptneox @@ -164,11 +169,21 @@ class TensorNameMap: "transformer.h.{bid}.mlp.w1", # qwen ), + MODEL_TENSOR.FFN_UP_EXP: ( + "layers.{bid}.feed_forward.experts.{xid}.w3", # mixtral + "model.layers.{bid}.block_sparse_moe.experts.{xid}.w3", # mixtral + ), + # Feed-forward gate MODEL_TENSOR.FFN_GATE: ( - "model.layers.{bid}.mlp.gate_proj", # llama-hf refact - "layers.{bid}.feed_forward.w1", # llama-pth 
- "transformer.h.{bid}.mlp.w2", # qwen + "model.layers.{bid}.mlp.gate_proj", # llama-hf refact + "layers.{bid}.feed_forward.w1", # llama-pth + "transformer.h.{bid}.mlp.w2", # qwen + ), + + MODEL_TENSOR.FFN_GATE_EXP: ( + "layers.{bid}.feed_forward.experts.{xid}.w1", # mixtral + "model.layers.{bid}.block_sparse_moe.experts.{xid}.w1", # mixtral ), # Feed-forward down @@ -185,6 +200,11 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon ), + MODEL_TENSOR.FFN_DOWN_EXP: ( + "layers.{bid}.feed_forward.experts.{xid}.w2", # mixtral + "model.layers.{bid}.block_sparse_moe.experts.{xid}.w2", # mixtral + ), + MODEL_TENSOR.ATTN_Q_NORM: ( "language_model.encoder.layers.{bid}.self_attention.q_layernorm", ), @@ -213,11 +233,14 @@ class TensorNameMap: for tensor, keys in self.block_mappings_cfg.items(): if tensor not in MODEL_TENSORS[arch]: continue - tensor_name = TENSOR_NAMES[tensor].format(bid = bid) - self.mapping[tensor_name] = (tensor, tensor_name) - for key in keys: - key = key.format(bid = bid) - self.mapping[key] = (tensor, tensor_name) + # TODO: make this configurable + n_experts = 8 + for xid in range(n_experts): + tensor_name = TENSOR_NAMES[tensor].format(bid = bid, xid = xid) + self.mapping[tensor_name] = (tensor, tensor_name) + for key in keys: + key = key.format(bid = bid, xid = xid) + self.mapping[key] = (tensor, tensor_name) def get_type_and_name(self, key: str, try_suffixes: Sequence[str] = ()) -> tuple[MODEL_TENSOR, str] | None: result = self.mapping.get(key) diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml index e6374bfe8..9789c2c87 100644 --- a/gguf-py/pyproject.toml +++ b/gguf-py/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gguf" -version = "0.6.0" +version = "0.7.0" description = "Read and write ML models in GGUF for GGML" authors = ["GGML "] packages = [ diff --git a/llama.cpp b/llama.cpp index 54fa9e43e..0e5ab044c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -91,7 +91,8 @@ #define LLAMA_ATTRIBUTE_FORMAT(...) 
#endif -#define LLAMA_MAX_NODES 8192 +#define LLAMA_MAX_NODES 8192 +#define LLAMA_MAX_EXPERTS 8 // // logging @@ -231,6 +232,8 @@ enum llm_kv { LLM_KV_FEED_FORWARD_LENGTH, LLM_KV_USE_PARALLEL_RESIDUAL, LLM_KV_TENSOR_DATA_LAYOUT, + LLM_KV_EXPERT_COUNT, + LLM_KV_EXPERT_USED_COUNT, LLM_KV_ATTENTION_HEAD_COUNT, LLM_KV_ATTENTION_HEAD_COUNT_KV, @@ -281,6 +284,8 @@ static std::map LLM_KV_NAMES = { { LLM_KV_FEED_FORWARD_LENGTH, "%s.feed_forward_length" }, { LLM_KV_USE_PARALLEL_RESIDUAL, "%s.use_parallel_residual" }, { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" }, + { LLM_KV_EXPERT_COUNT, "%s.expert_count" }, + { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" }, { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" }, { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, @@ -338,10 +343,14 @@ enum llm_tensor { LLM_TENSOR_ATTN_NORM, LLM_TENSOR_ATTN_NORM_2, LLM_TENSOR_ATTN_ROT_EMBD, + LLM_TENSOR_FFN_GATE_INP, + LLM_TENSOR_FFN_NORM, LLM_TENSOR_FFN_GATE, LLM_TENSOR_FFN_DOWN, LLM_TENSOR_FFN_UP, - LLM_TENSOR_FFN_NORM, + LLM_TENSOR_FFN_DOWN_EXP, + LLM_TENSOR_FFN_GATE_EXP, + LLM_TENSOR_FFN_UP_EXP, LLM_TENSOR_ATTN_Q_NORM, LLM_TENSOR_ATTN_K_NORM, }; @@ -360,10 +369,14 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" }, { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" }, + { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" }, + { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" }, }, }, { @@ -585,6 +598,10 @@ struct LLM_TN { std::string operator()(llm_tensor tensor, const std::string & suffix, int bid) const { return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid) + "." + suffix; } + + std::string operator()(llm_tensor tensor, const std::string & suffix, int bid, int xid) const { + return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid, xid) + "." 
+ suffix; + } }; // @@ -1164,6 +1181,8 @@ struct llama_hparams { uint32_t n_layer; uint32_t n_rot; uint32_t n_ff; + uint32_t n_expert = 0; + uint32_t n_expert_used = 0; float f_norm_eps; float f_norm_rms_eps; @@ -1178,15 +1197,18 @@ struct llama_hparams { float f_max_alibi_bias; bool operator!=(const llama_hparams & other) const { - if (this->vocab_only != other.vocab_only) return true; - if (this->n_vocab != other.n_vocab) return true; - if (this->n_ctx_train != other.n_ctx_train) return true; - if (this->n_embd != other.n_embd) return true; - if (this->n_head != other.n_head) return true; - if (this->n_head_kv != other.n_head_kv) return true; - if (this->n_layer != other.n_layer) return true; - if (this->n_rot != other.n_rot) return true; - if (this->n_ff != other.n_ff) return true; + if (this->vocab_only != other.vocab_only) return true; + if (this->n_vocab != other.n_vocab) return true; + if (this->n_ctx_train != other.n_ctx_train) return true; + if (this->n_embd != other.n_embd) return true; + if (this->n_head != other.n_head) return true; + if (this->n_head_kv != other.n_head_kv) return true; + if (this->n_layer != other.n_layer) return true; + if (this->n_rot != other.n_rot) return true; + if (this->n_ff != other.n_ff) return true; + if (this->n_expert != other.n_expert) return true; + if (this->n_expert_used != other.n_expert_used) return true; + if (this->rope_finetuned != other.rope_finetuned) return true; if (this->n_yarn_orig_ctx != other.n_yarn_orig_ctx) return true; @@ -1268,6 +1290,12 @@ struct llama_layer { struct ggml_tensor * ffn_down; // w2 struct ggml_tensor * ffn_up; // w3 + // ff MoE + struct ggml_tensor * ffn_gate_inp; + struct ggml_tensor * ffn_gate_exp[LLAMA_MAX_EXPERTS]; + struct ggml_tensor * ffn_down_exp[LLAMA_MAX_EXPERTS]; + struct ggml_tensor * ffn_up_exp [LLAMA_MAX_EXPERTS]; + // ff bias struct ggml_tensor * ffn_down_b; // b2 struct ggml_tensor * ffn_up_b; // b3 @@ -2440,6 +2468,16 @@ static void llm_load_hparams( ml.get_key (LLM_KV_FEED_FORWARD_LENGTH, hparams.n_ff); ml.get_key (LLM_KV_ATTENTION_HEAD_COUNT, hparams.n_head); ml.get_key (LLM_KV_BLOCK_COUNT, hparams.n_layer); + ml.get_key (LLM_KV_EXPERT_COUNT, hparams.n_expert, false); + ml.get_key (LLM_KV_EXPERT_USED_COUNT, hparams.n_expert_used, false); + + GGML_ASSERT(hparams.n_expert <= LLAMA_MAX_EXPERTS); + GGML_ASSERT(hparams.n_expert_used <= hparams.n_expert); + if (hparams.n_expert > 0) { + GGML_ASSERT(hparams.n_expert_used > 0); + } else { + GGML_ASSERT(hparams.n_expert_used == 0); + } // n_head_kv is optional, default to n_head hparams.n_head_kv = hparams.n_head; @@ -2871,6 +2909,8 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: f_clamp_kqv = %.1e\n", __func__, hparams.f_clamp_kqv); LLAMA_LOG_INFO("%s: f_max_alibi_bias = %.1e\n", __func__, hparams.f_max_alibi_bias); LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, hparams.n_ff); + LLAMA_LOG_INFO("%s: n_expert = %u\n", __func__, hparams.n_expert); + LLAMA_LOG_INFO("%s: n_expert_used = %u\n", __func__, hparams.n_expert_used); LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type.c_str()); LLAMA_LOG_INFO("%s: freq_base_train = %.1f\n", __func__, hparams.rope_freq_base_train); LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); @@ -3025,9 +3065,26 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), 
{n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate_inp = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_INP, "weight", i), {n_embd}, backend, false); + + if (layer.ffn_gate_inp == nullptr) { + GGML_ASSERT(hparams.n_expert == 0); + GGML_ASSERT(hparams.n_expert_used == 0); + + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + } else { + GGML_ASSERT(hparams.n_expert > 0); + GGML_ASSERT(hparams.n_expert_used > 0); + + // MoE branch + for (uint32_t x = 0; x < hparams.n_expert; ++x) { + layer.ffn_gate_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); + layer.ffn_down_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN_EXP, "weight", i, x), { n_ff, n_embd}, backend_split); + layer.ffn_up_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); + } + } if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -3037,8 +3094,18 @@ static void llm_load_tensors( (layer.bk ? ggml_nbytes(layer.bk) : 0) + (layer.bv ? ggml_nbytes(layer.bv) : 0) + (layer.bo ? ggml_nbytes(layer.bo) : 0) + - ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_gate) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); + ggml_nbytes(layer.ffn_norm); + + if (layer.ffn_gate_inp == nullptr) { + vram_weights += + ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); + } else { + vram_weights += ggml_nbytes(layer.ffn_gate_inp); + for (uint32_t x = 0; x < hparams.n_expert; ++x) { + vram_weights += + ggml_nbytes(layer.ffn_gate_exp[x]) + ggml_nbytes(layer.ffn_down_exp[x]) + ggml_nbytes(layer.ffn_up_exp[x]); + } + } } } } break; @@ -4019,6 +4086,8 @@ struct llm_build_context { const int64_t n_head_kv; const int64_t n_embd_head; const int64_t n_embd_gqa; + const int64_t n_expert; + const int64_t n_expert_used; const float freq_base; const float freq_scale; @@ -4060,6 +4129,8 @@ struct llm_build_context { n_head_kv (hparams.n_head_kv), n_embd_head (hparams.n_embd_head()), n_embd_gqa (hparams.n_embd_gqa()), + n_expert (hparams.n_expert), + n_expert_used (hparams.n_expert_used), freq_base (cparams.rope_freq_base), freq_scale (cparams.rope_freq_scale), ext_factor (cparams.yarn_ext_factor), @@ -4184,7 +4255,7 @@ struct llm_build_context { cb(ffn_inp, "ffn_inp", il); // feed-forward network - { + if (model.layers[il].ffn_gate_inp == nullptr) { cur = llm_build_norm(ctx0, ffn_inp, hparams, model.layers[il].ffn_norm, NULL, LLM_NORM_RMS, cb, il); @@ -4196,6 +4267,69 @@ struct llm_build_context { model.layers[il].ffn_down, NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); + } else { + // MoE branch + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + ggml_tensor * logits = ggml_mul_mat(ctx0, model.layers[il].ffn_gate_inp, cur); // [n_tokens, num_experts] + cb(logits, "ffn_moe_logits", il); + + ggml_tensor * probs = ggml_soft_max(ctx0, logits); // [n_tokens, num_experts] + cb(probs, "ffn_moe_probs", il); + + // select 
experts + ggml_tensor * selected_experts = ggml_top_k(ctx0, probs, n_expert_used); // [n_tokens, num_experts_per_tok] + cb(selected_experts->src[0], "ffn_moe_argsort", il); + + ggml_tensor * weights = ggml_get_rows(ctx0, + ggml_reshape_3d(ctx0, probs, 1, n_expert, n_tokens), selected_experts); + cb(weights, "ffn_moe_weights", il); + + weights = ggml_reshape_2d(ctx0, weights, n_expert_used, n_tokens); // [n_tokens, num_experts_per_tok] + + ggml_tensor * weights_sum = ggml_sum_rows(ctx0, weights); + cb(weights_sum, "ffn_moe_weights_sum", il); + + weights = ggml_div(ctx0, weights, weights_sum); // [n_tokens, num_experts_per_tok] + cb(weights, "ffn_moe_weights_norm", il); + + // compute expert outputs + ggml_tensor * moe_out = nullptr; + + for (int i = 0; i < n_expert_used; ++i) { + ggml_tensor * cur_expert; + + ggml_tensor * cur_up = ggml_mul_mat_id(ctx0, model.layers[il].ffn_up_exp, n_expert, selected_experts, i, cur); + cb(cur_up, "ffn_moe_up", il); + + ggml_tensor * cur_gate = ggml_mul_mat_id(ctx0, model.layers[il].ffn_gate_exp, n_expert, selected_experts, i, cur); + cb(cur_gate, "ffn_moe_gate", il); + + cur_gate = ggml_silu(ctx0, cur_gate); + cb(cur_gate, "ffn_moe_silu", il); + + cur_expert = ggml_mul(ctx0, cur_up, cur_gate); // [n_tokens, n_embd] + cb(cur_expert, "ffn_moe_gate_par", il); + + cur_expert = ggml_mul_mat_id(ctx0, model.layers[il].ffn_down_exp, n_expert, selected_experts, i, cur_expert); // [n_tokens, n_embd] + cb(cur_expert, "ffn_moe_down", il); + + cur_expert = ggml_mul(ctx0, cur_expert, + ggml_view_2d(ctx0, weights, 1, n_tokens, weights->nb[1], i*weights->nb[0])); + cb(cur_expert, "ffn_moe_weighted", il); + + if (i == 0) { + moe_out = cur_expert; + } else { + moe_out = ggml_add(ctx0, moe_out, cur_expert); + cb(moe_out, "ffn_moe_out", il); + } + } + + cur = moe_out; } cur = ggml_add(ctx0, cur, ffn_inp); @@ -5450,6 +5584,20 @@ static const std::unordered_map k_offload_map { "ffn_relu", OFFLOAD_FUNC }, { "ffn_sqr(relu)", OFFLOAD_FUNC }, + { "ffn_moe_logits", OFFLOAD_FUNC }, + { "ffn_moe_probs", OFFLOAD_FUNC }, + { "ffn_moe_argsort", OFFLOAD_FUNC }, + { "ffn_moe_weights", OFFLOAD_FUNC }, + { "ffn_moe_weights_sum", OFFLOAD_FUNC }, + { "ffn_moe_weights_norm", OFFLOAD_FUNC }, + { "ffn_moe_weighted", OFFLOAD_FUNC }, + { "ffn_moe_up", OFFLOAD_FUNC }, + { "ffn_moe_gate", OFFLOAD_FUNC }, + { "ffn_moe_silu", OFFLOAD_FUNC }, + { "ffn_moe_gate_par", OFFLOAD_FUNC }, + { "ffn_moe_down", OFFLOAD_FUNC }, + { "ffn_moe_out", OFFLOAD_FUNC }, + { "l_out", OFFLOAD_FUNC }, { "result_norm", OFFLOAD_FUNC_EMB }, @@ -8067,11 +8215,9 @@ static void llama_convert_tensor_internal( workers.clear(); } -static ggml_type get_k_quant_type( - quantize_state_internal & qs, - ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype -) { +static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype) { const std::string name = ggml_get_name(tensor); + // TODO: avoid hardcoded tensor names - use the TN_* constants const llm_arch arch = qs.model.arch; const auto tn = LLM_TN(arch); @@ -8105,7 +8251,18 @@ static ggml_type get_k_quant_type( // nearly negligible increase in model size by quantizing this tensor with more bits: if (new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K) new_type = GGML_TYPE_Q5_K; } + if (qs.model.hparams.n_expert == 8) { + // for the 8-expert model, bumping this to Q8_0 trades just ~128MB + // TODO: explore better strategies + new_type = GGML_TYPE_Q8_0; + } ++qs.i_attention_wv; + } else if 
(name.find("attn_k.weight") != std::string::npos) { + if (qs.model.hparams.n_expert == 8) { + // for the 8-expert model, bumping this to Q8_0 trades just ~128MB + // TODO: explore better strategies + new_type = GGML_TYPE_Q8_0; + } } else if (name.find("ffn_down.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { @@ -8318,6 +8475,9 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s quantize &= params->quantize_output_tensor || name != "output.weight"; quantize &= !params->only_copy; + // do not quantize expert gating tensors + quantize &= name.find("ffn_gate_inp.weight") == std::string::npos; + enum ggml_type new_type; void * new_data; size_t new_size; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index e0155ac1c..44830b4d4 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -20,8 +20,6 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m size_t size = ggml_nelements(tensor); std::vector data(size); - std::random_device rd; - #if 0 std::default_random_engine generator(rd()); std::uniform_real_distribution distribution(min, max); @@ -31,6 +29,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m } #endif auto init_thread = [&](size_t start, size_t end) { + std::random_device rd; std::default_random_engine generator(rd()); std::uniform_real_distribution distribution(min, max); @@ -51,7 +50,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m t.join(); } - if (tensor->type == GGML_TYPE_F32) { + if (tensor->type == GGML_TYPE_F32 || tensor->type == GGML_TYPE_I32) { ggml_backend_tensor_set(tensor, data.data(), 0, size * sizeof(float)); } else if (ggml_is_quantized(tensor->type) || tensor->type == GGML_TYPE_F16) { GGML_ASSERT(size % ggml_blck_size(tensor->type) == 0); @@ -71,23 +70,28 @@ static std::vector tensor_to_float(const ggml_tensor * t) { std::vector buf(ggml_nbytes(t)); ggml_backend_tensor_get(t, buf.data(), 0, ggml_nbytes(t)); + ggml_type_traits_t tt = ggml_internal_get_type_traits(t->type); + size_t bs = ggml_blck_size(t->type); + // access elements by index to avoid gaps in views for (int64_t i3 = 0; i3 < t->ne[3]; i3++) { for (int64_t i2 = 0; i2 < t->ne[2]; i2++) { for (int64_t i1 = 0; i1 < t->ne[1]; i1++) { - for (int64_t i0 = 0; i0 < t->ne[0]; i0++) { - size_t i = i3*t->nb[3] + i2*t->nb[2] + i1*t->nb[1] + i0*t->nb[0]; - float v; + for (int64_t i0 = 0; i0 < t->ne[0]; i0 += bs) { + size_t i = i3*t->nb[3] + i2*t->nb[2] + i1*t->nb[1] + i0/bs*t->nb[0]; if (t->type == GGML_TYPE_F16) { - v = (float) ggml_fp16_to_fp32(*(ggml_fp16_t*)&buf[i]); + tv.push_back(ggml_fp16_to_fp32(*(ggml_fp16_t*)&buf[i])); } else if (t->type == GGML_TYPE_F32) { - v = *(float *) &buf[i]; + tv.push_back(*(float *) &buf[i]); } else if (t->type == GGML_TYPE_I32) { - v = *(int32_t *) &buf[i]; + tv.push_back((float)*(int32_t *) &buf[i]); + } else if (ggml_is_quantized(t->type)) { + std::vector vq(ggml_blck_size(t->type)); + tt.to_float(&buf[i], vq.data(), ggml_blck_size(t->type)); + tv.insert(tv.end(), vq.begin(), vq.end()); } else { GGML_ASSERT(false); } - tv.push_back(v); } } } @@ -233,6 +237,10 @@ static bool ggml_is_view_op(enum ggml_op op) { struct test_case { virtual ~test_case() {} + virtual std::string op_desc(ggml_tensor * t) { + return ggml_op_desc(t); + } + virtual std::string vars() { return ""; } @@ -240,7 +248,7 @@ struct test_case { virtual 
ggml_tensor * build_graph(ggml_context * ctx) = 0; virtual double max_nmse_err() { - return 1e-6; + return 1e-7; } virtual void initialize_tensors(ggml_context * ctx) { @@ -270,13 +278,13 @@ struct test_case { ggml_tensor * out = build_graph(ctx); - if (op_name != nullptr && strcmp(ggml_op_desc(out), op_name) != 0) { - //printf(" %s: skipping\n", ggml_op_desc(out)); + if (op_name != nullptr && op_desc(out) != op_name) { + //printf(" %s: skipping\n", op_desc(out).c_str()); ggml_free(ctx); return true; } - printf(" %s(%s): ", ggml_op_desc(out), vars().c_str()); + printf(" %s(%s): ", op_desc(out).c_str(), vars().c_str()); fflush(stdout); // check if backends support op @@ -317,7 +325,7 @@ struct test_case { for (size_t i = 0; i < f1.size(); i++) { // check for nans if (std::isnan(f1[i]) || std::isnan(f2[i])) { - printf("NaN at index %zu ", i); + printf("[%s] NaN at index %zu (%f %f) ", ggml_op_desc(t1), i, f1[i], f2[i]); ud->ok = false; return true; } @@ -325,12 +333,12 @@ struct test_case { if (isinf_or_max(f1[i]) || isinf_or_max(f2[i])) { if (isinf_or_max(f1[i]) && isinf_or_max(f2[i])) { if (std::signbit(f1[i]) != std::signbit(f2[i])) { - printf("inf sign mismatch: %f %f ", f1[i], f2[i]); + printf("[%s] inf sign mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); ud->ok = false; return true; } } else { - printf("inf mismatch: %f %f ", f1[i], f2[i]); + printf("[%s] inf mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); ud->ok = false; return true; } @@ -339,10 +347,16 @@ struct test_case { double err = nmse(f1.data(), f2.data(), f1.size()); if (err > ud->max_err) { - printf("NMSE = %f ", err); + printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); + //for (int i = 0; i < f1.size(); i++) { + // printf("(%f, %f) ", f1[i], f2[i]); + //} + //printf("\n"); ud->ok = false; } return true; + + GGML_UNUSED(index); }; ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); @@ -372,13 +386,13 @@ struct test_case { ggml_tensor * out = build_graph(ctx); - if (op_name != nullptr && strcmp(ggml_op_desc(out), op_name) != 0) { - //printf(" %s: skipping\n", ggml_op_desc(out)); + if (op_name != nullptr && op_desc(out) != op_name) { + //printf(" %s: skipping\n", op_desc(out).c_str()); ggml_free(ctx); return true; } - int len = printf(" %s(%s): ", ggml_op_desc(out), vars().c_str()); + int len = printf(" %s(%s): ", op_desc(out).c_str(), vars().c_str()); fflush(stdout); // check if backends support op @@ -430,8 +444,9 @@ struct test_case { return size; }; for (int i = 0; i < gf->n_nodes; i++) { - if (ggml_is_view_op(gf->nodes[i]->op) || gf->nodes[i] == out) + if (ggml_is_view_op(gf->nodes[i]->op) || gf->nodes[i] == out) { continue; + } mem += tensor_op_size(gf->nodes[i]); } @@ -486,17 +501,22 @@ struct test_get_rows : public test_case { const int n; // cols const int m; // rows const int r; // rows to get + const int b; // batch size + const bool v; // view (non-contiguous src1) std::string vars() override { - return VARS_TO_STR4(type, n, m, r); + return VARS_TO_STR6(type, n, m, r, b, v); } - test_get_rows(ggml_type type = GGML_TYPE_F32, int n = 10, int m = 5, int r = 3) - : type(type), n(n), m(m), r(r) {} + test_get_rows(ggml_type type = GGML_TYPE_F32, int n = 10, int m = 5, int r = 3, int b = 1, bool v = false) + : type(type), n(n), m(m), r(r), b(b), v(v) {} ggml_tensor * build_graph(ggml_context * ctx) override { - ggml_tensor * in = ggml_new_tensor_2d(ctx, type, n, m); - ggml_tensor * rows = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, r); + ggml_tensor * in = ggml_new_tensor_3d(ctx, type, n, m, b); 
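        // [editor's note — explanatory comment, not part of the upstream patch]
        // The reworked test feeds ggml_get_rows a batched src0 of shape (n, m, b)
        // and an I32 index tensor of shape (r, b), optionally turned into a
        // non-contiguous view below. For F32 data the expected semantics are
        //   out[:, i, j] = in[:, rows[i, j], j]   with out of shape (n, r, b),
        // i.e. each batch slice j gathers its own set of r rows.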
+ ggml_tensor * rows = ggml_new_tensor_2d(ctx, GGML_TYPE_I32, r, b); + if (v) { + rows = ggml_view_2d(ctx, rows, r/2, b, rows->nb[1], 0); + } ggml_tensor * out = ggml_get_rows(ctx, in, rows); return out; } @@ -504,12 +524,13 @@ struct test_get_rows : public test_case { void initialize_tensors(ggml_context * ctx) override { for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { if (t->type == GGML_TYPE_I32) { + if (ggml_is_view_op(t->op)) { continue; } // rows - std::vector data(r); - for (int i = 0; i < r; i++) { + std::vector data(r*b); + for (int i = 0; i < r*b; i++) { data[i] = rand() % m; } - ggml_backend_tensor_set(t, data.data(), 0, r * sizeof(int)); + ggml_backend_tensor_set(t, data.data(), 0, r * b * sizeof(int)); } else { init_tensor_uniform(t); } @@ -770,11 +791,10 @@ struct test_mul_mat_id : public test_case { const int64_t m; const int64_t n; const int64_t k; - const std::array bs; // dims 3 and 4 - const std::array nr; // repeat in dims 3 and 4 + const bool v; // view (non-contiguous ids) std::string vars() override { - return VARS_TO_STR9(type_a, type_b, n_mats, id, m, n, k, bs, nr); + return VARS_TO_STR8(type_a, type_b, n_mats, id, m, n, k, v); } double max_nmse_err() override { @@ -782,7 +802,7 @@ struct test_mul_mat_id : public test_case { } size_t op_size(ggml_tensor * t) override { - size_t a = ggml_nbytes(t->src[2]) * n * nr[0] * nr[1]; + size_t a = ggml_nbytes(t->src[2]) * n; size_t b = ggml_nbytes(t->src[1]) * m; size_t c = ggml_nbytes(t); return a + b + c; @@ -792,35 +812,41 @@ struct test_mul_mat_id : public test_case { test_mul_mat_id(ggml_type type_a = GGML_TYPE_F32, ggml_type type_b = GGML_TYPE_F32, int n_mats = 2, int id = 0, - int64_t m = 32, int64_t n = 32, int64_t k = 32, - std::array bs = {10, 10}, - std::array nr = {2, 2}) + int64_t m = 32, int64_t n = 32, int64_t k = 32, bool v = false) : type_a(type_a), type_b(type_b), n_mats(n_mats), id(id), - m(m), n(n), k(k), bs(bs), nr(nr) {} + m(m), n(n), k(k), v(v) {} ggml_tensor * build_graph(ggml_context * ctx) override { // C^T = A * B^T: (k, m) * (k, n) => (m, n) std::vector mats; for (int i = 0; i < n_mats; i++) { - ggml_tensor * a = ggml_new_tensor_4d(ctx, type_a, k, m, bs[0], bs[1]); + ggml_tensor * a = ggml_new_tensor_2d(ctx, type_a, k, m); mats.push_back(a); } - ggml_tensor * ids = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_mats); - ggml_tensor * b = ggml_new_tensor_4d(ctx, type_b, k, n, bs[0]*nr[0], bs[1]*nr[1]); - ggml_tensor * out = ggml_mul_mat_id(ctx, mats.data(), ids, id, b); + ggml_tensor * ids = ggml_new_tensor_2d(ctx, GGML_TYPE_I32, n_mats, n); + if (v) { + ids = ggml_view_2d(ctx, ids, n_mats/2, ids->ne[1], ids->nb[1], 0); + } + ggml_tensor * b = ggml_new_tensor_2d(ctx, type_b, k, n); + ggml_tensor * out = ggml_mul_mat_id(ctx, mats.data(), n_mats, ids, v ? 
id/2 : id, b); return out; } void initialize_tensors(ggml_context * ctx) override { + std::random_device rd; + std::default_random_engine rng(rd()); for (ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { if (t->type == GGML_TYPE_I32) { + if (ggml_is_view_op(t->op)) { continue; } // ids - std::vector data(n_mats); - for (int i = 0; i < n_mats; i++) { - data[i] = i; + for (int64_t r = 0; r < ggml_nrows(t); r++) { + std::vector data(t->ne[0]); + for (int i = 0; i < t->ne[0]; i++) { + data[i] = i % n_mats; + } + std::shuffle(data.begin(), data.end(), rng); + ggml_backend_tensor_set(t, data.data(), r * t->nb[1], t->ne[0] * sizeof(int32_t)); } - std::shuffle(data.begin(), data.end(), std::default_random_engine(std::random_device()())); - ggml_backend_tensor_set(t, data.data(), 0, n_mats * sizeof(int)); } else { init_tensor_uniform(t); } @@ -1109,6 +1135,90 @@ struct test_sum_rows : public test_case { } }; +// Mixtral MOE +struct test_moe : public test_case { + const int n_experts; + const int n_experts_per_tok; + const int n_tokens; + const int n_embd; + const int n_ff; + + std::string op_desc(ggml_tensor * t) override { + return "MOE"; + + GGML_UNUSED(t); + } + + std::string vars() override { + return VARS_TO_STR5(n_experts, n_experts_per_tok, n_tokens, n_embd, n_ff); + } + + test_moe(int n_experts = 8, int n_experts_per_tok = 2, int n_tokens = 1, int n_embd = 4096, int n_ff = 14336) + : n_experts(n_experts), n_experts_per_tok(n_experts_per_tok), n_tokens(n_tokens), n_embd(n_embd), n_ff(n_ff) { + } + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * ffn_gate_inp = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_experts); + + std::vector ffn_up_exp(n_experts); + std::vector ffn_gate_exp(n_experts); + std::vector ffn_down_exp(n_experts); + + for (int i = 0; i < n_experts; ++i) { + ffn_up_exp[i] = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + ffn_gate_exp[i] = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + ffn_down_exp[i] = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); + } + + ggml_tensor * cur = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_tokens); + + ggml_tensor * logits = ggml_mul_mat(ctx, ffn_gate_inp, cur); + ggml_tensor * probs = ggml_soft_max_ext(ctx, logits, nullptr, 1.0f/sqrtf(n_embd)); + + // select experts + ggml_tensor * selected_experts = ggml_top_k(ctx, probs, n_experts_per_tok); + + ggml_tensor * weights = ggml_get_rows(ctx, + ggml_reshape_3d(ctx, probs, 1, n_experts, n_tokens), selected_experts); + + weights = ggml_reshape_2d(ctx, weights, n_experts_per_tok, n_tokens); + + ggml_tensor * weights_sum = ggml_sum_rows(ctx, weights); + + weights = ggml_div(ctx, weights, weights_sum); + + // compute expert outputs + ggml_tensor * moe_out = nullptr; + + for (int i = 0; i < n_experts_per_tok; ++i) { + ggml_tensor * cur_expert; + + ggml_tensor * cur_up = ggml_mul_mat_id(ctx, ffn_up_exp.data(), n_experts, selected_experts, i, cur); + + ggml_tensor * cur_gate = ggml_mul_mat_id(ctx, ffn_gate_exp.data(), n_experts, selected_experts, i, cur); + + cur_gate = ggml_silu(ctx, cur_gate); + + cur_expert = ggml_mul(ctx, cur_up, cur_gate); + + cur_expert = ggml_mul_mat_id(ctx, ffn_down_exp.data(), n_experts, selected_experts, i, cur_expert); + + cur_expert = ggml_mul(ctx, cur_expert, + ggml_view_2d(ctx, weights, 1, n_tokens, weights->nb[1], i*weights->nb[0])); + + if (i == 0) { + moe_out = cur_expert; + } else { + moe_out = ggml_add(ctx, moe_out, cur_expert); + } + } + + cur = moe_out; + + return cur; + 
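        // [editor's note — explanatory comment, not part of the upstream patch]
        // The graph above mirrors Mixtral's routing for each token x:
        //   probs = softmax((1/sqrt(n_embd)) * ffn_gate_inp^T x)
        //   sel   = top_k(probs, n_experts_per_tok)
        //   w_i   = probs[sel_i] / sum_j probs[sel_j]
        //   out   = sum_i w_i * down[sel_i]( silu(gate[sel_i] x) * up[sel_i] x )
        // ggml_mul_mat_id() performs the per-row expert selection, so every token
        // in the batch can route to a different set of experts.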
} +}; + enum test_mode { MODE_TEST, MODE_PERF, @@ -1117,14 +1227,28 @@ enum test_mode { static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op_name) { std::vector> test_cases; + const ggml_type all_types[] = { + GGML_TYPE_F32, GGML_TYPE_F16, + GGML_TYPE_Q4_0, GGML_TYPE_Q4_1, + GGML_TYPE_Q5_0, GGML_TYPE_Q5_1, + GGML_TYPE_Q8_0, + GGML_TYPE_Q2_K, GGML_TYPE_Q3_K, + GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, + GGML_TYPE_Q6_K + }; + // unary ops for (int op = 0; op < GGML_UNARY_OP_COUNT; op++) { test_cases.emplace_back(new test_unary((ggml_unary_op) op)); } - for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { - test_cases.emplace_back(new test_get_rows(type, 10, 5, 3)); - test_cases.emplace_back(new test_get_rows(type, 16, 5, 3)); + test_cases.emplace_back(new test_get_rows(GGML_TYPE_F32, 1, 8, 2, 1, false)); + for (ggml_type type : all_types) { + for (int b : {1, 7}) { + for (bool v : {false, true}) { + test_cases.emplace_back(new test_get_rows(type, 256, 5, 4, b, v)); + } + } } test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); @@ -1134,7 +1258,11 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 2})); test_cases.emplace_back(new test_dup()); - test_cases.emplace_back(new test_cpy()); + + for (ggml_type type : all_types) { + test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, type, {256, 10, 10, 1})); + } + test_cases.emplace_back(new test_cont()); auto add_test_bin_bcast = [&](ggml_type type, std::array ne, std::array nr) { @@ -1144,6 +1272,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op }; add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 8, 1}, {1, 1, 1, 1}); + add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 1, 1}, {32, 1, 1, 1}); add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 320, 320}, {1, 1, 1, 1}); add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 1, 1}, {1, 1, 1, 1}); add_test_bin_bcast(GGML_TYPE_F32, {16, 10, 10, 1}, {1, 1, 1, 1}); @@ -1170,8 +1299,8 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op add_test_bin_bcast(GGML_TYPE_F32, {1, 1, 640, 1}, {32, 32, 1, 1}); add_test_bin_bcast(GGML_TYPE_F32, {5120, 1, 1, 1}, {1, 256, 1, 1}); add_test_bin_bcast(GGML_TYPE_F32, {640, 1, 1, 1}, {1, 1, 1, 1}); - add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {1, 1, 1, 1}); - add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {2, 1, 1, 1}); + //add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {1, 1, 1, 1}); + //add_test_bin_bcast(GGML_TYPE_F32, {3, 3, 2560, 1280}, {2, 1, 1, 1}); test_cases.emplace_back(new test_scale()); @@ -1180,16 +1309,6 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_rms_norm(GGML_TYPE_F32, {64, 10, 10, 10}, eps)); } - const ggml_type all_types[] = { - GGML_TYPE_F32, GGML_TYPE_F16, - GGML_TYPE_Q4_0, GGML_TYPE_Q4_1, - GGML_TYPE_Q5_0, GGML_TYPE_Q5_1, - GGML_TYPE_Q8_0, - GGML_TYPE_Q2_K, GGML_TYPE_Q3_K, - GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, - GGML_TYPE_Q6_K - }; - for (ggml_type type_a : all_types) { for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { // FIXME: CPU crashes on f16xf16 @@ -1213,9 +1332,11 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op for (ggml_type type_a : all_types) { for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { - for (int n_mats : {1, 2, 4}) { + for (int n_mats : {2, 4, 8}) { for (int id = 0; id < n_mats; 
id++) { - test_cases.emplace_back(new test_mul_mat_id(type_a, type_b, n_mats, id, 16, 16, 256, {1, 1}, {1, 1})); + for (bool v : {false, true}) { + test_cases.emplace_back(new test_mul_mat_id(type_a, type_b, n_mats, id, 16, 16, 256, v)); + } } } } @@ -1247,10 +1368,18 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_concat()); for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { + test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); } - test_cases.emplace_back(new test_sum_rows()); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, {10, 10, 10, 10})); + test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, {2, 1, 1, 1})); + +#if !defined(__SANITIZE_THREAD__) + // FIXME: these tests use too much memory with thread sanitizer + test_cases.emplace_back(new test_moe(8, 2, 1, 4096, 14336)); + //test_cases.emplace_back(new test_moe(8, 2, 8, 4096, 14336)); +#endif // run tests if (mode == MODE_TEST) { @@ -1267,14 +1396,17 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op ggml_backend_free(backend_cpu); return n_ok == test_cases.size(); - } else if (mode == MODE_PERF) { + } + + if (mode == MODE_PERF) { for (auto & test : test_cases) { test->eval_perf(backend, op_name); } return true; - } else { - GGML_ASSERT(false); } + + GGML_ASSERT(false); + return false; } static void usage(char ** argv) { @@ -1347,11 +1479,12 @@ int main(int argc, char ** argv) { } printf("%zu/%zu backends passed\n", n_ok, ggml_backend_reg_get_count()); + if (n_ok != ggml_backend_reg_get_count()) { printf("\033[1;31mFAIL\033[0m\n"); return 1; - } else { - printf("\033[1;32mOK\033[0m\n"); - return 0; } + + printf("\033[1;32mOK\033[0m\n"); + return 0; } From 113f9942fc73a262c85e9dcf7c2ea7336250bba0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 13 Dec 2023 14:05:38 +0200 Subject: [PATCH 075/811] readme : update hot topics --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ce026b8d1..014a37c85 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- Added Mixtral support: https://github.com/ggerganov/llama.cpp/pull/4406 - **llama.h API change for handling KV cache offloading and data type: https://github.com/ggerganov/llama.cpp/pull/4309** - Using `llama.cpp` with AWS instances: https://github.com/ggerganov/llama.cpp/discussions/4225 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 From 9fb13f95840c722ad419f390dc8a9c86080a3700 Mon Sep 17 00:00:00 2001 From: Siwen Yu Date: Wed, 13 Dec 2023 20:50:14 +0800 Subject: [PATCH 076/811] common : add `--version` option to show build info in CLI (#4433) --- common/common.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index 4a61ae593..93d5483e4 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -656,6 +656,10 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } else if (arg == "-h" || arg == "--help") { return false; + } else if (arg == "--version") { + fprintf(stderr, "version: %d (%s)\n", LLAMA_BUILD_NUMBER, LLAMA_COMMIT); + fprintf(stderr, "built with %s for %s\n", LLAMA_COMPILER, LLAMA_BUILD_TARGET); + exit(0); } else if (arg == "--random-prompt") { 
params.random_prompt = true; } else if (arg == "--in-prefix-bos") { @@ -794,6 +798,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf("\n"); printf("options:\n"); printf(" -h, --help show this help message and exit\n"); + printf(" --version show version and build info\n"); printf(" -i, --interactive run in interactive mode\n"); printf(" --interactive-first run in interactive mode and wait for input right away\n"); printf(" -ins, --instruct run in instruction mode (use with Alpaca models)\n"); From 70f806b821f603cafb6f634c93a6729dc21bb354 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Wed, 13 Dec 2023 12:10:10 -0500 Subject: [PATCH 077/811] build : detect host compiler and cuda compiler separately (#4414) --- .editorconfig | 3 + CMakeLists.txt | 128 +++++++++++++++++++++++++++++-------------- Makefile | 120 +++++++++++++++------------------------- scripts/get-flags.mk | 38 +++++++++++++ 4 files changed, 171 insertions(+), 118 deletions(-) create mode 100644 scripts/get-flags.mk diff --git a/.editorconfig b/.editorconfig index f8245b85c..a56e9ccc8 100644 --- a/.editorconfig +++ b/.editorconfig @@ -15,6 +15,9 @@ indent_size = 4 [Makefile] indent_style = tab +[scripts/*.mk] +indent_style = tab + [prompts/*.txt] insert_final_newline = unset diff --git a/CMakeLists.txt b/CMakeLists.txt index eea4673d1..57b43c136 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -397,57 +397,102 @@ if (LLAMA_HIPBLAS) endif() endif() -if (LLAMA_ALL_WARNINGS) - if (NOT MSVC) - set(warning_flags -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) - set(c_flags -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration) - set(cxx_flags -Wmissing-declarations -Wmissing-noreturn) - set(host_cxx_flags "") +function(get_flags CCID CCVER) + set(C_FLAGS "") + set(CXX_FLAGS "") - if (CMAKE_C_COMPILER_ID MATCHES "Clang") - set(warning_flags ${warning_flags} -Wunreachable-code-break -Wunreachable-code-return) - set(host_cxx_flags ${host_cxx_flags} -Wmissing-prototypes -Wextra-semi) + if (CCID MATCHES "Clang") + set(C_FLAGS -Wunreachable-code-break -Wunreachable-code-return) + set(CXX_FLAGS -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi) - if ( - (CMAKE_C_COMPILER_ID STREQUAL "Clang" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL 3.8.0) OR - (CMAKE_C_COMPILER_ID STREQUAL "AppleClang" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER_EQUAL 7.3.0) - ) - set(c_flags ${c_flags} -Wdouble-promotion) - endif() - elseif (CMAKE_C_COMPILER_ID STREQUAL "GNU") - set(c_flags ${c_flags} -Wdouble-promotion) - set(host_cxx_flags ${host_cxx_flags} -Wno-array-bounds) - - if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 7.1.0) - set(host_cxx_flags ${host_cxx_flags} -Wno-format-truncation) - endif() - if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 8.1.0) - set(host_cxx_flags ${host_cxx_flags} -Wextra-semi) - endif() + if ( + (CCID STREQUAL "Clang" AND CCVER VERSION_GREATER_EQUAL 3.8.0) OR + (CCID STREQUAL "AppleClang" AND CCVER VERSION_GREATER_EQUAL 7.3.0) + ) + set(C_FLAGS ${C_FLAGS} -Wdouble-promotion) + endif() + elseif (CCID STREQUAL "GNU") + set(C_FLAGS -Wdouble-promotion) + set(CXX_FLAGS -Wno-array-bounds) + + if (CCVER VERSION_GREATER_EQUAL 7.1.0) + set(CXX_FLAGS ${CXX_FLAGS} -Wno-format-truncation) + endif() + if (CCVER VERSION_GREATER_EQUAL 8.1.0) + set(CXX_FLAGS ${CXX_FLAGS} -Wextra-semi) endif() - else() - # todo : msvc endif() - set(c_flags ${c_flags} ${warning_flags}) 
- set(cxx_flags ${cxx_flags} ${warning_flags}) - add_compile_options("$<$:${c_flags}>" - "$<$:${cxx_flags}>" - "$<$:${host_cxx_flags}>") + set(GF_C_FLAGS ${C_FLAGS} PARENT_SCOPE) + set(GF_CXX_FLAGS ${CXX_FLAGS} PARENT_SCOPE) +endfunction() +if (LLAMA_ALL_WARNINGS) + if (NOT MSVC) + set(WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) + set(C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes + -Werror=implicit-int -Werror=implicit-function-declaration) + set(CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn) + + set(C_FLAGS ${WARNING_FLAGS} ${C_FLAGS}) + set(CXX_FLAGS ${WARNING_FLAGS} ${CXX_FLAGS}) + + get_flags(${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}) + + add_compile_options("$<$:${C_FLAGS};${GF_C_FLAGS}>" + "$<$:${CXX_FLAGS};${GF_CXX_FLAGS}>") + else() + # todo : msvc + set(C_FLAGS "") + set(CXX_FLAGS "") + endif() endif() -if (NOT MSVC) - set(cuda_flags -Wno-pedantic) -endif() -set(cuda_flags ${cxx_flags} -use_fast_math ${cuda_flags}) +if (LLAMA_CUBLAS) + set(CUDA_FLAGS ${CXX_FLAGS} -use_fast_math) + if (NOT MSVC) + set(CUDA_FLAGS ${CUDA_FLAGS} -Wno-pedantic) + endif() -list(JOIN host_cxx_flags " " cuda_host_flags) # pass host compiler flags as a single argument -if (NOT cuda_host_flags STREQUAL "") - set(cuda_flags ${cuda_flags} -Xcompiler ${cuda_host_flags}) -endif() + if (LLAMA_ALL_WARNINGS AND NOT MSVC) + set(NVCC_CMD ${CMAKE_CUDA_COMPILER} .c) + if (NOT CMAKE_CUDA_HOST_COMPILER STREQUAL "") + set(NVCC_CMD ${NVCC_CMD} -ccbin ${CMAKE_CUDA_HOST_COMPILER}) + endif() -add_compile_options("$<$:${cuda_flags}>") + execute_process( + COMMAND ${NVCC_CMD} -Xcompiler --version + OUTPUT_VARIABLE CUDA_CCFULLVER + ERROR_QUIET + ) + + if (NOT CUDA_CCFULLVER MATCHES clang) + set(CUDA_CCID "GNU") + execute_process( + COMMAND ${NVCC_CMD} -Xcompiler "-dumpfullversion -dumpversion" + OUTPUT_VARIABLE CUDA_CCVER + ERROR_QUIET + ) + else() + if (CUDA_CCFULLVER MATCHES Apple) + set(CUDA_CCID "AppleClang") + else() + set(CUDA_CCID "Clang") + endif() + string(REGEX REPLACE "^.* version ([0-9.]*).*$" "\\1" CUDA_CCVER ${CUDA_CCFULLVER}) + endif() + + message("-- CUDA host compiler is ${CUDA_CCID} ${CUDA_CCVER}") + + get_flags(${CUDA_CCID} ${CUDA_CCVER}) + list(JOIN GF_CXX_FLAGS " " CUDA_CXX_FLAGS) # pass host compiler flags as a single argument + if (NOT CUDA_CXX_FLAGS STREQUAL "") + set(CUDA_FLAGS ${CUDA_FLAGS} -Xcompiler ${CUDA_CXX_FLAGS}) + endif() + endif() + + add_compile_options("$<$:${CUDA_FLAGS}>") +endif() if (WIN32) add_compile_definitions(_CRT_SECURE_NO_WARNINGS) @@ -471,6 +516,7 @@ endif() execute_process( COMMAND ${CMAKE_C_COMPILER} ${CMAKE_EXE_LINKER_FLAGS} -Wl,-v ERROR_VARIABLE output + OUTPUT_QUIET ) if (output MATCHES "dyld-1015\.7") add_compile_definitions(HAVE_BUGGY_APPLE_LINKER) diff --git a/Makefile b/Makefile index b7afda2b5..fb775ae5b 100644 --- a/Makefile +++ b/Makefile @@ -26,20 +26,6 @@ ifndef UNAME_M UNAME_M := $(shell uname -m) endif -ifeq '' '$(findstring clang,$(shell $(CC) --version))' - CC_IS_GCC=1 - CC_VER := $(shell $(CC) -dumpfullversion -dumpversion | awk -F. '{ printf("%02d%02d%02d", $$1, $$2, $$3) }') -else - CC_IS_CLANG=1 - ifeq '' '$(findstring Apple,$(shell $(CC) --version))' - CC_IS_LLVM_CLANG=1 - else - CC_IS_APPLE_CLANG=1 - endif - CC_VER := $(shell $(CC) --version | sed -n 's/^.* version \([0-9.]*\).*$$/\1/p' \ - | awk -F. 
'{ printf("%02d%02d%02d", $$1, $$2, $$3) }') -endif - # Mac OS + Arm can report x86_64 # ref: https://github.com/ggerganov/whisper.cpp/issues/66#issuecomment-1282546789 ifeq ($(UNAME_S),Darwin) @@ -121,12 +107,12 @@ MK_CXXFLAGS = -std=c++11 -fPIC # -Ofast tends to produce faster code, but may not be available for some compilers. ifdef LLAMA_FAST -MK_CFLAGS += -Ofast -MK_HOST_CXXFLAGS += -Ofast -MK_CUDA_CXXFLAGS += -O3 +MK_CFLAGS += -Ofast +HOST_CXXFLAGS += -Ofast +MK_NVCCFLAGS += -O3 else -MK_CFLAGS += -O3 -MK_CXXFLAGS += -O3 +MK_CFLAGS += -O3 +MK_CXXFLAGS += -O3 endif # clock_gettime came in POSIX.1b (1993) @@ -220,30 +206,6 @@ MK_CFLAGS += $(WARN_FLAGS) -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmis -Werror=implicit-function-declaration MK_CXXFLAGS += $(WARN_FLAGS) -Wmissing-declarations -Wmissing-noreturn -ifeq ($(CC_IS_CLANG), 1) - # clang options - MK_CFLAGS += -Wunreachable-code-break -Wunreachable-code-return - MK_HOST_CXXFLAGS += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi - - ifneq '' '$(and $(CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 030800)))' - MK_CFLAGS += -Wdouble-promotion - endif - ifneq '' '$(and $(CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 070300)))' - MK_CFLAGS += -Wdouble-promotion - endif -else - # gcc options - MK_CFLAGS += -Wdouble-promotion - MK_HOST_CXXFLAGS += -Wno-array-bounds - - ifeq ($(shell expr $(CC_VER) \>= 070100), 1) - MK_HOST_CXXFLAGS += -Wno-format-truncation - endif - ifeq ($(shell expr $(CC_VER) \>= 080100), 1) - MK_HOST_CXXFLAGS += -Wextra-semi - endif -endif - # this version of Apple ld64 is buggy ifneq '' '$(findstring dyld-1015.7,$(shell $(CC) $(LDFLAGS) -Wl,-v 2>&1))' MK_CPPFLAGS += -DHAVE_BUGGY_APPLE_LINKER @@ -294,8 +256,8 @@ ifndef RISCV ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64)) # Use all CPU extensions that are available: - MK_CFLAGS += -march=native -mtune=native - MK_HOST_CXXFLAGS += -march=native -mtune=native + MK_CFLAGS += -march=native -mtune=native + HOST_CXXFLAGS += -march=native -mtune=native # Usage AVX-only #MK_CFLAGS += -mfma -mf16c -mavx @@ -398,10 +360,10 @@ ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib OBJS += ggml-cuda.o - NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math + MK_NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math ifdef LLAMA_DEBUG - NVCCFLAGS += -lineinfo + MK_NVCCFLAGS += -lineinfo endif ifdef LLAMA_CUDA_NVCC @@ -410,54 +372,52 @@ else NVCC = nvcc endif #LLAMA_CUDA_NVCC ifdef CUDA_DOCKER_ARCH - NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH) -else ifdef CUDA_POWER_ARCH - NVCCFLAGS += -else - NVCCFLAGS += -arch=native + MK_NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH) +else ifndef CUDA_POWER_ARCH + MK_NVCCFLAGS += -arch=native endif # CUDA_DOCKER_ARCH ifdef LLAMA_CUDA_FORCE_DMMV - NVCCFLAGS += -DGGML_CUDA_FORCE_DMMV + MK_NVCCFLAGS += -DGGML_CUDA_FORCE_DMMV endif # LLAMA_CUDA_FORCE_DMMV ifdef LLAMA_CUDA_FORCE_MMQ - NVCCFLAGS += -DGGML_CUDA_FORCE_MMQ + MK_NVCCFLAGS += -DGGML_CUDA_FORCE_MMQ endif # LLAMA_CUDA_FORCE_MMQ ifdef LLAMA_CUDA_DMMV_X - NVCCFLAGS += -DGGML_CUDA_DMMV_X=$(LLAMA_CUDA_DMMV_X) + MK_NVCCFLAGS += -DGGML_CUDA_DMMV_X=$(LLAMA_CUDA_DMMV_X) else - NVCCFLAGS += -DGGML_CUDA_DMMV_X=32 + MK_NVCCFLAGS += 
-DGGML_CUDA_DMMV_X=32 endif # LLAMA_CUDA_DMMV_X ifdef LLAMA_CUDA_MMV_Y - NVCCFLAGS += -DGGML_CUDA_MMV_Y=$(LLAMA_CUDA_MMV_Y) + MK_NVCCFLAGS += -DGGML_CUDA_MMV_Y=$(LLAMA_CUDA_MMV_Y) else ifdef LLAMA_CUDA_DMMV_Y - NVCCFLAGS += -DGGML_CUDA_MMV_Y=$(LLAMA_CUDA_DMMV_Y) # for backwards compatibility + MK_NVCCFLAGS += -DGGML_CUDA_MMV_Y=$(LLAMA_CUDA_DMMV_Y) # for backwards compatibility else - NVCCFLAGS += -DGGML_CUDA_MMV_Y=1 + MK_NVCCFLAGS += -DGGML_CUDA_MMV_Y=1 endif # LLAMA_CUDA_MMV_Y ifdef LLAMA_CUDA_F16 - NVCCFLAGS += -DGGML_CUDA_F16 + MK_NVCCFLAGS += -DGGML_CUDA_F16 endif # LLAMA_CUDA_F16 ifdef LLAMA_CUDA_DMMV_F16 - NVCCFLAGS += -DGGML_CUDA_F16 + MK_NVCCFLAGS += -DGGML_CUDA_F16 endif # LLAMA_CUDA_DMMV_F16 ifdef LLAMA_CUDA_KQUANTS_ITER - NVCCFLAGS += -DK_QUANTS_PER_ITERATION=$(LLAMA_CUDA_KQUANTS_ITER) + MK_NVCCFLAGS += -DK_QUANTS_PER_ITERATION=$(LLAMA_CUDA_KQUANTS_ITER) else - NVCCFLAGS += -DK_QUANTS_PER_ITERATION=2 + MK_NVCCFLAGS += -DK_QUANTS_PER_ITERATION=2 endif ifdef LLAMA_CUDA_PEER_MAX_BATCH_SIZE - NVCCFLAGS += -DGGML_CUDA_PEER_MAX_BATCH_SIZE=$(LLAMA_CUDA_PEER_MAX_BATCH_SIZE) + MK_NVCCFLAGS += -DGGML_CUDA_PEER_MAX_BATCH_SIZE=$(LLAMA_CUDA_PEER_MAX_BATCH_SIZE) else - NVCCFLAGS += -DGGML_CUDA_PEER_MAX_BATCH_SIZE=128 + MK_NVCCFLAGS += -DGGML_CUDA_PEER_MAX_BATCH_SIZE=128 endif # LLAMA_CUDA_PEER_MAX_BATCH_SIZE #ifdef LLAMA_CUDA_CUBLAS -# NVCCFLAGS += -DGGML_CUDA_CUBLAS +# MK_NVCCFLAGS += -DGGML_CUDA_CUBLAS #endif # LLAMA_CUDA_CUBLAS ifdef LLAMA_CUDA_CCBIN - NVCCFLAGS += -ccbin $(LLAMA_CUDA_CCBIN) + MK_NVCCFLAGS += -ccbin $(LLAMA_CUDA_CCBIN) endif ggml-cuda.o: ggml-cuda.cu ggml-cuda.h - $(NVCC) $(NVCCFLAGS) -c $< -o $@ + $(NVCC) $(BASE_CXXFLAGS) $(NVCCFLAGS) -Wno-pedantic -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ endif # LLAMA_CUBLAS ifdef LLAMA_CLBLAST @@ -519,16 +479,22 @@ ggml-mpi.o: ggml-mpi.c ggml-mpi.h $(CC) $(CFLAGS) -c $< -o $@ endif # LLAMA_MPI -# combine build flags with cmdline overrides -override CFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CFLAGS) $(CFLAGS) -override CXXFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CXXFLAGS) $(CXXFLAGS) -override CUDA_CXXFLAGS := $(MK_CUDA_CXXFLAGS) $(CUDA_CXXFLAGS) -override HOST_CXXFLAGS := $(MK_HOST_CXXFLAGS) $(HOST_CXXFLAGS) -override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) +GF_CC := $(CC) +include scripts/get-flags.mk -# save CXXFLAGS before we add host-only options -NVCCFLAGS := $(NVCCFLAGS) $(CXXFLAGS) $(CUDA_CXXFLAGS) -Wno-pedantic -Xcompiler "$(HOST_CXXFLAGS)" -override CXXFLAGS += $(HOST_CXXFLAGS) +# combine build flags with cmdline overrides +override CFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CFLAGS) $(GF_CFLAGS) $(CFLAGS) +BASE_CXXFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CXXFLAGS) $(CXXFLAGS) +override CXXFLAGS := $(BASE_CXXFLAGS) $(HOST_CXXFLAGS) $(GF_CXXFLAGS) +override NVCCFLAGS := $(MK_NVCCFLAGS) $(NVCCFLAGS) +override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) + +# identify CUDA host compiler +ifdef LLAMA_CUBLAS +GF_CC := $(NVCC) $(NVCCFLAGS) 2>/dev/null .c -Xcompiler +include scripts/get-flags.mk +CUDA_CXXFLAGS := $(GF_CXXFLAGS) +endif # # Print build information diff --git a/scripts/get-flags.mk b/scripts/get-flags.mk new file mode 100644 index 000000000..596d7ead1 --- /dev/null +++ b/scripts/get-flags.mk @@ -0,0 +1,38 @@ +ifeq '' '$(findstring clang,$(shell $(GF_CC) --version))' + GF_CC_IS_GCC = 1 + GF_CC_VER := $(shell { $(GF_CC) -dumpfullversion 2>/dev/null || $(GF_CC) -dumpversion; } | awk -F. 
'{ printf("%02d%02d%02d", $$1, $$2, $$3) }') +else + GF_CC_IS_CLANG = 1 + ifeq '' '$(findstring Apple,$(shell $(GF_CC) --version))' + GF_CC_IS_LLVM_CLANG = 1 + else + GF_CC_IS_APPLE_CLANG = 1 + endif + GF_CC_VER := \ + $(shell $(GF_CC) --version | sed -n 's/^.* version \([0-9.]*\).*$$/\1/p' \ + | awk -F. '{ printf("%02d%02d%02d", $$1, $$2, $$3) }') +endif + +ifeq ($(GF_CC_IS_CLANG), 1) + # clang options + GF_CFLAGS = -Wunreachable-code-break -Wunreachable-code-return + GF_CXXFLAGS = -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi + + ifneq '' '$(and $(GF_CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(GF_CC_VER) \>= 030800)))' + GF_CFLAGS += -Wdouble-promotion + endif + ifneq '' '$(and $(GF_CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(GF_CC_VER) \>= 070300)))' + GF_CFLAGS += -Wdouble-promotion + endif +else + # gcc options + GF_CFLAGS = -Wdouble-promotion + GF_CXXFLAGS = -Wno-array-bounds + + ifeq ($(shell expr $(GF_CC_VER) \>= 070100), 1) + GF_CXXFLAGS += -Wno-format-truncation + endif + ifeq ($(shell expr $(GF_CC_VER) \>= 080100), 1) + GF_CXXFLAGS += -Wextra-semi + endif +endif From 4d98d9a65665eee3838cef936641f640e3f5b649 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 13 Dec 2023 21:54:54 +0200 Subject: [PATCH 078/811] sync : ggml (SD ops, tests, kernels) (#4444) * sync : ggml (SD ops, tests, kernels) ggml-ci * cuda : restore im2col ggml-ci * metal : fix accuracy of dequantization kernels ggml-ci * cuda : restore correct im2col ggml-ci * metal : try to fix moe test by reducing expert size ggml-ci * cuda : fix bin bcast when src1 and dst have different types ggml-ci --------- Co-authored-by: slaren --- ggml-cuda.cu | 481 +++++++++++++++++++++++++++++++++++-- ggml-metal.m | 265 +++++++++++++++++++- ggml-metal.metal | 296 +++++++++++++++++++---- ggml.c | 183 +++++++++++--- ggml.h | 20 +- tests/test-backend-ops.cpp | 219 ++++++++++++++++- 6 files changed, 1334 insertions(+), 130 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9e1acd3f1..019648bdd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -439,6 +439,7 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define CUDA_GELU_BLOCK_SIZE 256 #define CUDA_SILU_BLOCK_SIZE 256 +#define CUDA_TANH_BLOCK_SIZE 256 #define CUDA_RELU_BLOCK_SIZE 256 #define CUDA_SQR_BLOCK_SIZE 256 #define CUDA_CPY_BLOCK_SIZE 32 @@ -451,6 +452,11 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define CUDA_QUANTIZE_BLOCK_SIZE 256 #define CUDA_DEQUANTIZE_BLOCK_SIZE 256 #define CUDA_GET_ROWS_BLOCK_SIZE 256 +#define CUDA_UPSCALE_BLOCK_SIZE 256 +#define CUDA_CONCAT_BLOCK_SIZE 256 +#define CUDA_PAD_BLOCK_SIZE 256 +#define CUDA_ACC_BLOCK_SIZE 256 +#define CUDA_IM2COL_BLOCK_SIZE 256 // dmmv = dequantize_mul_mat_vec #ifndef GGML_CUDA_DMMV_X @@ -612,6 +618,24 @@ static __global__ void k_bin_bcast_unravel(const src0_t * src0, const src1_t * s dst_row[i0] = (dst_t)bin_op(src0 ? 
(float)src0_row[i0] : 0.0f, (float)src1_row[i10]); } +static __global__ void acc_f32(const float * x, const float * y, float * dst, const int ne, + const int ne10, const int ne11, const int ne12, + const int nb1, const int nb2, int offset) { + const int i = blockDim.x * blockIdx.x + threadIdx.x; + if (i >= ne) { + return; + } + int src1_idx = i - offset; + int oz = src1_idx / nb2; + int oy = (src1_idx - (oz * nb2)) / nb1; + int ox = src1_idx % nb1; + if (src1_idx >= 0 && ox < ne10 && oy < ne11 && oz < ne12) { + dst[i] = x[i] + y[ox + oy * ne10 + oz * ne10 * ne11]; + } else { + dst[i] = x[i]; + } +} + static __global__ void gelu_f32(const float * x, float * dst, const int k) { const float GELU_COEF_A = 0.044715f; const float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; @@ -634,6 +658,23 @@ static __global__ void silu_f32(const float * x, float * dst, const int k) { dst[i] = x[i] / (1.0f + expf(-x[i])); } +static __global__ void gelu_quick_f32(const float *x, float *dst, int k) { + const float GELU_QUICK_COEF = -1.702f; + const int i = blockDim.x*blockIdx.x + threadIdx.x; + if (i >= k) { + return; + } + dst[i] = x[i] * (1.0f / (1.0f + expf(GELU_QUICK_COEF * x[i]))); +} + +static __global__ void tanh_f32(const float *x, float *dst, int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + if (i >= k) { + return; + } + dst[i] = tanhf(x[i]); +} + static __global__ void relu_f32(const float * x, float * dst, const int k) { const int i = blockDim.x*blockIdx.x + threadIdx.x; @@ -643,6 +684,14 @@ static __global__ void relu_f32(const float * x, float * dst, const int k) { dst[i] = fmaxf(x[i], 0); } +static __global__ void leaky_relu_f32(const float *x, float *dst, const int k, const float negative_slope) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + if (i >= k) { + return; + } + dst[i] = fmaxf(x[i], 0) + fminf(x[i], 0.0f) * negative_slope; +} + static __global__ void sqr_f32(const float * x, float * dst, const int k) { const int i = blockDim.x*blockIdx.x + threadIdx.x; @@ -688,6 +737,132 @@ static __global__ void norm_f32(const float * x, float * dst, const int ncols, c } } +static __global__ void concat_f32(const float *x,const float *y, float *dst, const int ne0, const int ne02) { + int nidx = threadIdx.x + blockIdx.x * blockDim.x; + if (nidx >= ne0) { + return; + } + // operation + int offset_dst = + nidx + + blockIdx.y * ne0 + + blockIdx.z * ne0 * gridDim.y; + if (blockIdx.z < ne02) { // src0 + int offset_src = + nidx + + blockIdx.y * ne0 + + blockIdx.z * ne0 * gridDim.y; + dst[offset_dst] = x[offset_src]; + } else { + int offset_src = + nidx + + blockIdx.y * ne0 + + (blockIdx.z - ne02) * ne0 * gridDim.y; + dst[offset_dst] = y[offset_src]; + } +} + +static __global__ void upscale_f32(const float *x, float *dst, const int ne00, const int nb02, const int scale_factor) { + int ne0 = ne00 * scale_factor; + int nidx = threadIdx.x + blockIdx.x * blockDim.x; + if (nidx >= ne0) { + return; + } + // operation + int i00 = nidx / scale_factor; + int i01 = blockIdx.y / scale_factor; + int offset_src = + i00 + + i01 * ne00 + + blockIdx.z * nb02; + int offset_dst = + nidx + + blockIdx.y * ne0 + + blockIdx.z * ne0 * gridDim.y; + dst[offset_dst] = x[offset_src]; +} + +static __global__ void pad_f32(const float *x, float *dst, const int ne0, const int ne00, const int ne01, const int ne02) { + int nidx = threadIdx.x + blockIdx.x * blockDim.x; + if (nidx >= ne0) { + return; + } + + // operation + int offset_dst = + nidx + + blockIdx.y * ne0 + + blockIdx.z * ne0 * gridDim.y; + if (nidx < 
ne00 && blockIdx.y < ne01 && blockIdx.z < ne02) { + int offset_src = + nidx + + blockIdx.y * ne00 + + blockIdx.z * ne00 * ne01; + dst[offset_dst] = x[offset_src]; + } else { + dst[offset_dst] = 0.0f; + } +} + +template +static __global__ void group_norm_f32(const float * x, float * dst, const int group_size, const int ne_elements, const float eps) { + int start = blockIdx.x * group_size; + int end = start + group_size; + + start += threadIdx.x; + + if (end >= ne_elements) { + end = ne_elements; + } + + float tmp = 0.0f; // partial sum for thread in warp + + for (int j = start; j < end; j += block_size) { + tmp += x[j]; + } + + tmp = warp_reduce_sum(tmp); + if (block_size > WARP_SIZE) { + __shared__ float s_sum[32]; + int warp_id = threadIdx.x / WARP_SIZE; + int lane_id = threadIdx.x % WARP_SIZE; + if (lane_id == 0) { + s_sum[warp_id] = tmp; + } + __syncthreads(); + tmp = s_sum[lane_id]; + tmp = warp_reduce_sum(tmp); + } + + float mean = tmp / group_size; + tmp = 0.0f; + + for (int j = start; j < end; j += block_size) { + float xi = x[j] - mean; + dst[j] = xi; + tmp += xi * xi; + } + + tmp = warp_reduce_sum(tmp); + if (block_size > WARP_SIZE) { + __shared__ float s_sum[32]; + int warp_id = threadIdx.x / WARP_SIZE; + int lane_id = threadIdx.x % WARP_SIZE; + if (lane_id == 0) { + s_sum[warp_id] = tmp; + } + __syncthreads(); + tmp = s_sum[lane_id]; + tmp = warp_reduce_sum(tmp); + } + + float variance = tmp / group_size; + float scale = rsqrtf(variance + eps); + for (int j = start; j < end; j += block_size) { + dst[j] *= scale; + } +} + template static __global__ void rms_norm_f32(const float * x, float * dst, const int ncols, const float eps) { const int row = blockIdx.x*blockDim.y + threadIdx.y; @@ -5071,19 +5246,30 @@ static __global__ void clamp_f32(const float * x, float * dst, const float min, static __global__ void im2col_f32_f16( const float * x, half * dst, - int ofs0, int ofs1, int IW, int IH, int CHW, + int offset_delta, int IW, int IH, int OW, int KW, int KH, int pelements, int CHW, int s0, int s1, int p0, int p1, int d0, int d1) { - const int iiw = blockIdx.z * s0 + threadIdx.z * d0 - p0; - const int iih = blockIdx.y * s1 + threadIdx.y * d1 - p1; + const int i = threadIdx.x + blockIdx.x * blockDim.x; + if (i >= pelements) { + return; + } + + const int ksize = OW * (KH > 1 ? 
KW : 1); + const int kx = i / ksize; + const int kd = kx * ksize; + const int ky = (i - kd) / OW; + const int ix = i % OW; + + const int iiw = ix * s0 + kx * d0 - p0; + const int iih = blockIdx.y * s1 + ky * d1 - p1; const int offset_dst = - (threadIdx.x * gridDim.y * gridDim.z + blockIdx.y * gridDim.z + blockIdx.z) * CHW + - (blockIdx.x * (blockDim.y * blockDim.z) + threadIdx.y * blockDim.z + threadIdx.z); + (blockIdx.y * OW + ix) * CHW + + (blockIdx.z * (KW * KH) + ky * KW + kx); if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { dst[offset_dst] = __float2half(0.0f); } else { - const int offset_src = threadIdx.x * ofs0 + blockIdx.x * ofs1; + const int offset_src = blockIdx.z * offset_delta; dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); } } @@ -5220,10 +5406,10 @@ struct bin_bcast_cuda { size_t nb12 = cnb1[2]; size_t nb13 = cnb1[3]; - size_t s0 = nb0 / sizeof(src1_t); - size_t s1 = nb1 / sizeof(src1_t); - size_t s2 = nb2 / sizeof(src1_t); - size_t s3 = nb3 / sizeof(src1_t); + size_t s0 = nb0 / sizeof(dst_t); + size_t s1 = nb1 / sizeof(dst_t); + size_t s2 = nb2 / sizeof(dst_t); + size_t s3 = nb3 / sizeof(dst_t); size_t s10 = nb10 / sizeof(src1_t); size_t s11 = nb11 / sizeof(src1_t); @@ -5269,6 +5455,13 @@ struct bin_bcast_cuda { } }; +static void acc_f32_cuda(const float * x, const float * y, float * dst, const int n_elements, + const int ne10, const int ne11, const int ne12, + const int nb1, const int nb2, const int offset, cudaStream_t stream) { + int num_blocks = (n_elements + CUDA_ACC_BLOCK_SIZE - 1) / CUDA_ACC_BLOCK_SIZE; + acc_f32<<>>(x, y, dst, n_elements, ne10, ne11, ne12, nb1, nb2, offset); +} + static void gelu_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_GELU_BLOCK_SIZE - 1) / CUDA_GELU_BLOCK_SIZE; gelu_f32<<>>(x, dst, k); @@ -5279,11 +5472,26 @@ static void silu_f32_cuda(const float * x, float * dst, const int k, cudaStream_ silu_f32<<>>(x, dst, k); } +static void gelu_quick_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_GELU_BLOCK_SIZE - 1) / CUDA_GELU_BLOCK_SIZE; + gelu_quick_f32<<>>(x, dst, k); +} + +static void tanh_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_TANH_BLOCK_SIZE - 1) / CUDA_TANH_BLOCK_SIZE; + tanh_f32<<>>(x, dst, k); +} + static void relu_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_RELU_BLOCK_SIZE - 1) / CUDA_RELU_BLOCK_SIZE; relu_f32<<>>(x, dst, k); } +static void leaky_relu_f32_cuda(const float * x, float * dst, const int k, const float negative_slope, cudaStream_t stream) { + const int num_blocks = (k + CUDA_RELU_BLOCK_SIZE - 1) / CUDA_RELU_BLOCK_SIZE; + leaky_relu_f32<<>>(x, dst, k, negative_slope); +} + static void sqr_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_SQR_BLOCK_SIZE - 1) / CUDA_SQR_BLOCK_SIZE; sqr_f32<<>>(x, dst, k); @@ -5300,6 +5508,38 @@ static void norm_f32_cuda(const float * x, float * dst, const int ncols, const i } } +static void group_norm_f32_cuda(const float * x, float * dst, const int num_groups, const int group_size, const int ne_elements, cudaStream_t stream) { + static const float eps = 1e-6f; + if (group_size < 1024) { + const dim3 block_dims(WARP_SIZE, 1, 1); + group_norm_f32<<>>(x, dst, group_size, ne_elements, eps); + } else { + const dim3 block_dims(1024, 1, 1); + group_norm_f32<1024><<>>(x, 
dst, group_size, ne_elements, eps); + } +} + +static void concat_f32_cuda(const float * x, const float * y, float * dst, const int ne0, int ne1, int ne2, int ne02, cudaStream_t stream) { + int num_blocks = (ne0 + CUDA_CONCAT_BLOCK_SIZE - 1) / CUDA_CONCAT_BLOCK_SIZE; + dim3 gridDim(num_blocks, ne1, ne2); + concat_f32<<>>(x, y, dst, ne0, ne02); +} + +static void upscale_f32_cuda(const float * x, float * dst, const int ne00, const int ne01, const int ne02, const int scale_factor, cudaStream_t stream) { + int ne0 = (ne00 * scale_factor); + int num_blocks = (ne0 + CUDA_UPSCALE_BLOCK_SIZE - 1) / CUDA_UPSCALE_BLOCK_SIZE; + dim3 gridDim(num_blocks, (ne01 * scale_factor), ne02); + upscale_f32<<>>(x, dst, ne00, ne00 * ne01, scale_factor); +} + +static void pad_f32_cuda(const float * x, float * dst, + const int ne00, const int ne01, const int ne02, + const int ne0, const int ne1, const int ne2, cudaStream_t stream) { + int num_blocks = (ne0 + CUDA_PAD_BLOCK_SIZE - 1) / CUDA_PAD_BLOCK_SIZE; + dim3 gridDim(num_blocks, ne1, ne2); + pad_f32<<>>(x, dst, ne0, ne00, ne01, ne02); +} + static void rms_norm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, const float eps, cudaStream_t stream) { GGML_ASSERT(ncols % WARP_SIZE == 0); if (ncols < 1024) { @@ -6262,13 +6502,14 @@ static void soft_max_f32_cuda(const float * x, const float * y, float * dst, con soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); } -static void im2col_f32_f16_cuda(const float * x, half * dst, - int OH, int IW, int IH, int OW, int IC, - int KH, int KW, int N, int ofs0, int ofs1, - int s0, int s1, int p0, int p1, int d0, int d1, cudaStream_t stream) { - dim3 block_nums(IC, OH, OW); - dim3 block_dims(N, KH, KW); - im2col_f32_f16<<>>(x, dst, ofs0, ofs1, IW, IH, (IC * KH * KW), s0, s1, p0, p1, d0, d1); +static void im2col_f32_f16_cuda(const float* x, half* dst, + int IW, int IH, int OW, int OH, int KW, int KH, int IC, + int offset_delta, + int s0,int s1,int p0,int p1,int d0,int d1, cudaStream_t stream) { + const int parallel_elements = OW * KW * KH; + const int num_blocks = (parallel_elements + CUDA_IM2COL_BLOCK_SIZE - 1) / CUDA_IM2COL_BLOCK_SIZE; + dim3 block_nums(num_blocks, OH, IC); + im2col_f32_f16<<>>(x, dst, offset_delta, IW, IH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1); } // buffer pool for cuda @@ -6615,6 +6856,25 @@ inline void ggml_cuda_op_add( ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } +inline void ggml_cuda_op_acc( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + GGML_ASSERT(dst->ne[3] == 1); // just 3D tensors supported + + int nb1 = dst->op_params[0] / 4; // 4 bytes of float32 + int nb2 = dst->op_params[1] / 4; // 4 bytes of float32 + // int nb3 = dst->op_params[2] / 4; // 4 bytes of float32 - unused + int offset = dst->op_params[3] / 4; // offset in bytes + + acc_f32_cuda(src0_dd, src1_dd, dst_dd, ggml_nelements(dst), src1->ne[0], src1->ne[1], src1->ne[2], nb1, nb2, offset, main_stream); + + (void) dst; +} + inline void ggml_cuda_op_mul( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -6657,6 +6917,34 @@ inline void ggml_cuda_op_silu( (void) src1_dd; } 
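// [editor's sketch — illustrative only, not part of the upstream patch]
// The gelu_quick kernel added in this patch computes the sigmoid approximation
// gelu_quick(x) = x * sigmoid(1.702 * x). A plain host-side reference of the same
// formula can be useful for spot-checking the CUDA output against the CPU backend;
// the helper name gelu_quick_reference is hypothetical, and it assumes expf() is in
// scope (it already is in ggml-cuda.cu via the CUDA/math headers):
static void gelu_quick_reference(const float * x, float * dst, const int k) {
    const float GELU_QUICK_COEF = -1.702f;
    for (int i = 0; i < k; ++i) {
        // same expression as the device kernel, evaluated on the host
        dst[i] = x[i] * (1.0f / (1.0f + expf(GELU_QUICK_COEF * x[i])));
    }
}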
+inline void ggml_cuda_op_gelu_quick( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + gelu_quick_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +inline void ggml_cuda_op_tanh( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + tanh_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + inline void ggml_cuda_op_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -6671,6 +6959,23 @@ inline void ggml_cuda_op_relu( (void) src1_dd; } +inline void ggml_cuda_op_leaky_relu( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + float negative_slope; + memcpy(&negative_slope, dst->op_params, sizeof(float)); + + leaky_relu_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), negative_slope, main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + inline void ggml_cuda_op_sqr( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -6705,6 +7010,71 @@ inline void ggml_cuda_op_norm( (void) src1_dd; } + +inline void ggml_cuda_op_group_norm( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + int num_groups = dst->op_params[0]; + int group_size = src0->ne[0] * src0->ne[1] * ((src0->ne[2] + num_groups - 1) / num_groups); + group_norm_f32_cuda(src0_dd, dst_dd, num_groups, group_size, src0->ne[0] * src0->ne[1] * src0->ne[2], main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +inline void ggml_cuda_op_concat( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_F32); + + for (int i3 = 0; i3 < dst->ne[3]; i3++) { + concat_f32_cuda(src0_dd + i3 * (src0->nb[3] / 4), src1_dd + i3 * (src1->nb[3] / 4), dst_dd + i3 * (dst->nb[3] / 4), dst->ne[0], dst->ne[1], dst->ne[2], src0->ne[2], main_stream); + } + + (void) src1; + (void) dst; +} + +inline void ggml_cuda_op_upscale( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_F32); + GGML_ASSERT(src0->ne[3] == 1 && dst->ne[3] == 1); // just 3D tensors + + const int scale_factor = 
dst->op_params[0]; + + upscale_f32_cuda(src0_dd, dst_dd, src0->ne[0], src0->ne[1], src0->ne[2], scale_factor, main_stream); + + (void) src1; + (void) dst; +} + +inline void ggml_cuda_op_pad( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_F32); + GGML_ASSERT(src0->ne[3] == 1 && dst->ne[3] == 1); // just 3D tensors + + pad_f32_cuda(src0_dd, dst_dd, + src0->ne[0], src0->ne[1], src0->ne[2], + dst->ne[0], dst->ne[1], dst->ne[2], main_stream); + + (void) src1; + (void) dst; +} + inline void ggml_cuda_op_rms_norm( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -7219,7 +7589,6 @@ inline void ggml_cuda_op_im2col( const bool is_2D = ((const int32_t*)(dst->op_params))[6] == 1; - const int64_t N = src1->ne[is_2D ? 3 : 2]; const int64_t IC = src1->ne[is_2D ? 2 : 1]; const int64_t IH = is_2D ? src1->ne[1] : 1; const int64_t IW = src1->ne[0]; @@ -7230,17 +7599,15 @@ inline void ggml_cuda_op_im2col( const int64_t OH = is_2D ? dst->ne[2] : 1; const int64_t OW = dst->ne[1]; - const size_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; // nb is byte offset, src is type float32 - const size_t ofs1 = src1->nb[is_2D ? 2 : 1] / 4; // nb is byte offset, src is type float32 + const size_t delta_offset = src1->nb[is_2D ? 2 : 1] / 4; // nb is byte offset, src is type float32 - im2col_f32_f16_cuda(src1_dd, (half*) dst_dd, - OH, IW, IH, OW, IC, KH, KW, N, - ofs0, ofs1, s0, s1, p0, p1, d0, d1, main_stream); + im2col_f32_f16_cuda(src1_dd, (half*) dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); (void) src0; (void) src0_dd; } + inline void ggml_cuda_op_sum_rows( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -7789,6 +8156,10 @@ static void ggml_cuda_add(const ggml_tensor * src0, const ggml_tensor * src1, gg ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_add); } +static void ggml_cuda_acc(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_acc); +} + static void ggml_cuda_mul(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_mul); } @@ -7805,10 +8176,22 @@ static void ggml_cuda_silu(const ggml_tensor * src0, const ggml_tensor * src1, g ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_silu); } +static void ggml_cuda_gelu_quick(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_gelu_quick); +} + +static void ggml_cuda_tanh(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_tanh); +} + static void ggml_cuda_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_relu); } +static void ggml_cuda_leaky_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_leaky_relu); +} + static void ggml_cuda_sqr(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, 
dst, ggml_cuda_op_sqr); } @@ -7817,6 +8200,22 @@ static void ggml_cuda_norm(const ggml_tensor * src0, const ggml_tensor * src1, g ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_norm); } +static void ggml_cuda_group_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_group_norm); +} + +static void ggml_cuda_concat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_concat); +} + +static void ggml_cuda_upscale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_upscale); +} + +static void ggml_cuda_pad(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_pad); +} + static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_rms_norm); } @@ -8809,6 +9208,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_ADD: func = ggml_cuda_add; break; + case GGML_OP_ACC: + func = ggml_cuda_acc; + break; case GGML_OP_MUL: func = ggml_cuda_mul; break; @@ -8823,6 +9225,12 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_UNARY_OP_SILU: func = ggml_cuda_silu; break; + case GGML_UNARY_OP_GELU_QUICK: + func = ggml_cuda_gelu_quick; + break; + case GGML_UNARY_OP_TANH: + func = ggml_cuda_tanh; + break; case GGML_UNARY_OP_RELU: func = ggml_cuda_relu; break; @@ -8833,6 +9241,21 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_NORM: func = ggml_cuda_norm; break; + case GGML_OP_GROUP_NORM: + func = ggml_cuda_group_norm; + break; + case GGML_OP_CONCAT: + func = ggml_cuda_concat; + break; + case GGML_OP_UPSCALE: + func = ggml_cuda_upscale; + break; + case GGML_OP_PAD: + func = ggml_cuda_pad; + break; + case GGML_OP_LEAKY_RELU: + func = ggml_cuda_leaky_relu; + break; case GGML_OP_RMS_NORM: func = ggml_cuda_rms_norm; break; @@ -8855,9 +9278,6 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ func = ggml_cuda_sqr; break; case GGML_OP_CLAMP: - if (!any_on_device) { - return false; - } func = ggml_cuda_clamp; break; case GGML_OP_CPY: @@ -8866,6 +9286,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_CONT: func = ggml_cuda_dup; break; + case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: case GGML_OP_PERMUTE: @@ -9285,6 +9706,8 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_GELU_QUICK: + case GGML_UNARY_OP_TANH: return true; default: return false; @@ -9369,6 +9792,12 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_IM2COL: case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: + case GGML_OP_ACC: + case GGML_OP_CONCAT: + case GGML_OP_GROUP_NORM: + case GGML_OP_UPSCALE: + case GGML_OP_PAD: + case GGML_OP_LEAKY_RELU: return true; default: return false; diff --git a/ggml-metal.m b/ggml-metal.m index 1dcfa6edd..465679a6b 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -66,9 +66,11 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(div_row); GGML_METAL_DECL_KERNEL(scale); GGML_METAL_DECL_KERNEL(scale_4); - GGML_METAL_DECL_KERNEL(silu); + 
GGML_METAL_DECL_KERNEL(tanh); GGML_METAL_DECL_KERNEL(relu); GGML_METAL_DECL_KERNEL(gelu); + GGML_METAL_DECL_KERNEL(gelu_quick); + GGML_METAL_DECL_KERNEL(silu); GGML_METAL_DECL_KERNEL(soft_max); GGML_METAL_DECL_KERNEL(soft_max_4); GGML_METAL_DECL_KERNEL(diag_mask_inf); @@ -86,6 +88,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q5_K); GGML_METAL_DECL_KERNEL(get_rows_q6_K); GGML_METAL_DECL_KERNEL(rms_norm); + GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); GGML_METAL_DECL_KERNEL(mul_mv_f32_f32); GGML_METAL_DECL_KERNEL(mul_mv_f16_f16); @@ -145,8 +148,11 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); GGML_METAL_DECL_KERNEL(im2col_f16); + GGML_METAL_DECL_KERNEL(upscale_f32); + GGML_METAL_DECL_KERNEL(pad_f32); GGML_METAL_DECL_KERNEL(argsort_f32_i32_asc); GGML_METAL_DECL_KERNEL(argsort_f32_i32_desc); + GGML_METAL_DECL_KERNEL(leaky_relu_f32); GGML_METAL_DECL_KERNEL(cpy_f32_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f32); GGML_METAL_DECL_KERNEL(cpy_f32_q8_0); @@ -334,9 +340,11 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(div_row); GGML_METAL_ADD_KERNEL(scale); GGML_METAL_ADD_KERNEL(scale_4); - GGML_METAL_ADD_KERNEL(silu); + GGML_METAL_ADD_KERNEL(tanh); GGML_METAL_ADD_KERNEL(relu); GGML_METAL_ADD_KERNEL(gelu); + GGML_METAL_ADD_KERNEL(gelu_quick); + GGML_METAL_ADD_KERNEL(silu); GGML_METAL_ADD_KERNEL(soft_max); GGML_METAL_ADD_KERNEL(soft_max_4); GGML_METAL_ADD_KERNEL(diag_mask_inf); @@ -354,6 +362,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q5_K); GGML_METAL_ADD_KERNEL(get_rows_q6_K); GGML_METAL_ADD_KERNEL(rms_norm); + GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); GGML_METAL_ADD_KERNEL(mul_mv_f32_f32); GGML_METAL_ADD_KERNEL(mul_mv_f16_f16); @@ -415,8 +424,11 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(rope_f16); GGML_METAL_ADD_KERNEL(alibi_f32); GGML_METAL_ADD_KERNEL(im2col_f16); + GGML_METAL_ADD_KERNEL(upscale_f32); + GGML_METAL_ADD_KERNEL(pad_f32); GGML_METAL_ADD_KERNEL(argsort_f32_i32_asc); GGML_METAL_ADD_KERNEL(argsort_f32_i32_desc); + GGML_METAL_ADD_KERNEL(leaky_relu_f32); GGML_METAL_ADD_KERNEL(cpy_f32_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f32); GGML_METAL_ADD_KERNEL(cpy_f32_q8_0); @@ -450,9 +462,11 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(div_row); GGML_METAL_DEL_KERNEL(scale); GGML_METAL_DEL_KERNEL(scale_4); - GGML_METAL_DEL_KERNEL(silu); + GGML_METAL_DEL_KERNEL(tanh); GGML_METAL_DEL_KERNEL(relu); GGML_METAL_DEL_KERNEL(gelu); + GGML_METAL_DEL_KERNEL(gelu_quick); + GGML_METAL_DEL_KERNEL(silu); GGML_METAL_DEL_KERNEL(soft_max); GGML_METAL_DEL_KERNEL(soft_max_4); GGML_METAL_DEL_KERNEL(diag_mask_inf); @@ -470,6 +484,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q5_K); GGML_METAL_DEL_KERNEL(get_rows_q6_K); GGML_METAL_DEL_KERNEL(rms_norm); + GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); GGML_METAL_DEL_KERNEL(mul_mv_f32_f32); GGML_METAL_DEL_KERNEL(mul_mv_f16_f16); @@ -531,8 +546,11 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(rope_f16); GGML_METAL_DEL_KERNEL(alibi_f32); GGML_METAL_DEL_KERNEL(im2col_f16); + GGML_METAL_DEL_KERNEL(upscale_f32); + GGML_METAL_DEL_KERNEL(pad_f32); GGML_METAL_DEL_KERNEL(argsort_f32_i32_asc); GGML_METAL_DEL_KERNEL(argsort_f32_i32_desc); + GGML_METAL_DEL_KERNEL(leaky_relu_f32); GGML_METAL_DEL_KERNEL(cpy_f32_f16); 
GGML_METAL_DEL_KERNEL(cpy_f32_f32); GGML_METAL_DEL_KERNEL(cpy_f32_q8_0); @@ -843,9 +861,11 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { - case GGML_UNARY_OP_SILU: + case GGML_UNARY_OP_TANH: case GGML_UNARY_OP_RELU: case GGML_UNARY_OP_GELU: + case GGML_UNARY_OP_GELU_QUICK: + case GGML_UNARY_OP_SILU: return true; default: return false; @@ -853,11 +873,11 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: - case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: - case GGML_OP_GET_ROWS: + case GGML_OP_PERMUTE: case GGML_OP_CONCAT: case GGML_OP_ADD: + case GGML_OP_ACC: case GGML_OP_MUL: case GGML_OP_DIV: case GGML_OP_SCALE: @@ -865,11 +885,15 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_SUM_ROWS: case GGML_OP_SOFT_MAX: case GGML_OP_RMS_NORM: + case GGML_OP_GROUP_NORM: case GGML_OP_NORM: case GGML_OP_ALIBI: case GGML_OP_ROPE: case GGML_OP_IM2COL: + case GGML_OP_UPSCALE: + case GGML_OP_PAD: case GGML_OP_ARGSORT: + case GGML_OP_LEAKY_RELU: case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: return true; @@ -902,8 +926,9 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { }; } case GGML_OP_DIAG_MASK_INF: + case GGML_OP_GET_ROWS: { - return op->ne[0] % 4 == 0; + return op->ne[3] == 1; } default: return false; @@ -979,7 +1004,10 @@ void ggml_metal_graph_compute( } break; } - GGML_ASSERT(ggml_metal_supports_op(dst)); + if (!ggml_metal_supports_op(dst)) { + GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); + GGML_ASSERT(!"unsupported op"); + } const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? 
src0->ne[1] : 0; @@ -1076,6 +1104,8 @@ void ggml_metal_graph_compute( case GGML_OP_MUL: case GGML_OP_DIV: { + const size_t offs = 0; + bool bcast_row = false; int64_t nb = ne00; @@ -1134,7 +1164,8 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:27]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; if (bcast_row) { const int64_t n = ggml_nelements(dst)/4; @@ -1146,6 +1177,86 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } } break; + case GGML_OP_ACC: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + GGML_ASSERT(dstt == GGML_TYPE_F32); + + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + + const size_t pnb1 = ((int32_t *) dst->op_params)[0]; + const size_t pnb2 = ((int32_t *) dst->op_params)[1]; + const size_t pnb3 = ((int32_t *) dst->op_params)[2]; + const size_t offs = ((int32_t *) dst->op_params)[3]; + + const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; + + if (!inplace) { + // run a separete kernel to cpy src->dst + // not sure how to avoid this + // TODO: make a simpler cpy_bytes kernel + + const int nth = MIN(1024, ne00); + + [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } + + [encoder setComputePipelineState:ctx->pipeline_add]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 
length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; case GGML_OP_SCALE: { GGML_ASSERT(ggml_is_contiguous(src0)); @@ -1169,16 +1280,15 @@ void ggml_metal_graph_compute( } break; case GGML_OP_UNARY: switch (ggml_get_unary_op(gf->nodes[i])) { - case GGML_UNARY_OP_SILU: + case GGML_UNARY_OP_TANH: { - [encoder setComputePipelineState:ctx->pipeline_silu]; + [encoder setComputePipelineState:ctx->pipeline_tanh]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; case GGML_UNARY_OP_RELU: { @@ -1199,6 +1309,28 @@ void ggml_metal_graph_compute( const int64_t n = ggml_nelements(dst); GGML_ASSERT(n % 4 == 0); + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU_QUICK: + { + [encoder setComputePipelineState:ctx->pipeline_gelu_quick]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_SILU: + { + [encoder setComputePipelineState:ctx->pipeline_silu]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; default: @@ -1837,6 +1969,38 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; + case GGML_OP_GROUP_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + + //float eps; + //memcpy(&eps, dst->op_params, sizeof(float)); + + const float eps = 1e-6f; // TODO: temporarily hardcoded + + const int32_t n_groups = ((int32_t *) dst->op_params)[0]; + + int nth = 32; // SIMD width + + //while (nth < ne00/4 && nth < 1024) { + // nth *= 2; + //} + + [encoder setComputePipelineState:ctx->pipeline_group_norm]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder 
setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&eps length:sizeof( float) atIndex:9]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; case GGML_OP_NORM: { float eps; @@ -2006,6 +2170,65 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; } break; + case GGML_OP_UPSCALE: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int sf = dst->op_params[0]; + + [encoder setComputePipelineState:ctx->pipeline_upscale_f32]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_PAD: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + [encoder setComputePipelineState:ctx->pipeline_pad_f32]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) 
threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; case GGML_OP_ARGSORT: { GGML_ASSERT(src0->type == GGML_TYPE_F32); @@ -2027,6 +2250,22 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; } break; + case GGML_OP_LEAKY_RELU: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + float slope; + memcpy(&slope, dst->op_params, sizeof(float)); + + [encoder setComputePipelineState:ctx->pipeline_leaky_relu_f32]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; case GGML_OP_DUP: case GGML_OP_CPY: case GGML_OP_CONT: diff --git a/ggml-metal.metal b/ggml-metal.metal index 773fac124..fe0ada445 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -79,6 +79,7 @@ kernel void kernel_add( constant int64_t & nb1, constant int64_t & nb2, constant int64_t & nb3, + constant int64_t & offs, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], uint3 ntg[[threads_per_threadgroup]]) { @@ -90,9 +91,9 @@ kernel void kernel_add( const int64_t i12 = i02 % ne12; const int64_t i11 = i01 % ne11; - device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + offs; device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; - device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + offs; for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { const int i10 = i0 % ne10; @@ -204,7 +205,7 @@ kernel void kernel_add_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(27)]], + constant int64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] + src1[tpig % nb]; } @@ -213,7 +214,7 @@ kernel void kernel_mul_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(27)]], + constant int64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] * src1[tpig % nb]; } @@ -222,7 +223,7 @@ kernel void kernel_div_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(27)]], + constant int64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] / src1[tpig % nb]; } @@ -243,6 +244,47 @@ kernel void kernel_scale_4( dst[tpig] = src0[tpig] * scale; } +kernel void kernel_relu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = max(0.0f, src0[tpig]); +} + +kernel void kernel_tanh( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + dst[tpig] = precise::tanh(x); +} + +constant float GELU_COEF_A = 0.044715f; +constant float GELU_QUICK_COEF = -1.702f; +constant float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; + +kernel void kernel_gelu( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + // BEWARE !!! + // Simply using "tanh" instead of "precise::tanh" will sometimes results in NaNs! 
+ // This was observed with Falcon 7B and 40B models + // + dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +kernel void kernel_gelu_quick( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + dst[tpig] = x*(1.0f/(1.0f+exp(GELU_QUICK_COEF*x))); +} + kernel void kernel_silu( device const float4 * src0, device float4 * dst, @@ -251,13 +293,6 @@ kernel void kernel_silu( dst[tpig] = x / (1.0f + exp(-x)); } -kernel void kernel_relu( - device const float * src0, - device float * dst, - uint tpig[[thread_position_in_grid]]) { - dst[tpig] = max(0.0f, src0[tpig]); -} - kernel void kernel_sqr( device const float * src0, device float * dst, @@ -313,22 +348,6 @@ kernel void kernel_sum_rows( dst_row[0] = row_sum; } -constant float GELU_COEF_A = 0.044715f; -constant float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; - -kernel void kernel_gelu( - device const float4 * src0, - device float4 * dst, - uint tpig[[thread_position_in_grid]]) { - device const float4 & x = src0[tpig]; - - // BEWARE !!! - // Simply using "tanh" instead of "precise::tanh" will sometimes results in NaNs! - // This was observed with Falcon 7B and 40B models - // - dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); -} - kernel void kernel_soft_max( device const float * src0, device const float * src1, @@ -650,6 +669,94 @@ kernel void kernel_rms_norm( } } +kernel void kernel_group_norm( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int32_t & n_groups, + constant float & eps, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t ne = ne00*ne01*ne02; + const int64_t gs = ne00*ne01*((ne02 + n_groups - 1) / n_groups); + + int start = tgpig * gs; + int end = start + gs; + + start += tpitg; + + if (end >= ne) { + end = ne; + } + + float tmp = 0.0f; // partial sum for thread in warp + + for (int j = start; j < end; j += ntg) { + tmp += src0[j]; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float mean = tmp / gs; + tmp = 0.0f; + + for (int j = start; j < end; j += ntg) { + float xi = src0[j] - mean; + dst[j] = xi; + tmp += xi * xi; + } + + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float variance = tmp / gs; + const float scale = 1.0f/sqrt(variance + eps); + for (int j = start; j < end; j += ntg) { + dst[j] *= scale; + } +} + // function for calculate inner product between half a q4_0 block and 16 floats (yl), sumy is SUM(yl[i]) // il indicates where the q4 quants begin (0 or QK4_0/4) // we assume 
that the yl's have been multiplied with the appropriate scale factor @@ -1656,6 +1763,97 @@ kernel void kernel_im2col_f16( } } +kernel void kernel_upscale_f32( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int32_t & sf, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3; + const int64_t i02 = i2; + const int64_t i01 = i1/sf; + + device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01); + device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1); + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + dst_ptr[i0] = src0_ptr[i0/sf]; + } +} + +kernel void kernel_pad_f32( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3; + const int64_t i02 = i2; + const int64_t i01 = i1; + + device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01); + device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1); + + if (i1 < ne01 && i2 < ne02 && i3 < ne03) { + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + if (i0 < ne00) { + dst_ptr[i0] = src0_ptr[i0]; + } else { + dst_ptr[i0] = 0.0f; + } + } + + return; + } + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + dst_ptr[i0] = 0.0f; + } +} + // bitonic sort implementation following the CUDA kernels as reference typedef void (argsort_t)( device const float * x, @@ -1708,6 +1906,14 @@ kernel void kernel_argsort_f32_i32( template [[host_name("kernel_argsort_f32_i32_asc")]] kernel argsort_t kernel_argsort_f32_i32; template [[host_name("kernel_argsort_f32_i32_desc")]] kernel argsort_t kernel_argsort_f32_i32; +kernel void kernel_leaky_relu_f32( + device const float * src0, + device float * dst, + constant float & slope, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] > 0.0f ? 
src0[tpig] : src0[tpig] * slope; +} + kernel void kernel_cpy_f16_f16( device const half * src0, device half * dst, @@ -2066,9 +2272,9 @@ kernel void kernel_cpy_f32_q4_1( } kernel void kernel_concat( - device const char * src0, - device const char * src1, - device char * dst, + device const char * src0, + device const char * src1, + device char * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -2105,7 +2311,7 @@ kernel void kernel_concat( const int64_t i12 = i02 % ne12; const int64_t i11 = i01 % ne11; - device const char * src0_ptr = src0 + i03 * nb03 + i02 * nb02 + i01 * nb01 + tpitg.x*nb00; + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + tpitg.x*nb00; device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11 + tpitg.x*nb10; device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + tpitg.x*nb0; @@ -3315,10 +3521,10 @@ void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg template void dequantize_q2_K(device const block_q2_K *xb, short il, thread type4x4 & reg) { - const half d = xb->d; - const half min = xb->dmin; + const float d = xb->d; + const float min = xb->dmin; device const uint8_t * q = (device const uint8_t *)xb->qs; - half dl, ml; + float dl, ml; uint8_t sc = xb->scales[il]; #if QK_K == 256 @@ -3388,10 +3594,10 @@ void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg q = q + (il/4) * 32 + 16 * (il&1); il = il & 3; const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); - const half d = il < 2 ? xb->d : xb->d / 16.h; - const half min = xb->dmin; - const half dl = d * sc[0]; - const half ml = min * sc[1]; + const float d = il < 2 ? xb->d : xb->d / 16.h; + const float min = xb->dmin; + const float dl = d * sc[0]; + const float ml = min * sc[1]; #else q = q + 16 * (il&1); device const uint8_t * s = xb->scales; @@ -3418,13 +3624,13 @@ void dequantize_q5_K(device const block_q5_K *xb, short il, thread type4x4 & reg uint8_t ul = 1 << (il/2); il = il & 3; const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); - const half d = il < 2 ? xb->d : xb->d / 16.h; - const half min = xb->dmin; - const half dl = d * sc[0]; - const half ml = min * sc[1]; + const float d = il < 2 ? xb->d : xb->d / 16.h; + const float min = xb->dmin; + const float dl = d * sc[0]; + const float ml = min * sc[1]; - const ushort mask = il<2 ? 0x0F : 0xF0; - const half qh_val = il<2 ? 16.h : 256.h; + const ushort mask = il<2 ? 0x0F : 0xF0; + const float qh_val = il<2 ? 16.f : 256.f; for (int i = 0; i < 16; ++i) { reg[i/4][i%4] = dl * ((q[i] & mask) + (qh[i] & ul ? qh_val : 0)) - ml; } diff --git a/ggml.c b/ggml.c index 66658ff4b..29e18a24c 100644 --- a/ggml.c +++ b/ggml.c @@ -1395,7 +1395,7 @@ inline static void ggml_vec_step_f32 (const int n, float * y, const float * x) { inline static void ggml_vec_tanh_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = tanhf(x[i]); } inline static void ggml_vec_elu_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : expf(x[i])-1; } inline static void ggml_vec_relu_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : 0.f; } -inline static void ggml_vec_leaky_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? 
x[i] : 0.1f*x[i]; } +inline static void ggml_vec_leaky_relu_f32 (const int n, float * y, const float * x, const float ns) { for (int i = 0; i < n; ++i) y[i] = ((x[i] > 0.f) ? x[i] : 0.f) + ns * ((x[i] < 0.0f) ? x[i] : 0.f); } static const float GELU_COEF_A = 0.044715f; static const float GELU_QUICK_COEF = -1.702f; @@ -1623,7 +1623,9 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "POOL_1D", "POOL_2D", "UPSCALE", + "PAD", "ARGSORT", + "LEAKY_RELU", "FLASH_ATTN", "FLASH_FF", @@ -1650,7 +1652,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 70, "GGML_OP_COUNT != 70"); +static_assert(GGML_OP_COUNT == 72, "GGML_OP_COUNT != 72"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1707,7 +1709,9 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "pool_1d(x)", "pool_2d(x)", "upscale(x)", + "pad(x)", "argsort(x)", + "leaky_relu(x)", "flash_attn(x)", "flash_ff(x)", @@ -1734,7 +1738,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 70, "GGML_OP_COUNT != 70"); +static_assert(GGML_OP_COUNT == 72, "GGML_OP_COUNT != 72"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -1750,10 +1754,9 @@ static const char * GGML_UNARY_OP_NAME[GGML_UNARY_OP_COUNT] = { "GELU", "GELU_QUICK", "SILU", - "LEAKY", }; -static_assert(GGML_UNARY_OP_COUNT == 11, "GGML_UNARY_OP_COUNT != 11"); +static_assert(GGML_UNARY_OP_COUNT == 10, "GGML_UNARY_OP_COUNT != 10"); static_assert(sizeof(struct ggml_object)%GGML_MEM_ALIGN == 0, "ggml_object size must be a multiple of GGML_MEM_ALIGN"); @@ -3830,12 +3833,25 @@ struct ggml_tensor * ggml_relu_inplace( return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_RELU); } -// ggml_leaky +// ggml_leaky_relu -struct ggml_tensor * ggml_leaky( +struct ggml_tensor * ggml_leaky_relu( struct ggml_context * ctx, - struct ggml_tensor * a) { - return ggml_unary(ctx, a, GGML_UNARY_OP_LEAKY); + struct ggml_tensor * a, float negative_slope, bool inplace) { + bool is_node = false; + + if (!inplace && (a->grad)) { + is_node = true; + } + + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + ggml_set_op_params(result, &negative_slope, sizeof(negative_slope)); + + result->op = GGML_OP_LEAKY_RELU; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + + return result; } // ggml_gelu @@ -4022,8 +4038,9 @@ static struct ggml_tensor * ggml_group_norm_impl( struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); - result->op = GGML_OP_GROUP_NORM; result->op_params[0] = n_groups; + + result->op = GGML_OP_GROUP_NORM; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; result->src[1] = NULL; // TODO: maybe store epsilon here? @@ -5523,6 +5540,30 @@ static struct ggml_tensor * ggml_upscale_impl( return result; } +struct ggml_tensor * ggml_pad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, int p1, int p2, int p3) { + bool is_node = false; + + if (a->grad) { + GGML_ASSERT(false); // TODO: implement backward + is_node = true; + } + + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, + a->ne[0] + p0, + a->ne[1] + p1, + a->ne[2] + p2, + a->ne[3] + p3); + + result->op = GGML_OP_PAD; + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + + return result; +} + struct ggml_tensor * ggml_upscale( struct ggml_context * ctx, struct ggml_tensor * a, @@ -7718,8 +7759,10 @@ static void ggml_compute_forward_mul_f32( const int ith = params->ith; const int nth = params->nth; +// TODO: OpenCL kernel support broadcast #ifdef GGML_USE_CLBLAST if (src1->backend == GGML_BACKEND_GPU) { + GGML_ASSERT(ggml_are_same_shape(src0, src1)); if (ith == 0) { ggml_cl_mul(src0, src1, dst); } @@ -8985,10 +9028,9 @@ static void ggml_compute_forward_silu( } break; } } +// ggml_compute_forward_leaky_relu -// ggml_compute_forward_leaky - -static void ggml_compute_forward_leaky_f32( +static void ggml_compute_forward_leaky_relu_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, struct ggml_tensor * dst) { @@ -9002,24 +9044,27 @@ static void ggml_compute_forward_leaky_f32( const int n = ggml_nrows(src0); const int nc = src0->ne[0]; + float negative_slope; + memcpy(&negative_slope, dst->op_params, sizeof(float)); + assert(dst->nb[0] == sizeof(float)); assert(src0->nb[0] == sizeof(float)); for (int i = 0; i < n; i++) { - ggml_vec_leaky_f32(nc, + ggml_vec_leaky_relu_f32(nc, (float *) ((char *) dst->data + i*( dst->nb[1])), - (float *) ((char *) src0->data + i*(src0->nb[1]))); + (float *) ((char *) src0->data + i*(src0->nb[1])), negative_slope); } } -static void ggml_compute_forward_leaky( +static void ggml_compute_forward_leaky_relu( const struct ggml_compute_params * params, const struct ggml_tensor * src0, struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_leaky_f32(params, src0, dst); + ggml_compute_forward_leaky_relu_f32(params, src0, dst); } break; default: { @@ -12158,6 +12203,7 @@ static void ggml_compute_forward_upscale_f32( GGML_ASSERT(src0->nb[0] == sizeof(float)); const int ith = params->ith; + const int nth = params->nth; GGML_TENSOR_UNARY_OP_LOCALS @@ -12165,16 +12211,17 @@ static void ggml_compute_forward_upscale_f32( // TODO: optimize - for (int i03 = 0; i03 < ne03; i03++) { - for (int i02 = ith; i02 < ne02; i02++) { - for (int m = 0; m < dst->ne[1]; m++) { - int i01 = m / scale_factor; - for (int n = 0; n < dst->ne[0]; n++) { - int i00 = n / scale_factor; + for (int64_t i3 = 0; i3 < ne3; i3++) { + const int64_t i03 = i3; + for (int64_t i2 = ith; i2 < ne2; i2 += nth) { + const int64_t i02 = i2; + for (int64_t i1 = 0; i1 < ne1; i1++) { + const int64_t i01 = i1 / scale_factor; + for (int64_t i0 = 0; i0 < ne0; i0++) { + const int64_t i00 = i0 / scale_factor; - const float * x = (float *)((char *) src0->data + i00 * nb00 +i01 * nb01 + i02 * nb02 + i03 * nb03); - - float * y = (float *)((char *) dst->data + n * dst->nb[0] + m * dst->nb[1] + i02 * dst->nb[2] + i03 * dst->nb[3]); + const float * x = (float *)((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + float * y = (float *)((char *) dst->data + i0*nb0 + i1*nb1 + i2*nb2 + i3*nb3); *y = *x; } @@ -12199,6 +12246,64 @@ static void ggml_compute_forward_upscale( } } +// ggml_compute_forward_pad + +static void ggml_compute_forward_pad_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + return; + } + + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT( dst->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + float * dst_ptr = (float *) 
dst->data; + + // TODO: optimize + + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = ith; i1 < ne1; i1 += nth) { + for (int64_t i0 = 0; i0 < ne0; ++i0) { + for (int64_t i3 = 0; i3 < ne3; ++i3) { + const int64_t dst_idx = i3*(ne0*ne1*ne2) + i2*(ne0*ne1) + i1*ne0 + i0; + + const float * src_ptr = (const float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + + if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) { + dst_ptr[dst_idx] = *src_ptr; + } else { + dst_ptr[dst_idx] = 0; + } + } + } + } + } +} + +static void ggml_compute_forward_pad( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_pad_f32(params, src0, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_argsort static void ggml_compute_forward_argsort_f32( @@ -13406,10 +13511,6 @@ static void ggml_compute_forward_unary( { ggml_compute_forward_silu(params, src0, dst); } break; - case GGML_UNARY_OP_LEAKY: - { - ggml_compute_forward_leaky(params, src0, dst); - } break; default: { GGML_ASSERT(false); @@ -14191,10 +14292,18 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_upscale(params, tensor->src[0], tensor); } break; + case GGML_OP_PAD: + { + ggml_compute_forward_pad(params, tensor->src[0], tensor); + } break; case GGML_OP_ARGSORT: { ggml_compute_forward_argsort(params, tensor->src[0], tensor); } break; + case GGML_OP_LEAKY_RELU: + { + ggml_compute_forward_leaky_relu(params, tensor->src[0], tensor); + } break; case GGML_OP_FLASH_ATTN: { const int32_t t = ggml_get_op_params_i32(tensor, 0); @@ -15187,10 +15296,18 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // TODO: not implemented } break; + case GGML_OP_PAD: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_ARGSORT: { GGML_ASSERT(false); // TODO: not implemented } break; + case GGML_OP_LEAKY_RELU: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_FLASH_ATTN: { struct ggml_tensor * flash_grad = NULL; @@ -15796,6 +15913,7 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { case GGML_OP_ARGMAX: case GGML_OP_REPEAT: case GGML_OP_REPEAT_BACK: + case GGML_OP_LEAKY_RELU: { n_tasks = 1; } break; @@ -15808,7 +15926,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { case GGML_UNARY_OP_TANH: case GGML_UNARY_OP_ELU: case GGML_UNARY_OP_RELU: - case GGML_UNARY_OP_LEAKY: { n_tasks = 1; } break; @@ -15927,6 +16044,10 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = n_threads; } break; + case GGML_OP_PAD: + { + n_tasks = n_threads; + } break; case GGML_OP_ARGSORT: { n_tasks = n_threads; diff --git a/ggml.h b/ggml.h index 32f256481..1447646b1 100644 --- a/ggml.h +++ b/ggml.h @@ -423,7 +423,9 @@ extern "C" { GGML_OP_POOL_1D, GGML_OP_POOL_2D, GGML_OP_UPSCALE, // nearest interpolate + GGML_OP_PAD, GGML_OP_ARGSORT, + GGML_OP_LEAKY_RELU, GGML_OP_FLASH_ATTN, GGML_OP_FLASH_FF, @@ -463,7 +465,6 @@ extern "C" { GGML_UNARY_OP_GELU, GGML_UNARY_OP_GELU_QUICK, GGML_UNARY_OP_SILU, - GGML_UNARY_OP_LEAKY, GGML_UNARY_OP_COUNT, }; @@ -793,6 +794,9 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + // dst = a + // view(dst, nb1, nb2, nb3, offset) += b + // return dst GGML_API struct ggml_tensor * ggml_acc( struct ggml_context * ctx, struct 
ggml_tensor * a, @@ -957,15 +961,14 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); - GGML_API struct ggml_tensor * ggml_leaky( + GGML_API struct ggml_tensor * ggml_leaky_relu( struct ggml_context * ctx, - struct ggml_tensor * a); + struct ggml_tensor * a, float negative_slope, bool inplace); GGML_API struct ggml_tensor * ggml_relu_inplace( struct ggml_context * ctx, struct ggml_tensor * a); - // TODO: double-check this computation is correct GGML_API struct ggml_tensor * ggml_gelu( struct ggml_context * ctx, struct ggml_tensor * a); @@ -1551,6 +1554,15 @@ extern "C" { struct ggml_tensor * a, int scale_factor); + // pad each dimension with zeros: [x, ..., x] -> [x, ..., x, 0, ..., 0] + GGML_API struct ggml_tensor * ggml_pad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1, + int p2, + int p3); + // sort rows enum ggml_sort_order { GGML_SORT_ASC, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 44830b4d4..afca85143 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -234,6 +234,11 @@ static bool ggml_is_view_op(enum ggml_op op) { return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; } +enum test_mode { + MODE_TEST, + MODE_PERF, +}; + struct test_case { virtual ~test_case() {} @@ -268,7 +273,58 @@ struct test_case { return size; } + ggml_cgraph * gf = nullptr; + + static const int sentinel_size = 1024; + + test_mode mode; + + std::vector sentinels; + + void add_sentinel(ggml_context * ctx) { + if (mode == MODE_PERF) { + return; + } + ggml_tensor * sentinel = ::ggml_new_tensor_1d(ctx, GGML_TYPE_F32, sentinel_size); + ggml_format_name(sentinel, "sent_%zu", sentinels.size()); + sentinels.push_back(sentinel); + } + + // hijack ggml_new_tensor to add sentinels after each tensor to check for overflows in the backend + + ggml_tensor * ggml_new_tensor(ggml_context * ctx, ggml_type type, int n_dims, const int64_t * ne) { + ggml_tensor * t = ::ggml_new_tensor(ctx, type, n_dims, ne); + add_sentinel(ctx); + return t; + } + + ggml_tensor * ggml_new_tensor_1d(ggml_context * ctx, ggml_type type, int64_t ne0) { + ggml_tensor * t = ::ggml_new_tensor_1d(ctx, type, ne0); + add_sentinel(ctx); + return t; + } + + ggml_tensor * ggml_new_tensor_2d(ggml_context * ctx, ggml_type type, int64_t ne0, int64_t ne1) { + ggml_tensor * t = ::ggml_new_tensor_2d(ctx, type, ne0, ne1); + add_sentinel(ctx); + return t; + } + + ggml_tensor * ggml_new_tensor_3d(ggml_context * ctx, ggml_type type, int64_t ne0, int64_t ne1, int64_t ne2) { + ggml_tensor * t = ::ggml_new_tensor_3d(ctx, type, ne0, ne1, ne2); + add_sentinel(ctx); + return t; + } + + ggml_tensor * ggml_new_tensor_4d(ggml_context * ctx, ggml_type type, int64_t ne0, int64_t ne1, int64_t ne2, int64_t ne3) { + ggml_tensor * t = ::ggml_new_tensor_4d(ctx, type, ne0, ne1, ne2, ne3); + add_sentinel(ctx); + return t; + } + bool eval(ggml_backend_t backend1, ggml_backend_t backend2, const char * op_name) { + mode = MODE_TEST; + ggml_init_params params = { /* .mem_size = */ ggml_tensor_overhead()*128 + ggml_graph_overhead(), /* .mem_base = */ NULL, @@ -276,6 +332,11 @@ struct test_case { }; ggml_context * ctx = ggml_init(params); + gf = ggml_new_graph(ctx); + + // pre-graph sentinel + add_sentinel(ctx); + ggml_tensor * out = build_graph(ctx); if (op_name != nullptr && op_desc(out) != op_name) { @@ -296,13 +357,20 @@ struct test_case { } } + // post-graph sentinel + add_sentinel(ctx); + // allocate ggml_backend_buffer_t buf = 
ggml_backend_alloc_ctx_tensors(ctx, backend1); // build graph - ggml_cgraph * gf = ggml_new_graph(ctx); ggml_build_forward_expand(gf, out); + // add sentinels as graph nodes so that they are checked in the callback + for (ggml_tensor * sentinel : sentinels) { + gf->nodes[gf->n_nodes++] = sentinel; + } + // randomize tensors initialize_tensors(ctx); @@ -318,9 +386,24 @@ struct test_case { }; auto callback = [](int index, ggml_tensor * t1, ggml_tensor * t2, void * user_data) -> bool { + callback_userdata * ud = (callback_userdata *) user_data; + + if (t1->op == GGML_OP_NONE) { + // sentinels must be unchanged + std::vector<uint8_t> t1_data(ggml_nbytes(t1)); + std::vector<uint8_t> t2_data(ggml_nbytes(t2)); + ggml_backend_tensor_get(t1, t1_data.data(), 0, ggml_nbytes(t1)); + ggml_backend_tensor_get(t2, t2_data.data(), 0, ggml_nbytes(t2)); + + if (memcmp(t1_data.data(), t2_data.data(), ggml_nbytes(t1)) != 0) { + printf("sentinel mismatch: %s ", t1->name); + ud->ok = false; + return true; + } + } + std::vector<float> f1 = tensor_to_float(t1); std::vector<float> f2 = tensor_to_float(t2); - callback_userdata * ud = (callback_userdata *) user_data; for (size_t i = 0; i < f1.size(); i++) { // check for nans @@ -349,9 +432,10 @@ struct test_case { if (err > ud->max_err) { printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); //for (int i = 0; i < f1.size(); i++) { - // printf("(%f, %f) ", f1[i], f2[i]); + // printf("%5d %9.6f %9.6f, diff = %9.6f\n", i, f1[i], f2[i], f1[i] - f2[i]); //} //printf("\n"); + //exit(1); ud->ok = false; } return true; @@ -375,6 +459,8 @@ struct test_case { } bool eval_perf(ggml_backend_t backend, const char * op_name) { + mode = MODE_PERF; + static const size_t graph_nodes = 8192; ggml_init_params params = { @@ -1135,6 +1221,118 @@ struct test_sum_rows : public test_case { } }; +// GGML_OP_UPSCALE +struct test_upscale : public test_case { + const ggml_type type; + const std::array<int64_t, 4> ne; + const int32_t scale_factor; + + std::string vars() override { + return VARS_TO_STR3(type, ne, scale_factor); + } + + test_upscale(ggml_type type = GGML_TYPE_F32, + std::array<int64_t, 4> ne = {512, 512, 3, 1}, + int32_t scale_factor = 2) + : type(type), ne(ne), scale_factor(scale_factor) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_upscale(ctx, a, scale_factor); + return out; + } +}; + +// GGML_OP_GROUP_NORM +struct test_group_norm : public test_case { + const ggml_type type; + const std::array<int64_t, 4> ne; + const int32_t num_groups; + + std::string vars() override { + return VARS_TO_STR3(type, ne, num_groups); + } + + test_group_norm(ggml_type type = GGML_TYPE_F32, + std::array<int64_t, 4> ne = {64, 64, 320, 1}, + int32_t num_groups = 32) + : type(type), ne(ne), num_groups(num_groups) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); + ggml_tensor * out = ggml_group_norm(ctx, a, num_groups); + return out; + } +}; + +// GGML_OP_ACC +struct test_acc : public test_case { + const ggml_type type; + const std::array<int64_t, 4> ne_a; + const std::array<int64_t, 4> ne_b; + + std::string vars() override { + return VARS_TO_STR3(type, ne_a, ne_b); + } + + test_acc(ggml_type type = GGML_TYPE_F32, + std::array<int64_t, 4> ne_a = {1024, 577, 1, 1}, + std::array<int64_t, 4> ne_b = {1024, 576, 1, 1}) + : type(type), ne_a(ne_a), ne_b(ne_b) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne_a.data()); + ggml_tensor * b = ggml_new_tensor(ctx, type, 4, ne_b.data()); +
ggml_tensor * out = ggml_acc(ctx, a, b, a->nb[1], a->nb[2], a->nb[3], b->nb[1]); + return out; + } +}; + +// GGML_OP_PAD +struct test_pad : public test_case { + const ggml_type type; + const std::array<int64_t, 4> ne_a; + const int pad_0; + const int pad_1; + + std::string vars() override { + return VARS_TO_STR4(type, ne_a, pad_0, pad_1); + } + + test_pad(ggml_type type = GGML_TYPE_F32, + std::array<int64_t, 4> ne_a = {512, 512, 1, 1}, + int pad_0 = 1, int pad_1 = 1) + : type(type), ne_a(ne_a), pad_0(pad_0), pad_1(pad_1) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne_a.data()); + ggml_tensor * out = ggml_pad(ctx, a, pad_0, pad_1, 0, 0); + return out; + } +}; + +// GGML_OP_LEAKY_RELU +struct test_leaky_relu : public test_case { + const ggml_type type; + const std::array<int64_t, 4> ne_a; + const float negative_slope; + + std::string vars() override { + return VARS_TO_STR3(type, ne_a, negative_slope); + } + + test_leaky_relu(ggml_type type = GGML_TYPE_F32, + std::array<int64_t, 4> ne_a = {10, 10, 10, 10}, + float negative_slope = 0.1f) + : type(type), ne_a(ne_a), negative_slope(negative_slope) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne_a.data()); + ggml_tensor * out = ggml_leaky_relu(ctx, a, negative_slope, true); + return out; + } +}; + // Mixtral MOE struct test_moe : public test_case { const int n_experts; @@ -1219,11 +1417,6 @@ struct test_moe : public test_case { } }; -enum test_mode { - MODE_TEST, - MODE_PERF, -}; - static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op_name) { std::vector<std::unique_ptr<test_case>> test_cases; @@ -1372,12 +1565,16 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); } - test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, {10, 10, 10, 10})); - test_cases.emplace_back(new test_sum_rows(GGML_TYPE_F32, {2, 1, 1, 1})); + test_cases.emplace_back(new test_sum_rows()); + test_cases.emplace_back(new test_upscale()); + test_cases.emplace_back(new test_group_norm()); + test_cases.emplace_back(new test_acc()); + test_cases.emplace_back(new test_pad()); + test_cases.emplace_back(new test_leaky_relu()); #if !defined(__SANITIZE_THREAD__) // FIXME: these tests use too much memory with thread sanitizer - test_cases.emplace_back(new test_moe(8, 2, 1, 4096, 14336)); + test_cases.emplace_back(new test_moe(8, 2, 1, 4096, 8*1024)); //test_cases.emplace_back(new test_moe(8, 2, 8, 4096, 14336)); #endif From 948ff137ec37f1ec74c02905917fa0afc9b97514 Mon Sep 17 00:00:00 2001 From: shibe2 Date: Wed, 13 Dec 2023 23:57:15 +0400 Subject: [PATCH 079/811] server : fix handling of characters that span multiple tokens when streaming (#4446) --- examples/server/server.cpp | 39 +++++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d0cd8e1cd..39d1e83d1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -376,7 +376,6 @@ struct llama_client_slot int32_t num_prompt_tokens = 0; int32_t num_prompt_tokens_processed = 0; - int32_t multibyte_pending = 0; json prompt; std::string generated_text; @@ -425,7 +424,6 @@ struct llama_client_slot stopped_word = false; stopped_limit = false; stopping_word = ""; - multibyte_pending = 0; n_past = 0; sent_count = 0; sent_token_probs_index = 0; @@ -992,35 +990,36 @@ struct llama_server_context slot.generated_text +=
token_str; slot.has_next_token = true; - if (slot.multibyte_pending > 0) + // check if there is incomplete UTF-8 character at the end + bool incomplete = false; + for (unsigned i = 1; i < 5 && i <= slot.generated_text.size(); ++i) { - slot.multibyte_pending -= token_str.size(); - } - else if (token_str.size() == 1) - { - const char c = token_str[0]; - // 2-byte characters: 110xxxxx 10xxxxxx + unsigned char c = slot.generated_text[slot.generated_text.size() - i]; + if ((c & 0xC0) == 0x80) + { + // continuation byte: 10xxxxxx + continue; + } if ((c & 0xE0) == 0xC0) { - slot.multibyte_pending = 1; - // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx + // 2-byte character: 110xxxxx ... + incomplete = i < 2; } else if ((c & 0xF0) == 0xE0) { - slot.multibyte_pending = 2; - // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + // 3-byte character: 1110xxxx ... + incomplete = i < 3; } else if ((c & 0xF8) == 0xF0) { - slot.multibyte_pending = 3; - } - else - { - slot.multibyte_pending = 0; + // 4-byte character: 11110xxx ... + incomplete = i < 4; } + // else 1-byte character or invalid byte + break; } - if (slot.multibyte_pending == 0) + if (!incomplete) { size_t pos = std::min(slot.sent_count, slot.generated_text.size()); const std::string str_test = slot.generated_text.substr(pos); @@ -1055,7 +1054,7 @@ struct llama_server_context } } - if (slot.multibyte_pending > 0 && !slot.has_next_token) + if (incomplete) { slot.has_next_token = true; } From 0353a1840134b24b07ab61fd4490192f28c4db6b Mon Sep 17 00:00:00 2001 From: BarfingLemurs <128182951+BarfingLemurs@users.noreply.github.com> Date: Thu, 14 Dec 2023 02:38:49 -0500 Subject: [PATCH 080/811] readme : update supported model list (#4457) --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index 014a37c85..edbe6ba57 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,18 @@ as the main playground for developing new features for the [ggml](https://github - [X] [Persimmon 8B](https://github.com/ggerganov/llama.cpp/pull/3410) - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) - [X] [Bloom](https://github.com/ggerganov/llama.cpp/pull/3553) +- [x] [Yi models](https://huggingface.co/models?search=01-ai/Yi) - [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) +- [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) +- [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) +- [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) + +**Multimodal models:** + +- [x] [Llava 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) +- [x] [Bakllava](https://huggingface.co/models?search=SkunkworksAI/Bakllava) +- [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) +- [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) **Bindings:** From 873637afc7924f435ac44c067630a28e82eefa7b Mon Sep 17 00:00:00 2001 From: wonjun Jang Date: Thu, 14 Dec 2023 17:09:34 +0900 Subject: [PATCH 081/811] convert : support loading vocab from fast tokenizer config (#3633) * Add HFVocab into convert.py * Update convert.py * Update convert.py * add bytes_to_unicode function * change add_meta_vocab fucntion * remove debug code * remove byte_encoder * Add newline between classes * Check tokenizer.json when tokenizer.model is not exist. 
* Move transformers dependency to local code * Add error context with 'raise from' * Add fast tokenizer option to BpeVocab * Update convert.py * Add VocabLoader and remove *Vocab class * Add transformers dependency * remove added tokens and check newline token to decide spm or bpe * Update convert.py * Add special token type * Update convert.py * Update convert.py * Update convert.py * Fix typo in convert.py * Fix when params.n_vocab < tokenizer vocab size * update vocab class * change function name * Remove unused variable/functions, add types to class variable and methods, delete blank lines * fix flake8 warnings * code style cleanup * make mypy happy * change exception --------- Co-authored-by: Jared Van Bortel --- convert.py | 323 ++++++++++++++++++++++++----------------------- requirements.txt | 1 + 2 files changed, 168 insertions(+), 156 deletions(-) diff --git a/convert.py b/convert.py index e4b69d172..7a3cd615e 100755 --- a/convert.py +++ b/convert.py @@ -10,6 +10,7 @@ import itertools import json import math import mmap +import os import pickle import re import signal @@ -18,15 +19,15 @@ import sys import time import zipfile from abc import ABCMeta, abstractmethod +from collections import OrderedDict from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from dataclasses import dataclass from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, Callable, Iterable, Literal, TypeVar +from typing import IO, TYPE_CHECKING, Any, Callable, Iterable, Literal, Optional, TypeVar, cast import numpy as np from sentencepiece import SentencePieceProcessor -import os if 'NO_LOCAL_GGUF' not in os.environ: sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf @@ -327,127 +328,138 @@ class Params: return params -# -# vocab -# +class VocabLoader: + def __init__(self, params: Params, fname_tokenizer: Path) -> None: + try: + from transformers import AutoTokenizer + except ImportError as e: + raise ImportError( + "To use VocabLoader, please install the `transformers` package. " + "You can install it with `pip install transformers`." + ) from e -class BpeVocab: - def __init__(self, fname_tokenizer: Path, fname_added_tokens: Path | None) -> None: - self.bpe_tokenizer = json.loads(open(str(fname_tokenizer), encoding="utf-8").read()) - added_tokens: dict[str, int] - if fname_added_tokens is not None: - # FIXME: Verify that added tokens here _cannot_ overlap with the main vocab.
- added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) + try: + self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), trust_remote_code=True) + except ValueError: + self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), use_fast=False, trust_remote_code=True) + + self.added_tokens_dict: OrderedDict[str, int] = OrderedDict() + + for tok, tokidx in sorted(self.tokenizer.get_added_vocab().items(), key=lambda x: x[1]): + if tokidx >= params.n_vocab or tokidx < self.tokenizer.vocab_size: + continue + + self.added_tokens_dict[tok] = tokidx + + self.unk_token_id: int = self.tokenizer.unk_token_id + self.specials: dict[str, int] = { + tok: self.tokenizer.get_vocab()[tok] + for tok in self.tokenizer.all_special_tokens + } + self.special_ids: set[int] = set(self.tokenizer.all_special_ids) + self.vocab_size_base: int = self.tokenizer.vocab_size + self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_dict) + self.fname_tokenizer: Path = fname_tokenizer + + vocab_file = "tokenizer.model" + path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) + if path_candidate is not None: + self.spm = SentencePieceProcessor(str(path_candidate)) + print(self.spm.vocab_size(), self.vocab_size_base) else: - # Fall back to trying to find the added tokens in tokenizer.json - tokenizer_json_file = fname_tokenizer.parent / 'tokenizer.json' - if not tokenizer_json_file.is_file(): - added_tokens = {} - else: - tokenizer_json = json.load(open(tokenizer_json_file, encoding="utf-8")) - added_tokens = dict( - (item['content'], item['id']) - for item in tokenizer_json.get('added_tokens', []) - # Added tokens here can be duplicates of the main vocabulary. - if item['content'] not in self.bpe_tokenizer) + self.spm = None - vocab_size: int = len(self.bpe_tokenizer) - expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) - actual_ids = sorted(added_tokens.values()) - if expected_ids != actual_ids: - expected_end_id = vocab_size + len(actual_ids) - 1 - raise Exception(f"Expected the {len(actual_ids)} added token ID(s) to be sequential in the range {vocab_size} - {expected_end_id}; got {actual_ids}") + def hf_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + tokenizer = self.tokenizer + reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.get_vocab().items()} + added_tokens_ids = set(self.added_tokens_dict.values()) - items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) - self.added_tokens_list = [text for (text, idx) in items] - self.vocab_size_base: int = vocab_size - self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_list) - self.fname_tokenizer = fname_tokenizer - self.fname_added_tokens = fname_added_tokens + for i in range(self.vocab_size_base): + if i in added_tokens_ids: + continue - def bpe_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - tokenizer = self.bpe_tokenizer - reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.items()} + text = reverse_vocab[i].encode("utf-8") + yield text, self.get_token_score(i), self.get_token_type(i) - for i, _ in enumerate(tokenizer): - yield reverse_vocab[i], 0.0, gguf.TokenType.NORMAL + def get_token_type(self, token_id: int) -> gguf.TokenType: + toktype = gguf.TokenType.NORMAL - def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - for text in self.added_tokens_list: - score = -1000.0 - yield text.encode("utf-8"), score, gguf.TokenType.CONTROL - - def all_tokens(self) -> 
Iterable[tuple[bytes, float, gguf.TokenType]]: - yield from self.bpe_tokens() - yield from self.added_tokens() - - def __repr__(self) -> str: - return f"" - - -class SentencePieceVocab: - def __init__(self, fname_tokenizer: Path, fname_added_tokens: Path | None) -> None: - self.sentencepiece_tokenizer = SentencePieceProcessor(str(fname_tokenizer)) - added_tokens: dict[str, int] - if fname_added_tokens is not None: - added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) - else: - added_tokens = {} - - vocab_size: int = self.sentencepiece_tokenizer.vocab_size() - - new_tokens = {id: piece for piece, id in added_tokens.items() if id >= vocab_size} - expected_new_ids = list(range(vocab_size, vocab_size + len(new_tokens))) - actual_new_ids = sorted(new_tokens.keys()) - - if expected_new_ids != actual_new_ids: - raise ValueError(f"Expected new token IDs {expected_new_ids} to be sequential; got {actual_new_ids}") - - # Token pieces that were added to the base vocabulary. - self.added_tokens_list = [new_tokens[id] for id in actual_new_ids] - self.vocab_size_base = vocab_size - self.vocab_size = self.vocab_size_base + len(self.added_tokens_list) - self.fname_tokenizer = fname_tokenizer - self.fname_added_tokens = fname_added_tokens - - def sentencepiece_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - tokenizer = self.sentencepiece_tokenizer - for i in range(tokenizer.vocab_size()): - piece = tokenizer.id_to_piece(i) - text: bytes = piece.encode("utf-8") - score: float = tokenizer.get_score(i) - - toktype = gguf.TokenType.NORMAL - if tokenizer.is_unknown(i): + if self.spm is not None and token_id < self.spm.vocab_size(): + if self.spm.is_unknown(token_id): toktype = gguf.TokenType.UNKNOWN - if tokenizer.is_control(i): + if self.spm.is_control(token_id): + toktype = gguf.TokenType.CONTROL + if self.spm.is_unused(token_id): + toktype = gguf.TokenType.UNUSED + if self.spm.is_byte(token_id): + toktype = gguf.TokenType.BYTE + else: + if token_id == self.unk_token_id: + toktype = gguf.TokenType.UNKNOWN + if token_id in self.special_ids: toktype = gguf.TokenType.CONTROL - # NOTE: I think added_tokens are user defined. 
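
To make the `get_token_type()` branches concrete, a hedged example: `loader` stands for an instance of the class above, and the token ids are assumptions based on a typical Llama SentencePiece vocabulary, not values taken from this patch.

```python
# With tokenizer.model present (self.spm is not None):
loader.get_token_type(0)     # <unk>    -> gguf.TokenType.UNKNOWN
loader.get_token_type(1)     # <s>      -> gguf.TokenType.CONTROL
loader.get_token_type(13)    # <0x0A>   -> gguf.TokenType.BYTE
loader.get_token_type(4200)  # ordinary piece -> gguf.TokenType.NORMAL

# Without tokenizer.model (self.spm is None), only unk_token_id maps to UNKNOWN
# and the tokenizer's special ids map to CONTROL; everything else stays NORMAL.
```
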
- # ref: https://github.com/google/sentencepiece/blob/master/src/sentencepiece_model.proto - # if tokenizer.is_user_defined(i): toktype = gguf.TokenType.USER_DEFINED + return toktype - if tokenizer.is_unused(i): - toktype = gguf.TokenType.UNUSED - if tokenizer.is_byte(i): - toktype = gguf.TokenType.BYTE - - yield text, score, toktype + def get_token_score(self, token_id: int) -> float: + if self.spm is not None and token_id < self.spm.vocab_size(): + return cast(float, self.spm.get_score(token_id)) + return 0.0 def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - for text in self.added_tokens_list: - score = -1000.0 - yield text.encode("utf-8"), score, gguf.TokenType.USER_DEFINED + + for text in self.added_tokens_dict: + if text in self.specials: + + toktype = self.get_token_type(self.specials[text]) + score = self.get_token_score(self.specials[text]) + + else: + toktype = gguf.TokenType.USER_DEFINED + score = -1000.0 + + yield text.encode("utf-8"), score, toktype + + def has_newline_token(self) -> bool: + return '<0x0A>' in self.tokenizer.vocab or '\n' in self.tokenizer.vocab def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - yield from self.sentencepiece_tokens() + yield from self.hf_tokens() yield from self.added_tokens() + def get_vocab_type(self) -> str: + path_candidates = [] + vocab_file = "tokenizer.model" + path_candidates.append(vocab_file) + path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) + if path_candidate is not None: + return "llama" + + vocab_file = "vocab.json" + path_candidates.append(vocab_file) + path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) + if path_candidate is not None: + return "gpt2" + + vocab_file = "tokenizer.json" + path_candidates.append(vocab_file) + path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) + if path_candidate: + if not self.has_newline_token(): + return "gpt2" + return "llama" + + raise FileNotFoundError( + f"Could not find {path_candidates} in {self.fname_tokenizer} or its parent; " + "if it's in another directory, pass the directory as --vocab-dir" + ) + def __repr__(self) -> str: - return f"" + return f"" -Vocab: TypeAlias = 'BpeVocab | SentencePieceVocab' +Vocab: TypeAlias = 'VocabLoader' + # # data loading @@ -824,20 +836,27 @@ def bounded_parallel_map(func: Callable[[In], Out], iterable: Iterable[In], conc yield result -def check_vocab_size(params: Params, vocab: Vocab) -> None: +def check_vocab_size(params: Params, vocab: Vocab, pad_vocab: bool = False) -> None: if params.n_vocab != vocab.vocab_size: - assert isinstance(vocab, BpeVocab) or isinstance(vocab, SentencePieceVocab) - if params.n_vocab == vocab.vocab_size_base: + if params.n_vocab == vocab.vocab_size: print("Ignoring added_tokens.json since model matches vocab size without it.") - vocab.added_tokens_list = [] - vocab.vocab_size = vocab.vocab_size_base + vocab.added_tokens_dict = OrderedDict() + vocab.vocab_size = vocab.vocab_size + return + + if pad_vocab and params.n_vocab > vocab.vocab_size: + pad_count = params.n_vocab - vocab.vocab_size + print(f'Padding vocab with {pad_count} token(s) - through ') + for i in range(1, (params.n_vocab - vocab.vocab_size) + 1): + vocab.added_tokens_dict[f''] = -1 + vocab.vocab_size = params.n_vocab return msg = f"Vocab size mismatch (model has {params.n_vocab}, but {vocab.fname_tokenizer}" - if vocab.fname_added_tokens is not None: - msg += f" combined with {vocab.fname_added_tokens}" msg += f" has {vocab.vocab_size})." 
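
A worked example of the new `pad_vocab` path in `check_vocab_size()` above: the vocab sizes are hypothetical, and the placeholder token name in the comment is illustrative only, not the literal the patch uses.

```python
# Hypothetical sizes: the model header expects 32016 tokens, the tokenizer provides 32000.
params.n_vocab   = 32016
vocab.vocab_size = 32000

check_vocab_size(params, vocab, pad_vocab=True)
# -> prints "Padding vocab with 16 token(s) ...", adds 16 placeholder entries
#    (think f"<pad_{i}>") to vocab.added_tokens_dict and bumps vocab.vocab_size
#    to 32016. With pad_vocab=False the same call raises the size-mismatch error
#    below, which now also points at the --padvocab option.
```
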
- if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20 and vocab.fname_added_tokens is None: + if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20: msg += f" Most likely you are missing added_tokens.json (should be in {vocab.fname_tokenizer.parent})." + if vocab.vocab_size < params.n_vocab: + msg += " Possibly try using the --padvocab option." raise Exception(msg) @@ -901,12 +920,8 @@ class OutputFile: scores.append(score) toktypes.append(toktype) - if isinstance(vocab, SentencePieceVocab): - self.gguf.add_tokenizer_model("llama") - elif isinstance(vocab, BpeVocab): - self.gguf.add_tokenizer_model("gpt2") - else: - raise ValueError('Unknown vocab type: Not BpeVocab or SentencePieceVocab') + vocab_type = vocab.get_vocab_type() + self.gguf.add_tokenizer_model(vocab_type) self.gguf.add_token_list(tokens) self.gguf.add_token_scores(scores) self.gguf.add_token_types(toktypes) @@ -932,8 +947,12 @@ class OutputFile: self.gguf.close() @staticmethod - def write_vocab_only(fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab, endianess:gguf.GGUFEndian = gguf.GGUFEndian.LITTLE) -> None: - check_vocab_size(params, vocab) + def write_vocab_only( + fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab, + endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, + pad_vocab: bool = False, + ) -> None: + check_vocab_size(params, vocab, pad_vocab = pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -960,8 +979,13 @@ class OutputFile: return dt.quantize(arr) @staticmethod - def write_all(fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE) -> None: - check_vocab_size(params, vocab) + def write_all( + fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, + concurrency: int = DEFAULT_CONCURRENCY, + endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, + pad_vocab: bool = False, + ) -> None: + check_vocab_size(params, vocab, pad_vocab = pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -1119,35 +1143,17 @@ def load_some_model(path: Path) -> ModelPlus: return model_plus -def load_vocab(path: Path, vocabtype: str | None) -> Vocab: - # Be extra-friendly and accept either a file or a directory. Also, if it's - # a directory, it might be the model directory, and tokenizer.model might - # be in the parent of that. - if path.is_dir(): - vocab_file = "tokenizer.model" - if vocabtype == 'bpe': - vocab_file = "vocab.json" - path2 = path / vocab_file - # Use `.parent` instead of /.. to handle the symlink case better. - path3 = path.parent / vocab_file - if path2.exists(): - path = path2 - elif path3.exists(): - path = path3 - else: - raise FileNotFoundError( - f"Could not find {vocab_file} in {path} or its parent; " - "if it's in another directory, pass the directory as --vocab-dir") +def find_vocab_file_path(path: Path, vocab_file: str) -> Optional[Path]: + path2 = path / vocab_file + # Use `.parent` instead of /.. to handle the symlink case better. 
+ path3 = path.parent / vocab_file - print(f"Loading vocab file '{path}', type '{vocabtype}'") + if path2.exists(): + return path2 + if path3.exists(): + return path3 - added_tokens_path = path.parent / "added_tokens.json" - if vocabtype == "bpe": - return BpeVocab(path, added_tokens_path if added_tokens_path.exists() else None) - elif vocabtype == "spm": - return SentencePieceVocab(path, added_tokens_path if added_tokens_path.exists() else None) - else: - raise ValueError(f"Unsupported vocabulary type {vocabtype}") + return None def default_outfile(model_paths: list[Path], file_type: GGMLFileType) -> Path: @@ -1185,11 +1191,11 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin, *.safetensors)") - parser.add_argument("--vocabtype", choices=["spm", "bpe"], help="vocab format (default: spm)", default="spm") + parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default = DEFAULT_CONCURRENCY) parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") + parser.add_argument("--padvocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") args = parser.parse_args(args_in) if args.dump_single: @@ -1232,12 +1238,13 @@ def main(args_in: list[str] | None = None) -> None: if not args.outfile: raise ValueError("need --outfile if using --vocab-only") # FIXME: Try to respect vocab_dir somehow? - vocab = load_vocab(args.vocab_dir or args.model, args.vocabtype) + vocab = VocabLoader(params, args.vocab_dir or args.model) special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = args.vocabtype == 'bpe', + load_merges = True, n_vocab = vocab.vocab_size) outfile = args.outfile - OutputFile.write_vocab_only(outfile, params, vocab, special_vocab) + OutputFile.write_vocab_only(outfile, params, vocab, special_vocab, + endianess = endianess, pad_vocab = args.padvocab) print(f"Wrote {outfile}") return @@ -1245,12 +1252,15 @@ def main(args_in: list[str] | None = None) -> None: vocab = model_plus.vocab else: vocab_dir = args.vocab_dir if args.vocab_dir else model_plus.paths[0].parent - vocab = load_vocab(vocab_dir, args.vocabtype) + vocab = VocabLoader(params, vocab_dir) + # FIXME: Try to respect vocab_dir somehow? 
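
On the command-line side, `--vocabtype` is gone (the tokenizer format is now detected from the tokenizer files via `get_vocab_type()` above) and `--padvocab` is new. A hypothetical invocation through the updated `main()`, with placeholder paths and calling the module programmatically purely for illustration:

```python
import convert

convert.main([
    "models/my-model",                  # HF model directory with tokenizer files
    "--outfile", "my-model-f16.gguf",
    "--padvocab",                       # pad when the model header expects more tokens
])
```
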
+ print(f"Vocab info: {vocab}") special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = args.vocabtype == 'bpe', + load_merges = True, n_vocab = vocab.vocab_size) + print(f"Special vocab info: {special_vocab}") model = model_plus.model model = convert_model_names(model, params) ftype = pick_output_type(model, args.outtype) @@ -1260,7 +1270,8 @@ def main(args_in: list[str] | None = None) -> None: params.ftype = ftype print(f"Writing {outfile}, format {ftype}") - OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, concurrency = args.concurrency, endianess=endianess) + OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, + concurrency = args.concurrency, endianess = endianess, pad_vocab = args.padvocab) print(f"Wrote {outfile}") diff --git a/requirements.txt b/requirements.txt index 81c909d0b..badfec3be 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ numpy==1.24.4 sentencepiece==0.1.98 +transformers>=4.34.0 gguf>=0.1.0 From 55e87c3749cb4985c3b316984d40e00e4df4a5d0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 14 Dec 2023 10:35:29 +0200 Subject: [PATCH 082/811] ggml : fix OpenCL broadcast requirement for ggml_mul (close #4453) --- ggml.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml.c b/ggml.c index 29e18a24c..7e1272817 100644 --- a/ggml.c +++ b/ggml.c @@ -7759,10 +7759,10 @@ static void ggml_compute_forward_mul_f32( const int ith = params->ith; const int nth = params->nth; -// TODO: OpenCL kernel support broadcast #ifdef GGML_USE_CLBLAST if (src1->backend == GGML_BACKEND_GPU) { - GGML_ASSERT(ggml_are_same_shape(src0, src1)); + // TODO: OpenCL kernel support full broadcast + GGML_ASSERT(ggml_can_repeat_rows(src1, src0)); if (ith == 0) { ggml_cl_mul(src0, src1, dst); } From 20a68a7030ee06e8eb7eb8e24ae4ac52dc17803f Mon Sep 17 00:00:00 2001 From: LostRuins <39025047+LostRuins@users.noreply.github.com> Date: Thu, 14 Dec 2023 20:13:33 +0800 Subject: [PATCH 083/811] ggml : add ggml_row_size() (fixes llama out of space) (#4461) * Fixes "Not enough space in the context's memory pool" encountered on certain models, which seems to be caused by some imprecision related to the automatic casting of floating point values * do not cast to size_t, instead just use doubles * ggml : add ggml_row_size(), deprecate ggml_type_sizef() * ggml : fix row size compute to avoid overflows * tests : fix sizey -> sizez --------- Co-authored-by: Georgi Gerganov --- examples/benchmark/benchmark-matmult.cpp | 14 +++++++------- ggml.c | 9 +++++++-- ggml.h | 10 +++++++--- llama.cpp | 12 ++++++------ 4 files changed, 27 insertions(+), 18 deletions(-) diff --git a/examples/benchmark/benchmark-matmult.cpp b/examples/benchmark/benchmark-matmult.cpp index 284733b10..434e1d6bd 100644 --- a/examples/benchmark/benchmark-matmult.cpp +++ b/examples/benchmark/benchmark-matmult.cpp @@ -129,13 +129,13 @@ int main(int argc, char ** argv) { const ggml_type qtype = GGML_TYPE_Q4_1; size_t ctx_size = 0; - ctx_size += sizex*sizey*ggml_type_sizef(GGML_TYPE_F32); - ctx_size += sizex*sizey*ggml_type_sizef(GGML_TYPE_F32); - ctx_size += sizex*sizez*ggml_type_sizef(GGML_TYPE_F32); - ctx_size += sizex*sizey*ggml_type_sizef(qtype); - ctx_size += sizex*sizey*ggml_type_sizef(qtype); - ctx_size += sizex*sizey*ggml_type_sizef(GGML_TYPE_F32); // BLAS - ctx_size += sizex*sizey*ggml_type_sizef(GGML_TYPE_F32); // BLAS + ctx_size += ggml_row_size(GGML_TYPE_F32, sizex*sizey); + ctx_size += ggml_row_size(GGML_TYPE_F32, sizex*sizey); + 
ctx_size += ggml_row_size(GGML_TYPE_F32, sizex*sizez); + ctx_size += ggml_row_size(qtype, sizex*sizey); + ctx_size += ggml_row_size(qtype, sizex*sizey); + ctx_size += ggml_row_size(GGML_TYPE_F32, sizex*sizey); // BLAS + ctx_size += ggml_row_size(GGML_TYPE_F32, sizex*sizey); // BLAS ctx_size += 1024*1024*16; printf("Allocating Memory of size %zi bytes, %zi MB\n",ctx_size, (ctx_size/1024/1024)); diff --git a/ggml.c b/ggml.c index 7e1272817..f0a972690 100644 --- a/ggml.c +++ b/ggml.c @@ -2011,8 +2011,13 @@ size_t ggml_type_size(enum ggml_type type) { return type_traits[type].type_size; } -float ggml_type_sizef(enum ggml_type type) { - return ((float)(type_traits[type].type_size))/type_traits[type].blck_size; +size_t ggml_row_size(enum ggml_type type, int64_t ne) { + assert(ne % ggml_blck_size(type) == 0); + return ggml_type_size(type)*ne/ggml_blck_size(type); +} + +double ggml_type_sizef(enum ggml_type type) { + return ((double)(type_traits[type].type_size))/type_traits[type].blck_size; } const char * ggml_type_name(enum ggml_type type) { diff --git a/ggml.h b/ggml.h index 1447646b1..ae8101fab 100644 --- a/ggml.h +++ b/ggml.h @@ -641,9 +641,13 @@ extern "C" { GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN GGML_API size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_split); - GGML_API int ggml_blck_size (enum ggml_type type); - GGML_API size_t ggml_type_size (enum ggml_type type); // size in bytes for all elements in a block - GGML_API float ggml_type_sizef(enum ggml_type type); // ggml_type_size()/ggml_blck_size() as float + GGML_API int ggml_blck_size(enum ggml_type type); + GGML_API size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block + GGML_API size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row + + GGML_DEPRECATED( + GGML_API double ggml_type_sizef(enum ggml_type type), // ggml_type_size()/ggml_blck_size() as float + "use ggml_row_size() instead"); GGML_API const char * ggml_type_name(enum ggml_type type); GGML_API const char * ggml_op_name (enum ggml_op op); diff --git a/llama.cpp b/llama.cpp index 0e5ab044c..456807d9d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1555,7 +1555,7 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - cache.buf.resize(n_elements*(ggml_type_sizef(ktype) + ggml_type_sizef(vtype)) + 2u*n_layer*ggml_tensor_overhead()); + cache.buf.resize(ggml_row_size(ktype, n_elements) + ggml_row_size(vtype, n_elements) + 2u*n_layer*ggml_tensor_overhead()); memset(cache.buf.data, 0, cache.buf.size); struct ggml_init_params params; @@ -3822,8 +3822,8 @@ static void llm_build_k_shift( ggml_rope_custom_inplace(ctx, ggml_view_3d(ctx, kv.k_l[il], n_embd_head, n_head_kv, n_ctx, - ggml_type_sizef(kv.k_l[il]->type)*n_embd_head, - ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa, + ggml_row_size(kv.k_l[il]->type, n_embd_head), + ggml_row_size(kv.k_l[il]->type, n_embd_gqa), 0), K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); @@ -3852,7 +3852,7 @@ static void llm_build_kv_store( cb(v_cur_t, "v_cur_t", il); struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_gqa, - (ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa)*kv_head); + (ggml_row_size(kv.k_l[il]->type, n_embd_gqa))*kv_head); cb(k_cache_view, "k_cache_view", il); struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, 
n_embd_gqa, @@ -4011,8 +4011,8 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * k = ggml_view_3d(ctx, kv.k_l[il], n_embd_head, n_kv, n_head_kv, - ggml_type_sizef(kv.k_l[il]->type)*n_embd_gqa, - ggml_type_sizef(kv.k_l[il]->type)*n_embd_head, + ggml_row_size(kv.k_l[il]->type, n_embd_gqa), + ggml_row_size(kv.k_l[il]->type, n_embd_head), 0); cb(k, "k", il); From c50e40016394f124b97ce39da48148b1f6c01833 Mon Sep 17 00:00:00 2001 From: wonjun Jang Date: Thu, 14 Dec 2023 21:44:49 +0900 Subject: [PATCH 084/811] py : add protobuf dependency (#4466) --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index badfec3be..1a1162566 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ numpy==1.24.4 sentencepiece==0.1.98 transformers>=4.34.0 gguf>=0.1.0 +protobuf>=4.21.0 From cafcd4f89500b8afef722cdb08088eceb8a22572 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 14 Dec 2023 16:52:08 +0100 Subject: [PATCH 085/811] ggml : remove n_dims from ggml_tensor (#4469) ggml-ci --- common/train.cpp | 18 ++-- examples/baby-llama/baby-llama.cpp | 18 ++-- .../convert-llama2c-to-ggml.cpp | 4 +- examples/finetune/finetune.cpp | 2 +- examples/gguf/gguf.cpp | 2 +- examples/llava/clip.cpp | 6 +- ggml.c | 94 ++++++++++--------- ggml.h | 8 +- llama.cpp | 2 +- 9 files changed, 81 insertions(+), 73 deletions(-) diff --git a/common/train.cpp b/common/train.cpp index 773e2c59c..dcf9614e4 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -71,7 +71,7 @@ void free_random_uniform_distribution(struct random_uniform_distribution * rnd) struct ggml_tensor * randomize_tensor_normal(struct ggml_tensor * tensor, struct random_normal_distribution * rnd) { float scale = 1.0f; // xavier - switch (tensor->n_dims) { + switch (ggml_n_dims(tensor)) { case 1: scale /= sqrtf((float) tensor->ne[0]); for (int i0 = 0; i0 < tensor->ne[0]; i0++) { @@ -119,7 +119,7 @@ struct ggml_tensor * randomize_tensor_normal(struct ggml_tensor * tensor, struct } struct ggml_tensor * randomize_tensor_uniform(struct ggml_tensor * tensor, struct random_uniform_distribution * rnd) { - switch (tensor->n_dims) { + switch (ggml_n_dims(tensor)) { case 1: for (int i0 = 0; i0 < tensor->ne[0]; i0++) { float * dst = (float *) ((char *) tensor->data + i0*tensor->nb[0]); @@ -183,25 +183,27 @@ float fclamp(const float v, const float min, const float max) { } void assert_shape_1d(struct ggml_tensor * tensor, int64_t ne0) { - GGML_ASSERT(tensor->n_dims == 1); GGML_ASSERT(tensor->ne[0] == ne0); + GGML_ASSERT(tensor->ne[1] == 1); + GGML_ASSERT(tensor->ne[2] == 1); + GGML_ASSERT(tensor->ne[3] == 1); } void assert_shape_2d(struct ggml_tensor * tensor, int64_t ne0, int64_t ne1) { - GGML_ASSERT(tensor->n_dims == 2); GGML_ASSERT(tensor->ne[0] == ne0); GGML_ASSERT(tensor->ne[1] == ne1); + GGML_ASSERT(tensor->ne[2] == 1); + GGML_ASSERT(tensor->ne[3] == 1); } void assert_shape_3d(struct ggml_tensor * tensor, int64_t ne0, int64_t ne1, int64_t ne2) { - GGML_ASSERT(tensor->n_dims == 3); GGML_ASSERT(tensor->ne[0] == ne0); GGML_ASSERT(tensor->ne[1] == ne1); GGML_ASSERT(tensor->ne[2] == ne2); + GGML_ASSERT(tensor->ne[3] == 1); } void assert_shape_4d(struct ggml_tensor * tensor, int64_t ne0, int64_t ne1, int64_t ne2, int64_t ne3) { - GGML_ASSERT(tensor->n_dims == 4); GGML_ASSERT(tensor->ne[0] == ne0); GGML_ASSERT(tensor->ne[1] == ne1); GGML_ASSERT(tensor->ne[2] == ne2); @@ -225,8 +227,8 @@ int64_t get_example_targets_batch( bool sample_random_offsets ) { GGML_ASSERT(samples_count > 0); - 
GGML_ASSERT(tokens_input->n_dims == 2); - GGML_ASSERT(target_probs->n_dims == 3); + GGML_ASSERT(ggml_is_matrix(tokens_input)); + GGML_ASSERT(ggml_is_3d(target_probs)); int64_t n_vocab = target_probs->ne[0]; int64_t n_tokens = tokens_input->ne[0]; int64_t n_batch = tokens_input->ne[1]; diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index 8155101d0..2dc2988d3 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -1258,9 +1258,9 @@ static struct ggml_tensor * forward_lora( } static void sample_softmax(struct ggml_tensor * logits, struct ggml_tensor * probs, struct ggml_tensor * best_samples) { - assert(logits->n_dims == 2); - assert(probs->n_dims == 2); - assert(best_samples->n_dims == 1); + assert(ggml_is_matrix(logits)); + assert(ggml_is_matrix(probs)); + assert(ggml_is_vector(best_samples)); assert(logits->ne[1] == best_samples->ne[0]); assert(logits->ne[0] == probs->ne[0]); assert(logits->ne[1] == probs->ne[1]); @@ -1292,9 +1292,9 @@ static void sample_softmax_batch( struct ggml_context * ctx, struct ggml_tensor * logits, struct ggml_tensor * probs, struct ggml_tensor * best_samples ) { - GGML_ASSERT(best_samples->n_dims == 2); - GGML_ASSERT(logits->n_dims == 3); - GGML_ASSERT(probs->n_dims == 3); + GGML_ASSERT(ggml_is_matrix(best_samples)); + GGML_ASSERT(ggml_is_3d(logits)); + GGML_ASSERT(ggml_is_3d(probs)); int n_tokens = best_samples->ne[0]; int n_batch = best_samples->ne[1]; int n_vocab = logits->ne[0]; @@ -1334,7 +1334,7 @@ static void print_row(struct ggml_tensor * probs, int i) { } static void print_matrix(struct ggml_tensor * probs) { - assert(probs->n_dims == 2); + assert(ggml_is_matrix(probs)); for (int i = 0; i < probs->ne[1]; ++i) { for (int k = 0; k < probs->ne[0]; ++k) { float p = ggml_get_f32_1d(probs, i*probs->ne[0] + k); @@ -1386,8 +1386,8 @@ static void get_example_targets(int example_id, struct ggml_tensor * tokens_inpu static void get_example_targets_batch( struct ggml_context * ctx, int example_id, struct ggml_tensor * tokens_input, struct ggml_tensor * targets ) { - GGML_ASSERT(tokens_input->n_dims == 2); - GGML_ASSERT( targets->n_dims == 3); + GGML_ASSERT(ggml_is_matrix(tokens_input)); + GGML_ASSERT(ggml_is_3d(targets)); int n_tokens = tokens_input->ne[0]; int n_batch = tokens_input->ne[1]; GGML_ASSERT(n_tokens == targets->ne[1]); diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp index cae3bf3c3..4d41e1779 100644 --- a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp @@ -427,7 +427,7 @@ static void print_row(struct ggml_tensor * probs, int i) { } static void print_matrix(struct ggml_tensor * probs) { - assert(probs->n_dims == 2); + assert(ggml_is_matrix(probs)); for (int i = 0; i < probs->ne[1]; ++i) { for (int k = 0; k < probs->ne[0]; ++k) { float p = get_f32_2d(probs, k, i); @@ -639,7 +639,7 @@ static void load_vocab(const char *filename, Config *config, struct llama_vocab static void convert_weights_ak_to_gg(struct ggml_tensor * gg_weights, const float * karpathy_weights) { int ct; - switch (gg_weights->n_dims){ + switch (ggml_n_dims(gg_weights)) { case 1: ct = 0; for (int i0 = 0; i0 < gg_weights->ne[0]; i0++){ diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index af46e44a6..b9849e8c9 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1110,7 +1110,7 @@ static void 
write_tensor(struct llama_file * file, struct ggml_tensor * tensor, name = ggml_get_name(tensor); } uint32_t name_len = strlen(name); - uint32_t nd = tensor->n_dims; + uint32_t nd = ggml_n_dims(tensor); uint32_t ne[4] = { (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->ne[2], diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp index 9ab63a293..9e24bf24c 100644 --- a/examples/gguf/gguf.cpp +++ b/examples/gguf/gguf.cpp @@ -195,7 +195,7 @@ static bool gguf_ex_read_1(const std::string & fname) { struct ggml_tensor * cur = ggml_get_tensor(ctx_data, name); - printf("%s: tensor[%d]: n_dims = %d, name = %s, data = %p\n", __func__, i, cur->n_dims, cur->name, cur->data); + printf("%s: tensor[%d]: n_dims = %d, name = %s, data = %p\n", __func__, i, ggml_n_dims(cur), cur->name, cur->data); // print first 10 elements const float * data = (const float *) cur->data; diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 4bb7b93b6..112465968 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -514,7 +514,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ctx_size += padded_size; if (verbosity >= 3) { printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, padded_size=%zu, offset=%zu\n", __func__, i, - cur->n_dims, cur->name, tensor_size, padded_size, offset); + ggml_n_dims(cur), cur->name, tensor_size, padded_size, offset); } } } @@ -962,7 +962,7 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i } // quantize only 2D tensors - quantize &= (cur->n_dims == 2); + quantize &= (ggml_n_dims(cur) == 2); if (quantize) { new_type = type; @@ -1035,7 +1035,7 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i fout.put(0); } - printf("%s: n_dims = %d | quantize=%d | size = %f MB -> %f MB\n", name.c_str(), cur->n_dims, quantize, + printf("%s: n_dims = %d | quantize=%d | size = %f MB -> %f MB\n", name.c_str(), ggml_n_dims(cur), quantize, orig_size / 1024.0 / 1024.0, new_size / 1024.0 / 1024.0); } diff --git a/ggml.c b/ggml.c index f0a972690..f6f8b8251 100644 --- a/ggml.c +++ b/ggml.c @@ -2054,24 +2054,37 @@ size_t ggml_element_size(const struct ggml_tensor * tensor) { return ggml_type_size(tensor->type); } -static inline bool ggml_is_scalar(const struct ggml_tensor * tensor) { +bool ggml_is_scalar(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[0] == 1 && tensor->ne[1] == 1 && tensor->ne[2] == 1 && tensor->ne[3] == 1; } -static inline bool ggml_is_vector(const struct ggml_tensor * tensor) { +bool ggml_is_vector(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[1] == 1 && tensor->ne[2] == 1 && tensor->ne[3] == 1; } -static inline bool ggml_is_matrix(const struct ggml_tensor * tensor) { +bool ggml_is_matrix(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[2] == 1 && tensor->ne[3] == 1; } +bool ggml_is_3d(const struct ggml_tensor * tensor) { + return tensor->ne[3] == 1; +} + +int ggml_n_dims(const struct ggml_tensor * tensor) { + for (int i = GGML_MAX_DIMS - 1; i >= 1; --i) { + if (tensor->ne[i] > 1) { + return i + 1; + } + } + return 1; +} + static inline bool ggml_can_mul_mat(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS 
is not 4 - update this function"); @@ -2521,7 +2534,6 @@ static struct ggml_tensor * ggml_new_tensor_impl( /*.type =*/ type, /*.backend =*/ GGML_BACKEND_CPU, /*.buffer =*/ NULL, - /*.n_dims =*/ n_dims, /*.ne =*/ { 1, 1, 1, 1 }, /*.nb =*/ { 0, 0, 0, 0 }, /*.op =*/ GGML_OP_NONE, @@ -2628,7 +2640,7 @@ struct ggml_tensor * ggml_new_f32(struct ggml_context * ctx, float value) { } struct ggml_tensor * ggml_dup_tensor(struct ggml_context * ctx, const struct ggml_tensor * src) { - return ggml_new_tensor(ctx, src->type, src->n_dims, src->ne); + return ggml_new_tensor(ctx, src->type, GGML_MAX_DIMS, src->ne); } static void ggml_set_op_params(struct ggml_tensor * tensor, const void * params, size_t params_size) { @@ -3077,7 +3089,7 @@ struct ggml_tensor * ggml_format_name(struct ggml_tensor * tensor, const char * struct ggml_tensor * ggml_view_tensor( struct ggml_context * ctx, struct ggml_tensor * src) { - struct ggml_tensor * result = ggml_new_tensor_impl(ctx, src->type, src->n_dims, src->ne, src, 0); + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, src->type, GGML_MAX_DIMS, src->ne, src, 0); ggml_format_name(result, "%s (view)", src->name); for (int i = 0; i < GGML_MAX_DIMS; i++) { @@ -3235,10 +3247,10 @@ static struct ggml_tensor * ggml_add_cast_impl( is_node = true; } - struct ggml_tensor * result = ggml_new_tensor(ctx, type, a->n_dims, a->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, type, GGML_MAX_DIMS, a->ne); result->op = GGML_OP_ADD; - result->grad = is_node ? ggml_new_tensor(ctx, GGML_TYPE_F32, a->n_dims, a->ne) : NULL; + result->grad = is_node ? ggml_new_tensor(ctx, GGML_TYPE_F32, GGML_MAX_DIMS, a->ne) : NULL; result->src[0] = a; result->src[1] = b; @@ -3607,12 +3619,12 @@ struct ggml_tensor * ggml_sum_rows( is_node = true; } - int64_t ne[4] = {1,1,1,1}; - for (int i=1; in_dims; ++i) { + int64_t ne[GGML_MAX_DIMS] = { 1 }; + for (int i = 1; i < GGML_MAX_DIMS; ++i) { ne[i] = a->ne[i]; } - struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, a->n_dims, ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, ne); result->op = GGML_OP_SUM_ROWS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -3633,8 +3645,8 @@ struct ggml_tensor * ggml_mean( is_node = true; } - int64_t ne[GGML_MAX_DIMS] = { 1, a->ne[1], a->ne[2], a->ne[3] }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, a->n_dims, ne); + int64_t ne[4] = { 1, a->ne[1], a->ne[2], a->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); result->op = GGML_OP_MEAN; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -3656,8 +3668,7 @@ struct ggml_tensor * ggml_argmax( is_node = true; } - int64_t ne[GGML_MAX_DIMS] = { a->ne[1], 1, 1, 1 }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_I32, a->n_dims, ne); + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, a->ne[1]); result->op = GGML_OP_ARGMAX; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -3680,7 +3691,7 @@ struct ggml_tensor * ggml_repeat( is_node = true; } - struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, b->n_dims, b->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, b->ne); result->op = GGML_OP_REPEAT; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; @@ -3707,7 +3718,7 @@ struct ggml_tensor * ggml_repeat_back( return a; } - struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, b->n_dims, b->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, b->ne); result->op = GGML_OP_REPEAT_BACK; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -4083,7 +4094,7 @@ struct ggml_tensor * ggml_mul_mat( } const int64_t ne[4] = { a->ne[1], b->ne[1], b->ne[2], b->ne[3] }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, MAX(a->n_dims, b->n_dims), ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); result->op = GGML_OP_MUL_MAT; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -4117,7 +4128,7 @@ struct ggml_tensor * ggml_mul_mat_id( } const int64_t ne[4] = { as[0]->ne[1], b->ne[1], b->ne[2], b->ne[3] }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, MAX(as[0]->n_dims, b->n_dims), ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); ggml_set_op_params_i32(result, 0, id); ggml_set_op_params_i32(result, 1, n_as); @@ -4155,7 +4166,7 @@ struct ggml_tensor * ggml_out_prod( // a is broadcastable to b for ne[2] and ne[3] -> use b->ne[2] and b->ne[3] const int64_t ne[4] = { a->ne[0], b->ne[0], b->ne[2], b->ne[3] }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, MAX(a->n_dims, b->n_dims), ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); result->op = GGML_OP_OUT_PROD; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -4440,7 +4451,7 @@ struct ggml_tensor * ggml_reshape( //GGML_ASSERT(false); } - struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, b->n_dims, b->ne, a, 0); + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, GGML_MAX_DIMS, b->ne, a, 0); ggml_format_name(result, "%s (reshaped)", a->name); result->op = GGML_OP_RESHAPE; @@ -4818,7 +4829,7 @@ struct ggml_tensor * ggml_diag( } const int64_t ne[4] = { a->ne[0], a->ne[0], a->ne[2], a->ne[3] }; - struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, MAX(a->n_dims, 2), ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, 4, ne); result->op = GGML_OP_DIAG; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -5465,7 +5476,7 @@ struct ggml_tensor * ggml_pool_1d( is_node = true; } - const int64_t ne[3] = { + const int64_t ne[2] = { ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), a->ne[1], }; @@ -5584,7 +5595,7 @@ struct ggml_tensor * ggml_argsort( enum ggml_sort_order order) { bool is_node = false; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_I32, a->n_dims, a->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_I32, GGML_MAX_DIMS, a->ne); ggml_set_op_params_i32(result, 0, (int32_t) order); @@ -5631,7 +5642,7 @@ struct ggml_tensor * ggml_flash_attn( } //struct ggml_tensor * result = ggml_dup_tensor(ctx, q); - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, q->n_dims, q->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, GGML_MAX_DIMS, q->ne); int32_t t = masked ? 
1 : 0; ggml_set_op_params(result, &t, sizeof(t)); @@ -5664,7 +5675,7 @@ struct ggml_tensor * ggml_flash_ff( } //struct ggml_tensor * result = ggml_dup_tensor(ctx, a); - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, a->n_dims, a->ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, GGML_MAX_DIMS, a->ne); result->op = GGML_OP_FLASH_FF; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -5780,7 +5791,6 @@ struct ggml_tensor * ggml_win_part( const int np = npx*npy; const int64_t ne[4] = { a->ne[0], w, w, np, }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); int32_t params[] = { npx, npy, w }; @@ -14563,7 +14573,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( return replacements->vals[i]; } - struct ggml_tensor * clone = ggml_new_tensor(ctx, node->type, node->n_dims, node->ne); + struct ggml_tensor * clone = ggml_new_tensor(ctx, node->type, GGML_MAX_DIMS, node->ne); // insert clone into replacements GGML_ASSERT(replacements->set.keys[i] == NULL); // assert that we don't overwrite @@ -16564,7 +16574,7 @@ static void ggml_graph_export_leaf(const struct ggml_tensor * tensor, FILE * fou fprintf(fout, "%-6s %-12s %8d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %16zu %16zu %16zu %16zu %16p %32s\n", ggml_type_name(tensor->type), ggml_op_name (tensor->op), - tensor->n_dims, + ggml_n_dims(tensor), ne[0], ne[1], ne[2], ne[3], nb[0], nb[1], nb[2], nb[3], tensor->data, @@ -16579,7 +16589,7 @@ static void ggml_graph_export_node(const struct ggml_tensor * tensor, const char arg, ggml_type_name(tensor->type), ggml_op_name (tensor->op), - tensor->n_dims, + ggml_n_dims(tensor), ne[0], ne[1], ne[2], ne[3], nb[0], nb[1], nb[2], nb[3], tensor->data, @@ -16669,11 +16679,9 @@ void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { const uint32_t type = tensor->type; const uint32_t op = tensor->op; - const uint32_t n_dims = tensor->n_dims; fwrite(&type, sizeof(uint32_t), 1, fout); fwrite(&op, sizeof(uint32_t), 1, fout); - fwrite(&n_dims, sizeof(uint32_t), 1, fout); for (int j = 0; j < GGML_MAX_DIMS; ++j) { const uint64_t ne = tensor->ne[j]; @@ -16703,11 +16711,9 @@ void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { const uint32_t type = tensor->type; const uint32_t op = tensor->op; - const uint32_t n_dims = tensor->n_dims; fwrite(&type, sizeof(uint32_t), 1, fout); fwrite(&op, sizeof(uint32_t), 1, fout); - fwrite(&n_dims, sizeof(uint32_t), 1, fout); for (int j = 0; j < GGML_MAX_DIMS; ++j) { const uint64_t ne = tensor->ne[j]; @@ -16879,12 +16885,10 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * { uint32_t type; uint32_t op; - uint32_t n_dims; for (uint32_t i = 0; i < n_leafs; ++i) { type = *(const uint32_t *) ptr; ptr += sizeof(type); op = *(const uint32_t *) ptr; ptr += sizeof(op); - n_dims = *(const uint32_t *) ptr; ptr += sizeof(n_dims); int64_t ne[GGML_MAX_DIMS]; size_t nb[GGML_MAX_DIMS]; @@ -16900,7 +16904,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * nb[j] = nb_cur; } - struct ggml_tensor * tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, n_dims, ne); + struct ggml_tensor * tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, GGML_MAX_DIMS, ne); tensor->op = (enum ggml_op) op; @@ -16917,7 +16921,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * ptr += ggml_nbytes(tensor); - fprintf(stderr, "%s: loaded leaf %d: '%16s', %3d dims, %9zu 
bytes\n", __func__, i, tensor->name, n_dims, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded leaf %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } @@ -16927,12 +16931,10 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * { uint32_t type; uint32_t op; - uint32_t n_dims; for (uint32_t i = 0; i < n_nodes; ++i) { type = *(const uint32_t *) ptr; ptr += sizeof(type); op = *(const uint32_t *) ptr; ptr += sizeof(op); - n_dims = *(const uint32_t *) ptr; ptr += sizeof(n_dims); enum ggml_op eop = (enum ggml_op) op; @@ -17003,7 +17005,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * } break; default: { - tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, n_dims, ne); + tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, GGML_MAX_DIMS, ne); tensor->op = eop; } break; @@ -17022,7 +17024,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * result->nodes[i] = tensor; - fprintf(stderr, "%s: loaded node %d: '%16s', %3d dims, %9zu bytes\n", __func__, i, tensor->name, n_dims, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded node %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } } @@ -17160,7 +17162,7 @@ void ggml_graph_dump_dot(const struct ggml_cgraph * gb, const struct ggml_cgraph fprintf(fp, "(%s)|", ggml_type_name(node->type)); } - if (node->n_dims == 2) { + if (ggml_is_matrix(node)) { fprintf(fp, "%d [%" PRId64 ", %" PRId64 "] | %s", i, node->ne[0], node->ne[1], ggml_op_symbol(node->op)); } else { fprintf(fp, "%d [%" PRId64 ", %" PRId64 ", %" PRId64 "] | %s", i, node->ne[0], node->ne[1], node->ne[2], ggml_op_symbol(node->op)); @@ -17427,7 +17429,7 @@ static enum ggml_opt_result ggml_opt_adam( int64_t i = 0; for (int p = 0; p < np; ++p) { const int64_t ne = ggml_nelements(ps[p]); - const float p_decay = ((ps[p]->n_dims >= decay_min_ndim) ? decay : 0.0f) * sched; + const float p_decay = ((ggml_n_dims(ps[p]) >= decay_min_ndim) ? decay : 0.0f) * sched; for (int64_t j = 0; j < ne; ++j) { float x = ggml_get_f32_1d(ps[p], j); float g_ = g[i]*gnorm; @@ -19205,8 +19207,8 @@ void gguf_add_tensor( ctx->infos[idx].ne[i] = 1; } - ctx->infos[idx].n_dims = tensor->n_dims; - for (int i = 0; i < tensor->n_dims; i++) { + ctx->infos[idx].n_dims = ggml_n_dims(tensor); + for (uint32_t i = 0; i < ctx->infos[idx].n_dims; i++) { ctx->infos[idx].ne[i] = tensor->ne[i]; } diff --git a/ggml.h b/ggml.h index ae8101fab..84d6ba8b1 100644 --- a/ggml.h +++ b/ggml.h @@ -502,7 +502,6 @@ extern "C" { struct ggml_backend_buffer * buffer; - int n_dims; int64_t ne[GGML_MAX_DIMS]; // number of elements size_t nb[GGML_MAX_DIMS]; // stride in bytes: // nb[0] = ggml_type_size(type) @@ -534,7 +533,7 @@ extern "C" { void * extra; // extra things e.g. 
for ggml-cuda.cu - char padding[12]; + char padding[8]; }; static const size_t GGML_TENSOR_SIZE = sizeof(struct ggml_tensor); @@ -666,6 +665,11 @@ extern "C" { GGML_API bool ggml_is_transposed(const struct ggml_tensor * tensor); GGML_API bool ggml_is_contiguous(const struct ggml_tensor * tensor); GGML_API bool ggml_is_permuted (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_scalar (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_vector (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_matrix (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_3d (const struct ggml_tensor * tensor); + GGML_API int ggml_n_dims (const struct ggml_tensor * tensor); // returns 1 for scalars GGML_API bool ggml_are_same_shape(const struct ggml_tensor * t0, const struct ggml_tensor * t1); diff --git a/llama.cpp b/llama.cpp index 456807d9d..eddb70859 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8471,7 +8471,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s bool quantize = name.rfind("weight") == name.size() - 6; // ends with 'weight'? // quantize only 2D tensors - quantize &= (tensor->n_dims == 2); + quantize &= (ggml_n_dims(tensor) == 2); quantize &= params->quantize_output_tensor || name != "output.weight"; quantize &= !params->only_copy; From 6744dbe924a317e3e2a5a2a4a2037061b2223449 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 14 Dec 2023 20:05:21 +0100 Subject: [PATCH 086/811] ggml : use ggml_row_size where possible (#4472) * ggml : use ggml_row_size where possible ggml-ci * ggml : move ggml_nbytes_split to ggml-cuda.cu --- ggml-cuda.cu | 12 ++++++++---- ggml.c | 18 ++++++------------ ggml.h | 1 - tests/test-backend-ops.cpp | 9 +++++---- tests/test-quantize-perf.cpp | 10 +++++----- 5 files changed, 24 insertions(+), 26 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 019648bdd..0a63c1ecf 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8898,6 +8898,12 @@ static void ggml_cuda_nop(const ggml_tensor * src0, const ggml_tensor * src1, gg (void) dst; } +static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_split) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); +} + void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { const int64_t nrows = ggml_nrows(tensor); @@ -8947,8 +8953,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses if (ne0 % MATRIX_ROW_PADDING != 0) { - size += (MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING) - * ggml_type_size(tensor->type)/ggml_blck_size(tensor->type); + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); } char * buf; @@ -9485,8 +9490,7 @@ static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_t if (ggml_is_quantized(tensor->type)) { if (ne0 % MATRIX_ROW_PADDING != 0) { - size += (MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING) - * ggml_type_size(tensor->type)/ggml_blck_size(tensor->type); + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); } } diff --git a/ggml.c b/ggml.c index f6f8b8251..1feb7ead3 100644 --- a/ggml.c +++ b/ggml.c @@ -1997,12 +1997,6 @@ size_t ggml_nbytes_pad(const struct ggml_tensor * tensor) { return GGML_PAD(ggml_nbytes(tensor), GGML_MEM_ALIGN); } -size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_split) 
{ - static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); - - return (nrows_split*tensor->ne[0]*ggml_type_size(tensor->type))/ggml_blck_size(tensor->type); -} - int ggml_blck_size(enum ggml_type type) { return type_traits[type].blck_size; } @@ -2491,7 +2485,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( view_src = view_src->view_src; } - size_t data_size = ggml_type_size(type)*(ne[0]/ggml_blck_size(type)); + size_t data_size = ggml_row_size(type, ne[0]); for (int i = 1; i < n_dims; i++) { data_size *= ne[i]; } @@ -9698,7 +9692,7 @@ static void ggml_compute_forward_mul_mat( if (params->type == GGML_TASK_INIT) { if (src1->type != vec_dot_type) { char * wdata = params->wdata; - const size_t row_size = ne10*ggml_type_size(vec_dot_type)/ggml_blck_size(vec_dot_type); + const size_t row_size = ggml_row_size(vec_dot_type, ne10); assert(params->wsize >= ne11*ne12*ne13*row_size); assert(src1->type == GGML_TYPE_F32); @@ -9721,7 +9715,7 @@ static void ggml_compute_forward_mul_mat( } const void * wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; - const size_t row_size = ne10*ggml_type_size(vec_dot_type)/ggml_blck_size(vec_dot_type); + const size_t row_size = ggml_row_size(vec_dot_type, ne10); const int64_t nr0 = ne01; // src0 rows const int64_t nr1 = cne1*ne12*ne13; // src1 rows @@ -16326,7 +16320,7 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else #endif if (node->src[1]->type != vec_dot_type) { - cur = ggml_type_size(vec_dot_type)*ggml_nelements(node->src[1])/ggml_blck_size(vec_dot_type); + cur = ggml_row_size(vec_dot_type, ggml_nelements(node->src[1])); } } break; case GGML_OP_MUL_MAT_ID: @@ -16343,7 +16337,7 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else #endif if (b->type != vec_dot_type) { - cur = ggml_type_size(vec_dot_type)*ggml_nelements(b)/ggml_blck_size(vec_dot_type); + cur = ggml_row_size(vec_dot_type, ggml_nelements(b)); } } break; case GGML_OP_OUT_PROD: @@ -18703,7 +18697,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p return NULL; } - const size_t size_cur = (ne*ggml_type_size(info->type))/ggml_blck_size(info->type); + const size_t size_cur = ggml_row_size(info->type, ne); ctx->size += GGML_PAD(size_cur, ctx->alignment); } diff --git a/ggml.h b/ggml.h index 84d6ba8b1..68f7833b6 100644 --- a/ggml.h +++ b/ggml.h @@ -638,7 +638,6 @@ extern "C" { GGML_API int64_t ggml_nrows (const struct ggml_tensor * tensor); GGML_API size_t ggml_nbytes (const struct ggml_tensor * tensor); GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN - GGML_API size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_split); GGML_API int ggml_blck_size(enum ggml_type type); GGML_API size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index afca85143..df2c3fb6e 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -54,7 +54,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m ggml_backend_tensor_set(tensor, data.data(), 0, size * sizeof(float)); } else if (ggml_is_quantized(tensor->type) || tensor->type == GGML_TYPE_F16) { GGML_ASSERT(size % ggml_blck_size(tensor->type) == 0); - std::vector dataq(ggml_type_size(tensor->type)*size/ggml_blck_size(tensor->type)); + std::vector 
dataq(ggml_row_size(tensor->type, size)); int64_t hist[16]; ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist); ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size()); @@ -72,6 +72,8 @@ static std::vector tensor_to_float(const ggml_tensor * t) { ggml_type_traits_t tt = ggml_internal_get_type_traits(t->type); size_t bs = ggml_blck_size(t->type); + std::vector vq(ggml_blck_size(t->type)); + bool quantized = ggml_is_quantized(t->type); // access elements by index to avoid gaps in views for (int64_t i3 = 0; i3 < t->ne[3]; i3++) { @@ -85,9 +87,8 @@ static std::vector tensor_to_float(const ggml_tensor * t) { tv.push_back(*(float *) &buf[i]); } else if (t->type == GGML_TYPE_I32) { tv.push_back((float)*(int32_t *) &buf[i]); - } else if (ggml_is_quantized(t->type)) { - std::vector vq(ggml_blck_size(t->type)); - tt.to_float(&buf[i], vq.data(), ggml_blck_size(t->type)); + } else if (quantized) { + tt.to_float(&buf[i], vq.data(), bs); tv.insert(tv.end(), vq.begin(), vq.end()); } else { GGML_ASSERT(false); diff --git a/tests/test-quantize-perf.cpp b/tests/test-quantize-perf.cpp index 62d0190f9..09d410b7f 100644 --- a/tests/test-quantize-perf.cpp +++ b/tests/test-quantize-perf.cpp @@ -286,7 +286,7 @@ int main(int argc, char * argv[]) { qfns.from_float_reference(test_data1, test_q1, size); return test_q1[0]; }; - size_t quantized_size = size / ggml_blck_size(type) * ggml_type_size(type); + size_t quantized_size = ggml_row_size(type, size); benchmark_function(size, quantized_size, iterations, quantize_fn); } printf("\n"); @@ -300,7 +300,7 @@ int main(int argc, char * argv[]) { qfns.from_float(test_data1, test_q1, size); return test_q1[0]; }; - size_t quantized_size = size / ggml_blck_size(type) * ggml_type_size(type); + size_t quantized_size = ggml_row_size(type, size); benchmark_function(size, quantized_size, iterations, quantize_fn); } printf("\n"); @@ -315,7 +315,7 @@ int main(int argc, char * argv[]) { qfns.to_float(test_q1, test_out, size); return test_out[0]; }; - size_t quantized_size = size / ggml_blck_size(type) * ggml_type_size(type); + size_t quantized_size = ggml_row_size(type, size); benchmark_function(size, quantized_size, iterations, quantize_fn); } printf("\n"); @@ -330,7 +330,7 @@ int main(int argc, char * argv[]) { vdot.from_float(test_data1, test_q1, size); return test_q1[0]; }; - size_t quantized_size = size / ggml_blck_size(type) * ggml_type_size(type); + size_t quantized_size = ggml_row_size(type, size); benchmark_function(size, quantized_size, iterations, quantize_fn); } printf("\n"); @@ -347,7 +347,7 @@ int main(int argc, char * argv[]) { qfns.vec_dot(size, &result, test_q1, test_q2); return result; }; - size_t quantized_size = size / ggml_blck_size(type) * ggml_type_size(type); + size_t quantized_size = ggml_row_size(type, size); benchmark_function(size, quantized_size, iterations, quantize_fn); } printf("\n"); From ee4725a686643669a8587142fa068cbf29de3ce2 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 15 Dec 2023 12:45:50 +0100 Subject: [PATCH 087/811] ggml : group mul_mat_id rows by matrix (cpu only) (#4480) * ggml : group mul_mat_id rows by matrix (cpu only) * remove mmid parameters from mm forward * store row groups in wdata and calculate only once in GGML_TASK_INIT ggml-ci --- ggml.c | 237 +++++++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 195 insertions(+), 42 deletions(-) diff --git a/ggml.c b/ggml.c index 1feb7ead3..ad546a731 100644 --- a/ggml.c +++ b/ggml.c @@ -9580,16 +9580,11 @@ static bool 
ggml_compute_forward_mul_mat_use_blas( } #endif -// off1 = offset in i11 and i1 -// cne1 = ne11 and ne1 -// in a normal matrix multiplication, off1 = 0 and cne1 = ne1 -// during GGML_TASK_INIT, the full src1 is converted regardless of off1 and cne1 static void ggml_compute_forward_mul_mat( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, - struct ggml_tensor * dst, - int64_t off1, int64_t cne1) { + struct ggml_tensor * dst) { int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -9657,9 +9652,9 @@ static void ggml_compute_forward_mul_mat( const int64_t i03 = i13/r3; const int64_t i02 = i12/r2; - const void * x = (char *) src0->data + i02*nb02 + i03*nb03; - const float * y = (float *) ((char *) src1->data + off1*nb11 + i12*nb12 + i13*nb13); - float * d = (float *) ((char *) dst->data + off1*nb1 + i12*nb2 + i13*nb3); + const void * x = (char *) src0->data + i02*nb02 + i03*nb03; + const float * y = (float *) ((char *) src1->data + i12*nb12 + i13*nb13); + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); if (type != GGML_TYPE_F32) { float * const wdata = params->wdata; @@ -9676,7 +9671,7 @@ static void ggml_compute_forward_mul_mat( } cblas_sgemm(CblasRowMajor, CblasNoTrans, CblasTrans, - cne1, ne01, ne10, + ne1, ne01, ne10, 1.0f, y, ne10, x, ne00, 0.0f, d, ne01); @@ -9717,8 +9712,8 @@ static void ggml_compute_forward_mul_mat( const void * wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; const size_t row_size = ggml_row_size(vec_dot_type, ne10); - const int64_t nr0 = ne01; // src0 rows - const int64_t nr1 = cne1*ne12*ne13; // src1 rows + const int64_t nr0 = ne01; // src0 rows + const int64_t nr1 = ne1*ne12*ne13; // src1 rows //printf("nr0 = %lld, nr1 = %lld\n", nr0, nr1); @@ -9760,9 +9755,9 @@ static void ggml_compute_forward_mul_mat( for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { - const int64_t i13 = (ir1/(ne12*cne1)); - const int64_t i12 = (ir1 - i13*ne12*cne1)/cne1; - const int64_t i11 = (ir1 - i13*ne12*cne1 - i12*cne1) + off1; + const int64_t i13 = (ir1/(ne12*ne1)); + const int64_t i12 = (ir1 - i13*ne12*ne1)/ne1; + const int64_t i11 = (ir1 - i13*ne12*ne1 - i12*ne1); // broadcast src0 into src1 const int64_t i03 = i13/r3; @@ -9802,28 +9797,191 @@ static void ggml_compute_forward_mul_mat( static void ggml_compute_forward_mul_mat_id( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, + const struct ggml_tensor * ids, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { - // during GGML_TASK_INIT the entire src1 is converted to vec_dot_type - ggml_compute_forward_mul_mat(params, dst->src[2], src1, dst, 0, dst->ne[1]); - return; - } + const struct ggml_tensor * src0 = dst->src[2]; // only for GGML_TENSOR_BINARY_OP_LOCALS - const struct ggml_tensor * ids = src0; + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const enum ggml_type type = src0->type; + + const bool src1_cont = ggml_is_contiguous(src1); + + ggml_vec_dot_t const vec_dot = type_traits[type].vec_dot; + enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type; + ggml_from_float_t const from_float_to_vec_dot = type_traits[vec_dot_type].from_float; + + GGML_ASSERT(ne0 == ne01); + GGML_ASSERT(ne1 == ne11); + GGML_ASSERT(ne2 == ne12); + 
GGML_ASSERT(ne3 == ne13); + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + // broadcast factors + const int64_t r2 = ne12/ne02; + const int64_t r3 = ne13/ne03; + + // row groups const int id = ggml_get_op_params_i32(dst, 0); const int n_as = ggml_get_op_params_i32(dst, 1); - for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { - const int32_t row_id = *(const int32_t *) ((const char *) ids->data + i01*ids->nb[1] + id*ids->nb[0]); + char * wdata_src1_end = (src1->type == vec_dot_type) ? + (char *) params->wdata : + (char *) params->wdata + GGML_PAD(ggml_row_size(vec_dot_type, ggml_nelements(src1)), sizeof(int64_t)); - GGML_ASSERT(row_id >= 0 && row_id < n_as); + int64_t * matrix_row_counts = (int64_t *) (wdata_src1_end); // [n_as] + int64_t * matrix_rows = matrix_row_counts + n_as; // [n_as][ne11] - const struct ggml_tensor * src0_row = dst->src[row_id + 2]; - ggml_compute_forward_mul_mat(params, src0_row, src1, dst, i01, 1); + #define MMID_MATRIX_ROW(row_id, i1) matrix_rows[(row_id)*ne11 + (i1)] + + if (params->type == GGML_TASK_INIT) { + char * wdata = params->wdata; + if (src1->type != vec_dot_type) { + const size_t row_size = ggml_row_size(vec_dot_type, ne10); + + assert(params->wsize >= ne11*ne12*ne13*row_size); + assert(src1->type == GGML_TYPE_F32); + + for (int64_t i13 = 0; i13 < ne13; ++i13) { + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = 0; i11 < ne11; ++i11) { + from_float_to_vec_dot((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), (void *) wdata, ne10); + wdata += row_size; + } + } + } + } + + // initialize matrix_row_counts + GGML_ASSERT(wdata == wdata_src1_end); + memset(matrix_row_counts, 0, n_as*sizeof(int64_t)); + + // group rows by src0 matrix + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id = *(const int32_t *) ((const char *) ids->data + i01*ids->nb[1] + id*ids->nb[0]); + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + MMID_MATRIX_ROW(row_id, matrix_row_counts[row_id]) = i01; + matrix_row_counts[row_id] += 1; + } + + return; } + + if (params->type == GGML_TASK_FINALIZE) { + return; + } + + // compute each matrix multiplication in sequence + for (int cur_a = 0; cur_a < n_as; ++cur_a) { + const int64_t cne1 = matrix_row_counts[cur_a]; + + if (cne1 == 0) { + continue; + } + + const struct ggml_tensor * src0_cur = dst->src[cur_a + 2]; + + const void * wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; + const size_t row_size = ggml_row_size(vec_dot_type, ne10); + + const int64_t nr0 = ne01; // src0 rows + const int64_t nr1 = cne1*ne12*ne13; // src1 rows + + //printf("nr0 = %lld, nr1 = %lld\n", nr0, nr1); + + // distribute the thread work across the inner or outer loop based on which one is larger + + const int64_t nth0 = nr0 > nr1 ? nth : 1; // parallelize by src0 rows + const int64_t nth1 = nr0 > nr1 ? 
1 : nth; // parallelize by src1 rows + + const int64_t ith0 = ith % nth0; + const int64_t ith1 = ith / nth0; + + const int64_t dr0 = (nr0 + nth0 - 1)/nth0; + const int64_t dr1 = (nr1 + nth1 - 1)/nth1; + + const int64_t ir010 = dr0*ith0; + const int64_t ir011 = MIN(ir010 + dr0, nr0); + + const int64_t ir110 = dr1*ith1; + const int64_t ir111 = MIN(ir110 + dr1, nr1); + + //printf("ir010 = %6lld, ir011 = %6lld, ir110 = %6lld, ir111 = %6lld\n", ir010, ir011, ir110, ir111); + + // threads with no work simply yield (not sure if it helps) + if (ir010 >= ir011 || ir110 >= ir111) { + sched_yield(); + continue; + } + + assert(ne12 % ne02 == 0); + assert(ne13 % ne03 == 0); + + // block-tiling attempt + const int64_t blck_0 = 16; + const int64_t blck_1 = 16; + + // attempt to reduce false-sharing (does not seem to make a difference) + float tmp[16]; + + for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { + for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { + for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { + const int64_t i13 = (ir1/(ne12*cne1)); // Note: currently, src1 is always a matrix + const int64_t i12 = (ir1 - i13*ne12*cne1)/cne1; + const int64_t _i11 = (ir1 - i13*ne12*cne1 - i12*cne1); + const int64_t i11 = MMID_MATRIX_ROW(cur_a, _i11); + + // broadcast src0 into src1 + const int64_t i03 = i13/r3; + const int64_t i02 = i12/r2; + + const int64_t i1 = i11; + const int64_t i2 = i12; + const int64_t i3 = i13; + + const char * src0_row = (const char *) src0_cur->data + (0 + i02*nb02 + i03*nb03); + + // desc: when src1 is not a contiguous memory block we have to calculate the offset using the strides + // if it is, then we have either copied the data to params->wdata and made it contiguous or we are using + // the original src1 data pointer, so we should index using the indices directly + // TODO: this is a bit of a hack, we should probably have a better way to handle this + const char * src1_col = (const char *) wdata + + (src1_cont || src1->type != vec_dot_type + ? 
(i11 + i12*ne11 + i13*ne12*ne11)*row_size + : (i11*nb11 + i12*nb12 + i13*nb13)); + + float * dst_col = (float *) ((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb3)); + + //for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { + // vec_dot(ne00, &dst_col[ir0], src0_row + ir0*nb01, src1_col); + //} + + for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { + vec_dot(ne00, &tmp[ir0 - iir0], src0_row + ir0*nb01, src1_col); + } + memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); + } + } + } + } + + #undef MMID_MATRIX_ROW } // ggml_compute_forward_out_prod @@ -14191,7 +14349,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_MUL_MAT: { - ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor, 0, tensor->ne[1]); + ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor); } break; case GGML_OP_MUL_MAT_ID: { @@ -15991,7 +16149,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } break; case GGML_OP_MUL_MAT_ID: { - // FIXME: blas n_tasks = n_threads; } break; case GGML_OP_OUT_PROD: @@ -16325,20 +16482,16 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_MUL_MAT_ID: { - const struct ggml_tensor * a = node->src[2]; - const struct ggml_tensor * b = node->src[1]; - const enum ggml_type vec_dot_type = type_traits[a->type].vec_dot_type; -#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(a, b, node)) { - if (a->type != GGML_TYPE_F32) { - // here we need memory just for single 2D matrix from src0 - cur = ggml_type_size(GGML_TYPE_F32)*(a->ne[0]*a->ne[1]); - } - } else -#endif - if (b->type != vec_dot_type) { - cur = ggml_row_size(vec_dot_type, ggml_nelements(b)); + const struct ggml_tensor * src0 = node->src[2]; + const struct ggml_tensor * src1 = node->src[1]; + const enum ggml_type vec_dot_type = type_traits[src0->type].vec_dot_type; + if (src1->type != vec_dot_type) { + cur = ggml_row_size(vec_dot_type, ggml_nelements(src1)); } + const int n_as = ggml_get_op_params_i32(node, 1); + cur = GGML_PAD(cur, sizeof(int64_t)); // align + cur += n_as * sizeof(int64_t); // matrix_row_counts + cur += n_as * src1->ne[1] * sizeof(int64_t); // matrix_rows } break; case GGML_OP_OUT_PROD: { From 88ae8952b65cbf32eb1f5703681ea592e510e570 Mon Sep 17 00:00:00 2001 From: ShadovvBeast Date: Fri, 15 Dec 2023 13:49:01 +0200 Subject: [PATCH 088/811] server : add optional API Key Authentication example (#4441) * Add API key authentication for enhanced server-client security * server : to snake_case --------- Co-authored-by: Georgi Gerganov --- examples/server/public/completion.js | 3 +- examples/server/public/index.html | 7 ++- examples/server/server.cpp | 70 ++++++++++++++++++++++++---- 3 files changed, 70 insertions(+), 10 deletions(-) diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index c281f0fbd..6e2b99565 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -34,7 +34,8 @@ export async function* llama(prompt, params = {}, config = {}) { headers: { 'Connection': 'keep-alive', 'Content-Type': 'application/json', - 'Accept': 'text/event-stream' + 'Accept': 'text/event-stream', + ...(params.api_key ? 
{'Authorization': `Bearer ${params.api_key}`} : {}) }, signal: controller.signal, }); diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 451fd4a3b..07d779d20 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -235,7 +235,8 @@ grammar: '', n_probs: 0, // no completion_probabilities, image_data: [], - cache_prompt: true + cache_prompt: true, + api_key: '' }) /* START: Support for storing prompt templates and parameters in browsers LocalStorage */ @@ -790,6 +791,10 @@
${IntField({ label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs })}
+
+ + +
` diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 39d1e83d1..5f93dcb66 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -36,6 +36,7 @@ using json = nlohmann::json; struct server_params { std::string hostname = "127.0.0.1"; + std::string api_key; std::string public_path = "examples/server/public"; int32_t port = 8080; int32_t read_timeout = 600; @@ -1953,6 +1954,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --host ip address to listen (default (default: %s)\n", sparams.hostname.c_str()); printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); + printf(" --api-key API_KEY optional api key to enhance server security. If set, requests must include this key for access.\n"); printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); @@ -2002,6 +2004,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } sparams.public_path = argv[i]; } + else if (arg == "--api-key") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + sparams.api_key = argv[i]; + } else if (arg == "--timeout" || arg == "-to") { if (++i >= argc) @@ -2669,6 +2680,32 @@ int main(int argc, char **argv) httplib::Server svr; + // Middleware for API key validation + auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { + // If API key is not set, skip validation + if (sparams.api_key.empty()) { + return true; + } + + // Check for API key in the header + auto auth_header = req.get_header_value("Authorization"); + std::string prefix = "Bearer "; + if (auth_header.substr(0, prefix.size()) == prefix) { + std::string received_api_key = auth_header.substr(prefix.size()); + if (received_api_key == sparams.api_key) { + return true; // API key is valid + } + } + + // API key is invalid or not provided + res.set_content("Unauthorized: Invalid API Key", "text/plain"); + res.status = 401; // Unauthorized + + LOG_WARNING("Unauthorized: Invalid API Key", {}); + + return false; + }; + svr.set_default_headers({{"Server", "llama.cpp"}, {"Access-Control-Allow-Origin", "*"}, {"Access-Control-Allow-Headers", "content-type"}}); @@ -2711,8 +2748,11 @@ int main(int argc, char **argv) res.set_content(data.dump(), "application/json"); }); - svr.Post("/completion", [&llama](const httplib::Request &req, httplib::Response &res) + svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + if (!validate_api_key(req, res)) { + return; + } json data = json::parse(req.body); const int task_id = llama.request_completion(data, false, false, -1); if (!json_value(data, "stream", false)) { @@ -2799,8 +2839,11 @@ int main(int argc, char **argv) }); // TODO: add mount point without "/v1" prefix -- how? 
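// A sketch of the check performed by the validate_api_key middleware above: it reduces to
// a plain string comparison against the "Authorization: Bearer <key>" header. The helper
// below is hypothetical and not part of the patch; server_api_key stands in for sparams.api_key.

#include <string>

static bool is_authorized(const std::string & server_api_key, const std::string & auth_header) {
    if (server_api_key.empty()) {
        return true; // no key configured -> authentication is disabled
    }
    const std::string prefix = "Bearer ";
    if (auth_header.size() <= prefix.size() || auth_header.compare(0, prefix.size(), prefix) != 0) {
        return false; // header missing or not in "Bearer <key>" form
    }
    return auth_header.substr(prefix.size()) == server_api_key;
}

// e.g. is_authorized("secret", "Bearer secret") is true; anything else makes the handlers
// above return 401 Unauthorized before the request ever reaches the model.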
- svr.Post("/v1/chat/completions", [&llama](const httplib::Request &req, httplib::Response &res) + svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + if (!validate_api_key(req, res)) { + return; + } json data = oaicompat_completion_params_parse(json::parse(req.body)); const int task_id = llama.request_completion(data, false, false, -1); @@ -2869,8 +2912,11 @@ int main(int argc, char **argv) } }); - svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res) + svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + if (!validate_api_key(req, res)) { + return; + } json data = json::parse(req.body); const int task_id = llama.request_completion(data, true, false, -1); if (!json_value(data, "stream", false)) { @@ -3005,11 +3051,15 @@ int main(int argc, char **argv) svr.set_error_handler([](const httplib::Request &, httplib::Response &res) { + if (res.status == 401) + { + res.set_content("Unauthorized", "text/plain"); + } if (res.status == 400) { res.set_content("Invalid request", "text/plain"); } - else if (res.status != 500) + else if (res.status == 404) { res.set_content("File Not Found", "text/plain"); res.status = 404; @@ -3032,11 +3082,15 @@ int main(int argc, char **argv) // to make it ctrl+clickable: LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - LOG_INFO("HTTP server listening", { - {"hostname", sparams.hostname}, - {"port", sparams.port}, - }); + std::unordered_map log_data; + log_data["hostname"] = sparams.hostname; + log_data["port"] = std::to_string(sparams.port); + if (!sparams.api_key.empty()) { + log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + } + + LOG_INFO("HTTP server listening", log_data); // run the HTTP server in a thread - see comment below std::thread t([&]() { From 8a5be3bd5885d79ad84aadf32bb8c1a67bd43c19 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Fri, 15 Dec 2023 22:16:15 -0500 Subject: [PATCH 089/811] llama : sanity checks for access to logits (#4274) Co-authored-by: Georgi Gerganov --- llama.cpp | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/llama.cpp b/llama.cpp index eddb70859..58fe7492e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1505,6 +1505,10 @@ struct llama_context { // decode output (2-dimensional array: [n_tokens][n_vocab]) std::vector logits; +#ifndef NDEBUG + // guard against access to unset logits + std::vector logits_valid; +#endif bool logits_all = false; // input embedding (1-dimensional array: [n_embd]) @@ -6150,6 +6154,14 @@ static int llama_decode_internal( { auto & logits_out = lctx.logits; +#ifndef NDEBUG + auto & logits_valid = lctx.logits_valid; + logits_valid.clear(); + logits_valid.resize(n_tokens); + + logits_out.clear(); +#endif + if (batch.logits) { logits_out.resize(n_vocab * n_tokens); for (uint32_t i = 0; i < n_tokens; i++) { @@ -6157,13 +6169,22 @@ static int llama_decode_internal( continue; } memcpy(logits_out.data() + (n_vocab*i), (float *) ggml_get_data(res) + (n_vocab*i), sizeof(float)*n_vocab); +#ifndef NDEBUG + logits_valid[i] = true; +#endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*n_tokens); +#ifndef NDEBUG + std::fill(logits_valid.begin(), logits_valid.end(), true); +#endif } else { logits_out.resize(n_vocab); memcpy(logits_out.data(), (float *) 
ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab); +#ifndef NDEBUG + logits_valid[n_tokens - 1] = true; +#endif } } @@ -10052,6 +10073,7 @@ float * llama_get_logits(struct llama_context * ctx) { } float * llama_get_logits_ith(struct llama_context * ctx, int32_t i) { + assert(ctx->logits_valid.at(i)); return ctx->logits.data() + i*ctx->model.hparams.n_vocab; } From c6c4fc081c1df1c60a9bfe3e6a3fd086f1a29ec7 Mon Sep 17 00:00:00 2001 From: slaren Date: Sat, 16 Dec 2023 18:58:46 +0100 Subject: [PATCH 090/811] lora : add support for non-llama models (#3333) * lora : add support for non-llama models ggml-ci * avoid leaking ggml_context on failure cleanup ggml-ci * lora : allow 1d tensors * lora : include embd and output layers in size calculation * fix style --- convert-lora-to-ggml.py | 86 +++++++++++++------------- llama.cpp | 133 ++++++++++++++++++++-------------------- llama.h | 1 + 3 files changed, 114 insertions(+), 106 deletions(-) diff --git a/convert-lora-to-ggml.py b/convert-lora-to-ggml.py index a937410dd..53bb8a3d9 100755 --- a/convert-lora-to-ggml.py +++ b/convert-lora-to-ggml.py @@ -3,7 +3,6 @@ from __future__ import annotations import json import os -import re import struct import sys from typing import Any, BinaryIO, Sequence @@ -11,43 +10,15 @@ from typing import Any, BinaryIO, Sequence import numpy as np import torch +from pathlib import Path +if 'NO_LOCAL_GGUF' not in os.environ: + sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) +import gguf + + NUMPY_TYPE_TO_FTYPE: dict[str, int] = {"float32": 0, "float16": 1} -HF_SUBLAYER_TO_GGML = { - "self_attn.q_proj": "attn_q", - "self_attn.k_proj": "attn_k", - "self_attn.v_proj": "attn_v", - "self_attn.o_proj": "attn_output", - "mlp.gate_proj": "ffn_gate", - "mlp.down_proj": "ffn_down", - "mlp.up_proj": "ffn_up", - "input_layernorm": "attn_norm", - "post_attention_layernorm": "ffn_norm", -} - - -def translate_tensor_name(t: str) -> str: - match = re.match(r".*layers\.(\d+)\.(\w+\.\w+)\.lora_(A|B)\.weight", t) - if match: - nn = match.group(1) - sub_layer = match.group(2) - lora_type = match.group(3) - - sub_layer_renamed = HF_SUBLAYER_TO_GGML.get(sub_layer) - if sub_layer_renamed is None: - print(f"Error: unrecognized sub-layer {sub_layer} in tensor {t}") - sys.exit(1) - - output_string = ( - f"blk.{nn}.{HF_SUBLAYER_TO_GGML[sub_layer]}.weight.lora{lora_type}" - ) - return output_string - else: - print(f"Error: unrecognized tensor {t}") - sys.exit(1) - - def write_file_header(fout: BinaryIO, params: dict[str, Any]) -> None: fout.write(b"ggla"[::-1]) # magic (ggml lora) fout.write(struct.pack("i", 1)) # file version @@ -61,9 +32,7 @@ def write_file_header(fout: BinaryIO, params: dict[str, Any]) -> None: fout.write(struct.pack("i", int(params["lora_alpha"]))) -def write_tensor_header( - self, name: str, shape: Sequence[int], data_type: np.dtype[Any] -) -> None: +def write_tensor_header(fout: BinaryIO, name: str, shape: Sequence[int], data_type: np.dtype[Any]) -> None: sname = name.encode("utf-8") fout.write( struct.pack( @@ -78,11 +47,12 @@ def write_tensor_header( fout.seek((fout.tell() + 31) & -32) -if len(sys.argv) != 2: - print(f"Usage: python {sys.argv[0]} ") +if len(sys.argv) < 2: + print(f"Usage: python {sys.argv[0]} [arch]") print( "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'" ) + print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)") sys.exit(1) input_json = os.path.join(sys.argv[1], "adapter_config.json") @@ -90,6 
+60,14 @@ input_model = os.path.join(sys.argv[1], "adapter_model.bin") output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin") model = torch.load(input_model, map_location="cpu") +arch_name = sys.argv[2] if len(sys.argv) == 3 else "llama" + +if arch_name not in gguf.MODEL_ARCH_NAMES.values(): + print(f"Error: unsupported architecture {arch_name}") + sys.exit(1) + +arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)] +name_map = gguf.TensorNameMap(arch, 200) # 200 layers ought to be enough for anyone with open(input_json, "r") as f: params = json.load(f) @@ -117,6 +95,7 @@ with open(output_path, "wb") as fout: write_file_header(fout, params) for k, v in model.items(): + orig_k = k if k.endswith(".default.weight"): k = k.replace(".default.weight", ".weight") if k in ["llama_proj.weight", "llama_proj.bias"]: @@ -129,7 +108,32 @@ with open(output_path, "wb") as fout: v = v.float() t = v.detach().numpy() - tname = translate_tensor_name(k) + + prefix = "base_model.model." + if k.startswith(prefix): + k = k[len(prefix) :] + + lora_suffixes = (".lora_A.weight", ".lora_B.weight") + if k.endswith(lora_suffixes): + suffix = k[-len(lora_suffixes[0]):] + k = k[: -len(lora_suffixes[0])] + else: + print(f"Error: unrecognized tensor name {orig_k}") + sys.exit(1) + + tname = name_map.get_name(k) + if tname is None: + print(f"Error: could not map tensor name {orig_k}") + print(" Note: the arch parameter must be specified if the model is not llama") + sys.exit(1) + + if suffix == ".lora_A.weight": + tname += ".weight.loraA" + elif suffix == ".lora_B.weight": + tname += ".weight.loraB" + else: + assert False + print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB") write_tensor_header(fout, tname, t.shape, t.dtype) t.tofile(fout) diff --git a/llama.cpp b/llama.cpp index 58fe7492e..f49214c13 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8647,53 +8647,60 @@ static int llama_apply_lora_from_file_internal( const int64_t t_start_lora_us = ggml_time_us(); - auto fin = std::ifstream(path_lora, std::ios::binary); - if (!fin) { - LLAMA_LOG_ERROR("%s: failed to open '%s'\n", __func__, path_lora); - return 1; - } + llama_file fin(path_lora, "rb"); // verify magic and version { - uint32_t magic; - fin.read((char *) &magic, sizeof(magic)); - uint32_t format_version; - fin.read((char *) &format_version, sizeof(format_version)); + uint32_t magic = fin.read_u32(); + if (magic != LLAMA_FILE_MAGIC_GGLA) { + LLAMA_LOG_ERROR("%s: bad file magic\n", __func__); + return 1; + } + uint32_t format_version = fin.read_u32(); if (format_version != 1) { LLAMA_LOG_ERROR("%s: unsupported file version\n", __func__ ); return 1; } } - int32_t lora_r; - int32_t lora_alpha; - fin.read((char *) &lora_r, sizeof(lora_r)); - fin.read((char *) &lora_alpha, sizeof(lora_alpha)); + int32_t lora_r = fin.read_u32(); + int32_t lora_alpha = fin.read_u32(); float scaling = scale * (float)lora_alpha / (float)lora_r; LLAMA_LOG_INFO("%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling); + // create a name -> tensor map of the model to accelerate lookups + // find the max tensor size to estimate the required temporary buffer size + size_t max_tensor_size = 0; + std::unordered_map model_tensors; + for (const auto & kv : model.tensors_by_name) { + model_tensors.insert(kv); + size_t f32_size = ggml_nelements(kv.second) * sizeof(float); + max_tensor_size = std::max(max_tensor_size, f32_size); + } + // create a temporary ggml context to store the lora tensors - // todo: 
calculate size from biggest possible tensor - std::vector lora_buf(1024ull * 1024ull * 1024ull); + // TODO: use ggml-alloc + size_t lora_ctx_size = max_tensor_size * 3; + LLAMA_LOG_INFO("%s: allocating %.f MB for lora temporary buffer\n", __func__, lora_ctx_size / 1024.0 / 1024.0); + std::vector lora_buf(lora_ctx_size); + struct ggml_init_params params; params.mem_size = lora_buf.size(); params.mem_buffer = lora_buf.data(); params.no_alloc = false; - ggml_context * lora_ctx = ggml_init(params); - std::unordered_map lora_tensors; + using unique_context = std::unique_ptr; - // create a name -> tensor map of the model to accelerate lookups - std::unordered_map model_tensors; - for (const auto & kv : model.tensors_by_name) { - model_tensors.insert(kv); - } + unique_context lora_ctx(nullptr, ggml_free); + lora_ctx.reset(ggml_init(params)); + std::unordered_map lora_tensors; // load base model std::unique_ptr ml; - ggml_context * base_ctx = NULL; + + unique_context base_ctx(nullptr, ggml_free); std::vector base_buf; if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); @@ -8702,6 +8709,7 @@ static int llama_apply_lora_from_file_internal( size_t ctx_size; size_t mmapped_size; ml->calc_sizes(ctx_size, mmapped_size); + base_buf.resize(ctx_size); ggml_init_params base_params; @@ -8709,9 +8717,9 @@ static int llama_apply_lora_from_file_internal( base_params.mem_buffer = base_buf.data(); base_params.no_alloc = ml->use_mmap; - base_ctx = ggml_init(base_params); + base_ctx.reset(ggml_init(base_params)); - // maybe this should in llama_model_loader + // maybe this should be in llama_model_loader if (ml->use_mmap) { ml->mapping.reset(new llama_mmap(&ml->file, /* prefetch */ 0, ggml_is_numa())); } @@ -8724,27 +8732,35 @@ static int llama_apply_lora_from_file_internal( std::vector work_buffer; while (true) { + if (fin.tell() == fin.size) { + // eof + break; + } + int32_t n_dims; - int32_t length; + int32_t name_len; int32_t ftype; - fin.read(reinterpret_cast(&n_dims), sizeof(n_dims)); - fin.read(reinterpret_cast(&length), sizeof(length)); - fin.read(reinterpret_cast(&ftype), sizeof(ftype)); - if (fin.eof()) { - break; + fin.read_raw(&n_dims, sizeof(n_dims)); + fin.read_raw(&name_len, sizeof(name_len)); + fin.read_raw(&ftype, sizeof(ftype)); + + if (n_dims != 1 && n_dims != 2) { + LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); + return 1; } int32_t ne[2] = { 1, 1 }; for (int i = 0; i < n_dims; ++i) { - fin.read(reinterpret_cast(&ne[i]), sizeof(ne[i])); + fin.read_raw(&ne[i], sizeof(ne[i])); } std::string name; { + GGML_ASSERT(name_len <= 1024); char buf[1024]; - fin.read(buf, length); - name = std::string(buf, length); + fin.read_raw(buf, name_len); + name = std::string(buf, name_len); } // check for lora suffix and get the type of tensor @@ -8758,7 +8774,7 @@ static int llama_apply_lora_from_file_internal( std::string lora_type = name.substr(pos + lora_suffix.length()); std::string base_name = name; base_name.erase(pos); - // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(),base_name.c_str(), lora_type.c_str()); + // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(), base_name.c_str(), lora_type.c_str()); if (model_tensors.find(base_name) == model_tensors.end()) { LLAMA_LOG_ERROR("%s: unknown tensor '%s' in lora adapter\n", __func__, name.data()); @@ -8777,22 +8793,15 @@ static int llama_apply_lora_from_file_internal( return false; } } - ggml_tensor * lora_tensor; - if (n_dims == 2) { 
- lora_tensor = ggml_new_tensor_2d(lora_ctx, wtype, ne[0], ne[1]); - } - else { - LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); - return 1; - } - ggml_set_name(lora_tensor, "lora_tensor"); + ggml_tensor * lora_tensor = ggml_new_tensor_2d(lora_ctx.get(), wtype, ne[0], ne[1]); + ggml_set_name(lora_tensor, name.c_str()); // load tensor data - size_t offset = fin.tellg(); + size_t offset = fin.tell(); size_t tensor_data_size = ggml_nbytes(lora_tensor); offset = (offset + 31) & -32; - fin.seekg(offset); - fin.read((char*)lora_tensor->data, tensor_data_size); + fin.seek(offset, SEEK_SET); + fin.read_raw(lora_tensor->data, tensor_data_size); lora_tensors[name] = lora_tensor; @@ -8822,13 +8831,11 @@ static int llama_apply_lora_from_file_internal( // load from base model if (gguf_find_tensor(ctx_gguf, base_name.c_str()) < 0) { - // TODO: throw LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); return 1; } - // TODO: not tested!! maybe not working! - base_t = ml->create_tensor(base_ctx, base_name, { (uint32_t)dest_t->ne[0], (uint32_t)dest_t->ne[1] }, GGML_BACKEND_CPU); + base_t = ml->create_tensor(base_ctx.get(), base_name, { dest_t->ne[0], dest_t->ne[1] }, GGML_BACKEND_CPU); ml->load_data_for(base_t); } else { base_t = dest_t; @@ -8857,43 +8864,45 @@ static int llama_apply_lora_from_file_internal( } // w = w + BA*s - ggml_tensor * BA = ggml_mul_mat(lora_ctx, loraA, loraB); + ggml_tensor * BA = ggml_mul_mat(lora_ctx.get(), loraA, loraB); offload_func(BA); ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx, scaling); + ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx.get(), scaling); ggml_set_name(scale_tensor, "scale_tensor"); - BA = ggml_scale_inplace(lora_ctx, BA, scale_tensor); + BA = ggml_scale_inplace(lora_ctx.get(), BA, scale_tensor); offload_func(BA); ggml_set_name(BA, "BA_scaled"); } ggml_tensor * r; if (base_t == dest_t) { - r = ggml_add_inplace(lora_ctx, dest_t, BA); + r = ggml_add_inplace(lora_ctx.get(), dest_t, BA); offload_func_force_inplace(r); ggml_set_name(r, "r_add_inplace"); } else { - r = ggml_add(lora_ctx, base_t, BA); + r = ggml_add(lora_ctx.get(), base_t, BA); offload_func(r); ggml_set_name(r, "r_add"); - r = ggml_cpy(lora_ctx, r, dest_t); + r = ggml_cpy(lora_ctx.get(), r, dest_t); offload_func(r); ggml_set_name(r, "r_cpy"); } - struct ggml_cgraph * gf = ggml_new_graph(lora_ctx); + struct ggml_cgraph * gf = ggml_new_graph(lora_ctx.get()); ggml_build_forward_expand(gf, r); ggml_graph_compute_helper(work_buffer, gf, n_threads); + // the tensors in the adapter must be sorted such that loraA and loraB of the same tensor are next to each other + GGML_ASSERT(lora_tensors.size() == 2); + // we won't need these tensors again, reset the context to save memory - ggml_free(lora_ctx); - lora_ctx = ggml_init(params); + lora_ctx.reset(ggml_init(params)); lora_tensors.clear(); n_tensors++; @@ -8903,12 +8912,6 @@ static int llama_apply_lora_from_file_internal( } } - // TODO: this should be in a destructor, it will leak on failure - ggml_free(lora_ctx); - if (base_ctx) { - ggml_free(base_ctx); - } - const int64_t t_lora_us = ggml_time_us() - t_start_lora_us; LLAMA_LOG_INFO(" done (%.2f ms)\n", t_lora_us / 1000.0); diff --git a/llama.h b/llama.h index 45a65cacb..15ab4f80e 100644 --- a/llama.h +++ b/llama.h @@ -39,6 +39,7 @@ #define LLAMA_MAX_RNG_STATE (64*1024) +#define LLAMA_FILE_MAGIC_GGLA 0x67676c61u // 'ggla' #define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' 
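// A sketch (hypothetical helpers, not part of the patch) of what the new GGLA constant encodes
// and of the scaling the adapter loader above applies: the magic is the four ASCII bytes of
// "ggla" packed into a uint32, and each adapted weight is updated as W' = W + scaling * (B x A)
// with scaling = scale * lora_alpha / lora_r, as in the loader code.

#include <cstdint>
#include <cassert>

constexpr uint32_t make_magic(char a, char b, char c, char d) {
    return (uint32_t(uint8_t(a)) << 24) | (uint32_t(uint8_t(b)) << 16) |
           (uint32_t(uint8_t(c)) <<  8) |  uint32_t(uint8_t(d));
}
static_assert(make_magic('g', 'g', 'l', 'a') == 0x67676c61u, "matches LLAMA_FILE_MAGIC_GGLA");

inline float lora_scaling(float scale, int32_t lora_alpha, int32_t lora_r) {
    assert(lora_r != 0);
    return scale * (float) lora_alpha / (float) lora_r;
}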
#define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN From 5daa5f54fdcd2b5228add1a4c43a1897b2168f35 Mon Sep 17 00:00:00 2001 From: Bach Le Date: Sun, 17 Dec 2023 18:57:33 +0800 Subject: [PATCH 091/811] Link to cublas dynamically on Windows even with LLAMA_STATIC (#4506) --- CMakeLists.txt | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 57b43c136..e3cd43ab3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -291,7 +291,12 @@ if (LLAMA_CUBLAS) add_compile_definitions(GGML_CUDA_PEER_MAX_BATCH_SIZE=${LLAMA_CUDA_PEER_MAX_BATCH_SIZE}) if (LLAMA_STATIC) - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas_static CUDA::cublasLt_static) + if (WIN32) + # As of 12.3.1 CUDA Tookit for Windows does not offer a static cublas library + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas CUDA::cublasLt) + else () + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas_static CUDA::cublasLt_static) + endif() else() set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart CUDA::cublas CUDA::cublasLt) endif() From 62bd52b7bf90819e75f427a95a484cd5eee0b3c7 Mon Sep 17 00:00:00 2001 From: mzcu Date: Sun, 17 Dec 2023 15:54:37 +0100 Subject: [PATCH 092/811] server : allow requests larger than 8K (#4500) --- examples/server/server.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 5f93dcb66..a9f8b3747 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -10,7 +10,8 @@ // crash the server in debug mode, otherwise send an http 500 error #define CPPHTTPLIB_NO_EXCEPTIONS 1 #endif - +// increase max payload length to allow use of larger context size +#define CPPHTTPLIB_FORM_URL_ENCODED_PAYLOAD_MAX_LENGTH 1048576 #include "httplib.h" #include "json.hpp" From eb16dae7e70ca97396190698b29c0f9ee3388e88 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 17 Dec 2023 14:56:09 +0000 Subject: [PATCH 093/811] server : fix possible ambiguity in content type charset (#4501) --- examples/server/server.cpp | 44 +++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a9f8b3747..be7b5b95e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2699,7 +2699,7 @@ int main(int argc, char **argv) } // API key is invalid or not provided - res.set_content("Unauthorized: Invalid API Key", "text/plain"); + res.set_content("Unauthorized: Invalid API Key", "text/plain; charset=utf-8"); res.status = 401; // Unauthorized LOG_WARNING("Unauthorized: Invalid API Key", {}); @@ -2714,28 +2714,28 @@ int main(int argc, char **argv) // this is only called if no index.html is found in the public --path svr.Get("/", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html"); + res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html; charset=utf-8"); return false; }); // this is only called if no index.js is found in the public --path svr.Get("/index.js", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript"); + res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript; charset=utf-8"); return false; }); // this is only called if no index.html is found in the public --path svr.Get("/completion.js", [](const 
httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript"); + res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript; charset=utf-8"); return false; }); // this is only called if no index.html is found in the public --path svr.Get("/json-schema-to-grammar.mjs", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript"); + res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript; charset=utf-8"); return false; }); @@ -2746,7 +2746,7 @@ int main(int argc, char **argv) { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() } }; - res.set_content(data.dump(), "application/json"); + res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) @@ -2760,12 +2760,12 @@ int main(int argc, char **argv) std::string completion_text; task_result result = llama.next_result(task_id); if (!result.error && result.stop) { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); } else { res.status = 404; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2836,7 +2836,7 @@ int main(int argc, char **argv) }} }; - res.set_content(models.dump(), "application/json"); + res.set_content(models.dump(), "application/json; charset=utf-8"); }); // TODO: add mount point without "/v1" prefix -- how? 
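// The hunks in this patch append "; charset=utf-8" at each set_content call site. A possible
// alternative (sketch only, hypothetical helper, not what the patch does) would centralize the
// MIME string in one place, assuming cpp-httplib's set_content(body, content_type) interface:

#include <string>
#include "httplib.h"

static void set_content_utf8(httplib::Response & res, const std::string & body, const std::string & mime) {
    res.set_content(body, (mime + "; charset=utf-8").c_str());
}

// usage: set_content_utf8(res, data.dump(), "application/json");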
@@ -2858,10 +2858,10 @@ int main(int argc, char **argv) res.set_content(oaicompat_result.dump(-1, ' ', false, json::error_handler_t::replace), - "application/json"); + "application/json; charset=utf-8"); } else { res.status = 500; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2925,12 +2925,12 @@ int main(int argc, char **argv) task_result result = llama.next_result(task_id); if (!result.error && result.stop) { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); } else { res.status = 404; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2979,11 +2979,11 @@ int main(int argc, char **argv) svr.Get("/model.json", [&llama](const httplib::Request &, httplib::Response &res) { const json data = llama.get_model_props(); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response &res) - { return res.set_content("", "application/json"); }); + { return res.set_content("", "application/json; charset=utf-8"); }); svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) { @@ -2994,7 +2994,7 @@ int main(int argc, char **argv) tokens = llama.tokenize(body["content"], false); } const json data = format_tokenizer_response(tokens); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) @@ -3008,7 +3008,7 @@ int main(int argc, char **argv) } const json data = format_detokenized_response(content); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) @@ -3025,7 +3025,7 @@ int main(int argc, char **argv) } const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true, -1); task_result result = llama.next_result(task_id); - return res.set_content(result.result_json.dump(), "application/json"); + return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); svr.set_logger(log_server_request); @@ -3046,7 +3046,7 @@ int main(int argc, char **argv) { snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); } - res.set_content(buf, "text/plain"); + res.set_content(buf, "text/plain; charset=utf-8"); res.status = 500; }); @@ -3054,15 +3054,15 @@ int main(int argc, char **argv) { if (res.status == 401) { - res.set_content("Unauthorized", "text/plain"); + res.set_content("Unauthorized", "text/plain; charset=utf-8"); } if (res.status == 400) { - res.set_content("Invalid request", "text/plain"); + res.set_content("Invalid request", "text/plain; charset=utf-8"); } else if (res.status == 404) { - res.set_content("File Not Found", "text/plain"); + res.set_content("File Not Found", "text/plain; charset=utf-8"); res.status = 404; } }); From 8edd2b40fdbcafbf630f2cf29306b29d5cb48c42 Mon Sep 17 00:00:00 2001 From: AdithyanI Date: Sun, 17 Dec 2023 
15:57:56 +0100 Subject: [PATCH 094/811] server : fix grammar being ignored (#4494) Fix bug in identifying the grammar. --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index be7b5b95e..c97efe97d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2414,7 +2414,7 @@ json oaicompat_completion_params_parse( llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); llama_params["tfs_z"] = json_value(body, "tfs_z", 0.0); - if (llama_params.count("grammar") != 0) { + if (body.count("grammar") != 0) { llama_params["grammar"] = json_value(body, "grammar", json::object()); } From 0ffc92d2d23a789625f018840469af045be1e3c0 Mon Sep 17 00:00:00 2001 From: olexiyb Date: Sun, 17 Dec 2023 17:02:16 +0200 Subject: [PATCH 095/811] server : disable llm logs if SERVER_VERBOSE is off (#3792) --- examples/server/server.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c97efe97d..04038530f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2645,6 +2645,9 @@ static void append_to_generated_text_from_generated_token_probs(llama_server_con int main(int argc, char **argv) { +#if SERVER_VERBOSE != 1 + log_disable(); +#endif // own arguments required by this example gpt_params params; server_params sparams; From 45668633fdb522a925c3dafc1ecf426f539efb27 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 17 Dec 2023 16:05:56 +0100 Subject: [PATCH 096/811] finetune : keep allocs alive until all allocations are done (#4486) --- examples/finetune/finetune.cpp | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b9849e8c9..6a668d764 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1620,8 +1620,6 @@ int main(int argc, char ** argv) { opt->params.adam.gclip = params.common.adam_gclip; opt->params.adam.eps_f = params.common.adam_eps_f; - ggml_allocr * alloc = NULL; - printf("%s: init model\n", __func__); bool existed = load_checkpoint_lora_file(params.common.fn_checkpoint_in, &model, &lora, train); @@ -1725,10 +1723,9 @@ int main(int argc, char ** argv) { // allocate input tensors mem_input_data.resize(max_input_size); - alloc = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc, tokens_input); - ggml_allocr_alloc(alloc, target_probs); - ggml_allocr_free(alloc); + ggml_allocr_t alloc_inps = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); + ggml_allocr_alloc(alloc_inps, tokens_input); + ggml_allocr_alloc(alloc_inps, target_probs); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1755,7 +1752,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_allocr_t alloc = ggml_allocr_new_measure(tensor_alignment); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1788,7 +1785,7 @@ int main(int argc, char ** argv) { // allocate compute tensors mem_compute_data.resize(max_compute_size); ctx_compute = 
ggml_init(ctx_compute_params); - alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_allocr_t alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1804,6 +1801,8 @@ int main(int argc, char ** argv) { params.common.use_checkpointing ); ggml_allocr_free(alloc); + ggml_allocr_free(alloc_inps); + // tokenize data std::vector train_tokens; From 919c40660fd27157b391b5832d2a577d5afef4cb Mon Sep 17 00:00:00 2001 From: Matheus Gabriel Alves Silva Date: Sun, 17 Dec 2023 12:23:33 -0300 Subject: [PATCH 097/811] build : Check the ROCm installation location (#4485) * build : Check the ROCm installation location * more generic approach * fixup! It was returning the path instead of the command output * fixup! Trailing whitespace --- Makefile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index fb775ae5b..8273f8400 100644 --- a/Makefile +++ b/Makefile @@ -439,9 +439,15 @@ ggml-opencl.o: ggml-opencl.cpp ggml-opencl.h endif # LLAMA_CLBLAST ifdef LLAMA_HIPBLAS - ROCM_PATH ?= /opt/rocm - HIPCC ?= $(ROCM_PATH)/bin/hipcc - GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) + + ifeq ($(wildcard /opt/rocm),) + ROCM_PATH ?= /usr + GPU_TARGETS ?= $(shell $(shell which amdgpu-arch)) + else + ROCM_PATH ?= /opt/rocm + GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) + endif + HIPCC ?= $(ROCM_PATH)/bin/hipcc LLAMA_CUDA_DMMV_X ?= 32 LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 From f7f468a97dceec2f8fe8b1ed7a2091083446ebc7 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 17 Dec 2023 10:45:46 -0500 Subject: [PATCH 098/811] gguf-py : fail fast on nonsensical special token IDs (#4489) --- gguf-py/gguf/vocab.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index de3e5edb5..76924d8f2 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -109,8 +109,10 @@ class SpecialVocab: return True def _set_special_token(self, typ: str, tid: Any) -> None: - if not isinstance(tid, int) or tid < 0: + if not isinstance(tid, int): return + if tid < 0: + raise ValueError(f'invalid value for special token type {typ}: {tid}') if self.n_vocab is None or tid < self.n_vocab: if typ in self.special_token_ids: return From 800a489e4a8be199122259a995b1ee9dd7fae320 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 17 Dec 2023 19:38:41 +0200 Subject: [PATCH 099/811] llama.swiftui : add bench functionality (#4483) * llama.swiftui : add bench button * llama.swiftui : initial bench functionality * force to use n_gpu_layers on simulator * add download buttons & expose llamaState.loadModel * update project.pbxproj * comment #Preview & fix editorconfig check * gitignore : xcode stuff * llama.swiftui : UX improvements * llama.swiftui : avoid data copy via "downloadTask" * llama.swiftui : remove model from project * llama : remove "mostly" from model infos * llama.swiftui : improve bench --------- Co-authored-by: jhen --- .editorconfig | 3 + examples/llama.swiftui/.gitignore | 1 + .../llama.cpp.swift/LibLlama.swift | 182 +++- .../llama.swiftui.xcodeproj/project.pbxproj | 898 +++++++++--------- .../llama.swiftui/Models/LlamaState.swift | 52 +- .../llama.swiftui/UI/ContentView.swift | 114 ++- .../llama.swiftui/UI/DownloadButton.swift | 122 +++ 
llama.cpp | 33 +- 8 files changed, 895 insertions(+), 510 deletions(-) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift diff --git a/.editorconfig b/.editorconfig index a56e9ccc8..16d16b3b5 100644 --- a/.editorconfig +++ b/.editorconfig @@ -23,3 +23,6 @@ insert_final_newline = unset [examples/server/public/*] indent_size = 2 + +[examples/llama.swiftui/llama.swiftui.xcodeproj/*] +indent_style = tab diff --git a/examples/llama.swiftui/.gitignore b/examples/llama.swiftui/.gitignore index 9bce6af39..e585a2a4f 100644 --- a/examples/llama.swiftui/.gitignore +++ b/examples/llama.swiftui/.gitignore @@ -1 +1,2 @@ xcuserdata +xcshareddata diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 3754f0551..272e1fd8a 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -6,16 +6,34 @@ enum LlamaError: Error { case couldNotInitializeContext } +func llama_batch_clear(_ batch: inout llama_batch) { + batch.n_tokens = 0 +} + +func llama_batch_add(_ batch: inout llama_batch, _ id: llama_token, _ pos: llama_pos, _ seq_ids: [llama_seq_id], _ logits: Bool) { + batch.token [Int(batch.n_tokens)] = id + batch.pos [Int(batch.n_tokens)] = pos + batch.n_seq_id[Int(batch.n_tokens)] = Int32(seq_ids.count) + for i in 0.. LlamaContext { + static func create_context(path: String) throws -> LlamaContext { llama_backend_init(false) - let model_params = llama_model_default_params() + var model_params = llama_model_default_params() +#if targetEnvironment(simulator) + model_params.n_gpu_layers = 0 + print("Running on simulator, force use n_gpu_layers = 0") +#endif let model = llama_load_model_from_file(path, model_params) guard let model else { print("Could not load model at \(path)") throw LlamaError.couldNotInitializeContext } + + let n_threads = max(1, min(8, ProcessInfo.processInfo.processorCount - 2)) + print("Using \(n_threads) threads") + var ctx_params = llama_context_default_params() - ctx_params.seed = 1234 + ctx_params.seed = 1234 ctx_params.n_ctx = 2048 - ctx_params.n_threads = 8 - ctx_params.n_threads_batch = 8 + ctx_params.n_threads = UInt32(n_threads) + ctx_params.n_threads_batch = UInt32(n_threads) let context = llama_new_context_with_model(model, ctx_params) guard let context else { @@ -56,6 +83,26 @@ actor LlamaContext { return LlamaContext(model: model, context: context) } + func model_info() -> String { + let result = UnsafeMutablePointer.allocate(capacity: 256) + result.initialize(repeating: Int8(0), count: 256) + defer { + result.deallocate() + } + + // TODO: this is probably very stupid way to get the string from C + + let nChars = llama_model_desc(model, result, 256) + let bufferPointer = UnsafeBufferPointer(start: result, count: Int(nChars)) + + var SwiftString = "" + for char in bufferPointer { + SwiftString.append(Character(UnicodeScalar(UInt8(char)))) + } + + return SwiftString + } + func get_n_tokens() -> Int32 { return batch.n_tokens; } @@ -79,16 +126,11 @@ actor LlamaContext { print(String(cString: token_to_piece(token: id) + [0])) } - // batch = llama_batch_init(512, 0) // done in init() - batch.n_tokens = Int32(tokens_list.count) + llama_batch_clear(&batch) - for i1 in 0.. String { + var pp_avg: Double = 0 + var tg_avg: Double = 0 + + var pp_std: Double = 0 + var tg_std: Double = 0 + + for r in 0.. 
1 { + pp_std = sqrt(pp_std / Double(nr - 1) - pp_avg * pp_avg * Double(nr) / Double(nr - 1)) + tg_std = sqrt(tg_std / Double(nr - 1) - tg_avg * tg_avg * Double(nr) / Double(nr - 1)) + } else { + pp_std = 0 + tg_std = 0 + } + + let model_desc = model_info(); + let model_size = String(format: "%.2f GiB", Double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0); + let model_n_params = String(format: "%.2f B", Double(llama_model_n_params(model)) / 1e9); + let backend = "Metal"; + let pp_avg_str = String(format: "%.2f", pp_avg); + let tg_avg_str = String(format: "%.2f", tg_avg); + let pp_std_str = String(format: "%.2f", pp_std); + let tg_std_str = String(format: "%.2f", tg_std); + + var result = "" + + result += String("| model | size | params | backend | test | t/s |\n") + result += String("| --- | --- | --- | --- | --- | --- |\n") + result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | pp \(pp) | \(pp_avg_str) ± \(pp_std_str) |\n") + result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | tg \(tg) | \(tg_avg_str) ± \(tg_std_str) |\n") + + return result; + } + func clear() { tokens_list.removeAll() temporary_invalid_cchars.removeAll() + llama_kv_cache_clear(context) } private func tokenize(text: String, add_bos: Bool) -> [llama_token] { let utf8Count = text.utf8.count - let n_tokens = utf8Count + (add_bos ? 1 : 0) + let n_tokens = utf8Count + (add_bos ? 1 : 0) + 1 let tokens = UnsafeMutablePointer.allocate(capacity: n_tokens) let tokenCount = llama_tokenize(model, text, Int32(utf8Count), tokens, Int32(n_tokens), add_bos, false) diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index bc1fd15ce..2e6159928 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -1,481 +1,483 @@ // !$*UTF8*$! 
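// The bench() routine in the LibLlama.swift diff above accumulates sum(x) and sum(x^2) of the
// tokens/sec samples and reports the mean and the sample standard deviation,
// std = sqrt((sum(x^2) - n*mean^2) / (n - 1)). A sketch of the same computation follows
// (hypothetical helper, written in C++ here rather than Swift):

#include <cmath>
#include <vector>

struct bench_stats { double avg; double stddev; };

static bench_stats summarize(const std::vector<double> & samples) {
    if (samples.empty()) {
        return { 0.0, 0.0 };
    }
    const double n = (double) samples.size();
    double sum = 0.0, sum2 = 0.0;
    for (const double x : samples) { sum += x; sum2 += x*x; }
    const double avg    = sum / n;
    const double stddev = n > 1.0 ? std::sqrt((sum2 - n*avg*avg) / (n - 1.0)) : 0.0;
    return { avg, stddev };
}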
{ - archiveVersion = 1; - classes = { - }; - objectVersion = 56; - objects = { + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { /* Begin PBXBuildFile section */ - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; }; - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; }; - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; }; - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; - 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; - 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; - 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; - 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; - 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; - 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; - 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; - 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = 
PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; + 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; + 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; - 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; - 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; - 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; - 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType 
= sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; - 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; - 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; - 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; - 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; - 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = ""; }; - 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; - 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; - 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; - 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; - 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "llama-2-7b-chat.Q2_K.gguf"; sourceTree = ""; }; - 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; - 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; - 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; + 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = 
"ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; + 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; + 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; + 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; + 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; + 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; + 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; + 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; + 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; + 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = ""; }; + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; + 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType 
= folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; + 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ - 8A1C83702AC328BD0096AF73 /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, - 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C83702AC328BD0096AF73 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { - isa = PBXGroup; - children = ( - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, - 542376092B0D9C40008E6A1C /* ggml-backend.h */, - 542376062B0D9BEA008E6A1C /* ggml-quants.h */, - 542376072B0D9BFB008E6A1C /* ggml-quants.c */, - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, - 549479C62AC9E0F200E0F78B /* ggml-metal.h */, - 549479C52AC9E0F200E0F78B /* ggml-metal.m */, - 542EA09B2AC8723900A8AEE9 /* ggml.c */, - 542EA09C2AC8723900A8AEE9 /* ggml.h */, - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, - 542EA0A12AC8729100A8AEE9 /* llama.cpp */, - 542EA0A22AC8729100A8AEE9 /* llama.h */, - ); - name = llama.cpp; - sourceTree = ""; - }; - 8A1C836A2AC328BD0096AF73 = { - isa = PBXGroup; - children = ( - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, - 8A907F312AC7134E006146EA /* llama.cpp.swift */, - 8A3F84232AC4C891005E2EE8 /* models */, - 8A1C83752AC328BD0096AF73 /* llama.swiftui */, - 8A1C83742AC328BD0096AF73 /* Products */, - 8A39BE082AC7601000BFEB40 /* Frameworks */, - ); - sourceTree = ""; - }; - 8A1C83742AC328BD0096AF73 /* Products */ = { - isa = PBXGroup; - children = ( - 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */, - ); - name = Products; - sourceTree = ""; - }; - 8A1C83752AC328BD0096AF73 /* llama.swiftui */ = { - isa = PBXGroup; - children = ( - 8A3F84102AC4BD85005E2EE8 /* Resources */, - 8A9F7C4B2AC332DC008AE1EA /* Models */, - 8A9F7C4A2AC332BF008AE1EA /* UI */, - 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, - 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, - 8A1C837C2AC328BE0096AF73 /* Preview Content */, - ); - path = llama.swiftui; - sourceTree = ""; - }; - 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; - 8A39BE082AC7601000BFEB40 /* Frameworks */ = { - isa = PBXGroup; - children = ( - 
549479CA2AC9E16000E0F78B /* Metal.framework */, - 8A39BE092AC7601000BFEB40 /* Accelerate.framework */, - ); - name = Frameworks; - sourceTree = ""; - }; - 8A3F84102AC4BD85005E2EE8 /* Resources */ = { - isa = PBXGroup; - children = ( - 8A3F84112AC4BD8C005E2EE8 /* models */, - ); - path = Resources; - sourceTree = ""; - }; - 8A3F84112AC4BD8C005E2EE8 /* models */ = { - isa = PBXGroup; - children = ( - 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */, - ); - path = models; - sourceTree = ""; - }; - 8A907F312AC7134E006146EA /* llama.cpp.swift */ = { - isa = PBXGroup; - children = ( - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, - 8A907F322AC7134E006146EA /* LibLlama.swift */, - ); - path = llama.cpp.swift; - sourceTree = ""; - }; - 8A9F7C4A2AC332BF008AE1EA /* UI */ = { - isa = PBXGroup; - children = ( - 8A1C83782AC328BD0096AF73 /* ContentView.swift */, - ); - path = UI; - sourceTree = ""; - }; - 8A9F7C4B2AC332DC008AE1EA /* Models */ = { - isa = PBXGroup; - children = ( - 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */, - ); - path = Models; - sourceTree = ""; - }; + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { + isa = PBXGroup; + children = ( + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, + 542376092B0D9C40008E6A1C /* ggml-backend.h */, + 542376062B0D9BEA008E6A1C /* ggml-quants.h */, + 542376072B0D9BFB008E6A1C /* ggml-quants.c */, + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, + 549479C62AC9E0F200E0F78B /* ggml-metal.h */, + 549479C52AC9E0F200E0F78B /* ggml-metal.m */, + 542EA09B2AC8723900A8AEE9 /* ggml.c */, + 542EA09C2AC8723900A8AEE9 /* ggml.h */, + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, + 542EA0A12AC8729100A8AEE9 /* llama.cpp */, + 542EA0A22AC8729100A8AEE9 /* llama.h */, + ); + name = llama.cpp; + sourceTree = ""; + }; + 8A1C836A2AC328BD0096AF73 = { + isa = PBXGroup; + children = ( + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, + 8A907F312AC7134E006146EA /* llama.cpp.swift */, + 8A3F84232AC4C891005E2EE8 /* models */, + 8A1C83752AC328BD0096AF73 /* llama.swiftui */, + 8A1C83742AC328BD0096AF73 /* Products */, + 8A39BE082AC7601000BFEB40 /* Frameworks */, + ); + sourceTree = ""; + }; + 8A1C83742AC328BD0096AF73 /* Products */ = { + isa = PBXGroup; + children = ( + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */, + ); + name = Products; + sourceTree = ""; + }; + 8A1C83752AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXGroup; + children = ( + 8A3F84102AC4BD85005E2EE8 /* Resources */, + 8A9F7C4B2AC332DC008AE1EA /* Models */, + 8A9F7C4A2AC332BF008AE1EA /* UI */, + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, + 8A1C837C2AC328BE0096AF73 /* Preview Content */, + ); + path = llama.swiftui; + sourceTree = ""; + }; + 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { + isa = PBXGroup; + children = ( + 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 8A39BE082AC7601000BFEB40 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 549479CA2AC9E16000E0F78B /* Metal.framework */, + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 8A3F84102AC4BD85005E2EE8 /* Resources */ = { + isa = PBXGroup; + children = ( + 8A3F84112AC4BD8C005E2EE8 /* models */, + ); + path = Resources; + sourceTree = ""; + }; + 8A3F84112AC4BD8C005E2EE8 /* models */ = { + isa = PBXGroup; + children = ( + ); + path = models; + sourceTree = ""; + }; + 8A907F312AC7134E006146EA /* llama.cpp.swift 
*/ = { + isa = PBXGroup; + children = ( + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, + 8A907F322AC7134E006146EA /* LibLlama.swift */, + ); + path = llama.cpp.swift; + sourceTree = ""; + }; + 8A9F7C4A2AC332BF008AE1EA /* UI */ = { + isa = PBXGroup; + children = ( + 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, + 8A1C83782AC328BD0096AF73 /* ContentView.swift */, + ); + path = UI; + sourceTree = ""; + }; + 8A9F7C4B2AC332DC008AE1EA /* Models */ = { + isa = PBXGroup; + children = ( + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */, + ); + path = Models; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ - 8A1C83722AC328BD0096AF73 /* llama.swiftui */ = { - isa = PBXNativeTarget; - buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */; - buildPhases = ( - 8A1C836F2AC328BD0096AF73 /* Sources */, - 8A1C83702AC328BD0096AF73 /* Frameworks */, - 8A1C83712AC328BD0096AF73 /* Resources */, - ); - buildRules = ( - ); - dependencies = ( - ); - name = llama.swiftui; - packageProductDependencies = ( - ); - productName = llama.swiftui; - productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; - productType = "com.apple.product-type.application"; - }; + 8A1C83722AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXNativeTarget; + buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */; + buildPhases = ( + 8A1C836F2AC328BD0096AF73 /* Sources */, + 8A1C83702AC328BD0096AF73 /* Frameworks */, + 8A1C83712AC328BD0096AF73 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = llama.swiftui; + packageProductDependencies = ( + ); + productName = llama.swiftui; + productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; + productType = "com.apple.product-type.application"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ - 8A1C836B2AC328BD0096AF73 /* Project object */ = { - isa = PBXProject; - attributes = { - BuildIndependentTargetsInParallel = 1; - LastSwiftUpdateCheck = 1500; - LastUpgradeCheck = 1500; - TargetAttributes = { - 8A1C83722AC328BD0096AF73 = { - CreatedOnToolsVersion = 15.0; - LastSwiftMigration = 1500; - }; - }; - }; - buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */; - compatibilityVersion = "Xcode 14.0"; - developmentRegion = en; - hasScannedForEncodings = 0; - knownRegions = ( - en, - Base, - ); - mainGroup = 8A1C836A2AC328BD0096AF73; - packageReferences = ( - ); - productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */; - projectDirPath = ""; - projectRoot = ""; - targets = ( - 8A1C83722AC328BD0096AF73 /* llama.swiftui */, - ); - }; + 8A1C836B2AC328BD0096AF73 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1500; + LastUpgradeCheck = 1500; + TargetAttributes = { + 8A1C83722AC328BD0096AF73 = { + CreatedOnToolsVersion = 15.0; + LastSwiftMigration = 1500; + }; + }; + }; + buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 8A1C836A2AC328BD0096AF73; + packageReferences = ( + ); + productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 8A1C83722AC328BD0096AF73 /* 
llama.swiftui */, + ); + }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - 8A1C83712AC328BD0096AF73 /* Resources */ = { - isa = PBXResourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, - 8A3F84242AC4C891005E2EE8 /* models in Resources */, - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, - 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C83712AC328BD0096AF73 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, + 8A3F84242AC4C891005E2EE8 /* models in Resources */, + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ - 8A1C836F2AC328BD0096AF73 /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, - 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, - 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, - 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, - 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C836F2AC328BD0096AF73 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, + 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXSourcesBuildPhase section */ /* Begin XCBuildConfiguration section */ - 8A1C837F2AC328BE0096AF73 /* Debug */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - 
CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = dwarf; - ENABLE_STRICT_OBJC_MSGSEND = YES; - ENABLE_TESTABILITY = YES; - ENABLE_USER_SCRIPT_SANDBOXING = YES; - GCC_C_LANGUAGE_STANDARD = gnu17; - GCC_DYNAMIC_NO_PIC = NO; - GCC_NO_COMMON_BLOCKS = YES; - GCC_OPTIMIZATION_LEVEL = 0; - GCC_PREPROCESSOR_DEFINITIONS = ( - "DEBUG=1", - "$(inherited)", - ); - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 17.0; - LOCALIZATION_PREFERS_STRING_CATALOGS = YES; - MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; - MTL_FAST_MATH = YES; - ONLY_ACTIVE_ARCH = YES; - SDKROOT = iphoneos; - SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; - SWIFT_OPTIMIZATION_LEVEL = "-Onone"; - }; - name = Debug; - }; - 8A1C83802AC328BE0096AF73 /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; - ENABLE_NS_ASSERTIONS = NO; - ENABLE_STRICT_OBJC_MSGSEND = YES; - ENABLE_USER_SCRIPT_SANDBOXING = YES; - GCC_C_LANGUAGE_STANDARD = gnu17; - GCC_NO_COMMON_BLOCKS = YES; - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 17.0; - LOCALIZATION_PREFERS_STRING_CATALOGS = YES; - MTL_ENABLE_DEBUG_INFO = NO; - MTL_FAST_MATH = YES; - SDKROOT = iphoneos; - SWIFT_COMPILATION_MODE = wholemodule; - VALIDATE_PRODUCT = YES; - }; - name = Release; - }; - 8A1C83822AC328BE0096AF73 /* Debug */ = 
{ - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CLANG_ENABLE_MODULES = YES; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; - DEVELOPMENT_TEAM = STLSG3FG8Q; - ENABLE_PREVIEWS = YES; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; - SWIFT_OPTIMIZATION_LEVEL = "-Onone"; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Debug; - }; - 8A1C83832AC328BE0096AF73 /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CLANG_ENABLE_MODULES = YES; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; - DEVELOPMENT_TEAM = STLSG3FG8Q; - ENABLE_PREVIEWS = YES; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Release; - }; + 8A1C837F2AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + 
CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 8A1C83802AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + 
SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 8A1C83822AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 8A1C83832AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 8A1C837F2AC328BE0096AF73 /* Debug */, - 8A1C83802AC328BE0096AF73 /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; - 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 
8A1C83822AC328BE0096AF73 /* Debug */, - 8A1C83832AC328BE0096AF73 /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; + 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C837F2AC328BE0096AF73 /* Debug */, + 8A1C83802AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C83822AC328BE0096AF73 /* Debug */, + 8A1C83832AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ - }; - rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; + }; + rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; } diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index babc60cdc..3393eb242 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -3,24 +3,26 @@ import Foundation @MainActor class LlamaState: ObservableObject { @Published var messageLog = "" + @Published var cacheCleared = false private var llamaContext: LlamaContext? - private var modelUrl: URL? { - Bundle.main.url(forResource: "q8_0", withExtension: "gguf", subdirectory: "models") + private var defaultModelUrl: URL? { + Bundle.main.url(forResource: "ggml-model", withExtension: "gguf", subdirectory: "models") // Bundle.main.url(forResource: "llama-2-7b-chat", withExtension: "Q2_K.gguf", subdirectory: "models") } + init() { do { - try loadModel() + try loadModel(modelUrl: defaultModelUrl) } catch { messageLog += "Error!\n" } } - private func loadModel() throws { + func loadModel(modelUrl: URL?) 
throws { messageLog += "Loading model...\n" if let modelUrl { - llamaContext = try LlamaContext.createContext(path: modelUrl.path()) + llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" } else { messageLog += "Could not locate model\n" @@ -31,7 +33,7 @@ class LlamaState: ObservableObject { guard let llamaContext else { return } - messageLog += "Attempting to complete text...\n" + await llamaContext.completion_init(text: text) messageLog += "\(text)" @@ -42,4 +44,42 @@ class LlamaState: ObservableObject { await llamaContext.clear() messageLog += "\n\ndone\n" } + + func bench() async { + guard let llamaContext else { + return + } + + messageLog += "\n" + messageLog += "Running benchmark...\n" + messageLog += "Model info: " + messageLog += await llamaContext.model_info() + "\n" + + let t_start = DispatchTime.now().uptimeNanoseconds + await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up + let t_end = DispatchTime.now().uptimeNanoseconds + + let t_heat = Double(t_end - t_start) / 1_000_000_000.0 + messageLog += "Heat up time: \(t_heat) seconds, please wait...\n" + + // if more than 5 seconds, then we're probably running on a slow device + if t_heat > 5.0 { + messageLog += "Heat up time is too long, aborting benchmark\n" + return + } + + let result = await llamaContext.bench(pp: 512, tg: 128, pl: 1, nr: 3) + + messageLog += "\(result)" + messageLog += "\n" + } + + func clear() async { + guard let llamaContext else { + return + } + + await llamaContext.clear() + messageLog = "" + } } diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 0bd16a806..219bf4dc1 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -5,24 +5,97 @@ struct ContentView: View { @State private var multiLineText = "" + private static func cleanupModelCaches() { + // Delete all models (*.gguf) + let fileManager = FileManager.default + let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] + do { + let fileURLs = try fileManager.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil) + for fileURL in fileURLs { + if fileURL.pathExtension == "gguf" { + try fileManager.removeItem(at: fileURL) + } + } + } catch { + print("Error while enumerating files \(documentsUrl.path): \(error.localizedDescription)") + } + } + var body: some View { VStack { - ScrollView(.vertical) { + ScrollView(.vertical, showsIndicators: true) { Text(llamaState.messageLog) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .onTapGesture { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } } TextEditor(text: $multiLineText) - .frame(height: 200) + .frame(height: 80) .padding() .border(Color.gray, width: 0.5) - Button(action: { - sendText() - }) { - Text("Send") - .padding() - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) + + HStack { + Button("Send") { + sendText() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Bench") { + bench() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Clear") { + clear() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Copy") { + 
UIPasteboard.general.string = llamaState.messageLog + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + } + + VStack { + DownloadButton( + llamaState: llamaState, + modelName: "TinyLlama-1.1B (Q4_0)", + modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", + filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" + ) + .font(.system(size: 12)) + .padding(.top, 4) + + DownloadButton( + llamaState: llamaState, + modelName: "TinyLlama-1.1B (Q8_0)", + modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", + filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" + ) + .font(.system(size: 12)) + + Button("Clear downloaded models") { + ContentView.cleanupModelCaches() + llamaState.cacheCleared = true + } + .padding(8) + .font(.system(size: 12)) } } .padding() @@ -34,9 +107,20 @@ struct ContentView: View { multiLineText = "" } } + + func bench() { + Task { + await llamaState.bench() + } + } + + func clear() { + Task { + await llamaState.clear() + } + } } -/* -#Preview { - ContentView() -} -*/ + +//#Preview { +// ContentView() +//} diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift new file mode 100644 index 000000000..4bd75cb69 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -0,0 +1,122 @@ +import SwiftUI + +struct DownloadButton: View { + @ObservedObject private var llamaState: LlamaState + private var modelName: String + private var modelUrl: String + private var filename: String + + @State private var status: String + + @State private var downloadTask: URLSessionDownloadTask? + @State private var progress = 0.0 + @State private var observation: NSKeyValueObservation? + + private static func getFileURL(filename: String) -> URL { + FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename) + } + + private func checkFileExistenceAndUpdateStatus() { + } + + init(llamaState: LlamaState, modelName: String, modelUrl: String, filename: String) { + self.llamaState = llamaState + self.modelName = modelName + self.modelUrl = modelUrl + self.filename = filename + + let fileURL = DownloadButton.getFileURL(filename: filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + + private func download() { + status = "downloading" + print("Downloading model \(modelName) from \(modelUrl)") + guard let url = URL(string: modelUrl) else { return } + let fileURL = DownloadButton.getFileURL(filename: filename) + + downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in + if let error = error { + print("Error: \(error.localizedDescription)") + return + } + + guard let response = response as? 
HTTPURLResponse, (200...299).contains(response.statusCode) else { + print("Server error!") + return + } + + do { + if let temporaryURL = temporaryURL { + try FileManager.default.copyItem(at: temporaryURL, to: fileURL) + print("Writing to \(filename) completed") + + llamaState.cacheCleared = false + + status = "downloaded" + } + } catch let err { + print("Error: \(err.localizedDescription)") + } + } + + observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in + self.progress = progress.fractionCompleted + } + + downloadTask?.resume() + } + + var body: some View { + VStack { + if status == "download" { + Button(action: download) { + Text("Download " + modelName) + } + } else if status == "downloading" { + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("\(modelName) (Downloading \(Int(progress * 100))%)") + } + } else if status == "downloaded" { + Button(action: { + let fileURL = DownloadButton.getFileURL(filename: filename) + if !FileManager.default.fileExists(atPath: fileURL.path) { + download() + return + } + do { + try llamaState.loadModel(modelUrl: fileURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + }) { + Text("\(modelName) (Downloaded)") + } + } else { + Text("Unknown status") + } + } + .onDisappear() { + downloadTask?.cancel() + } + .onChange(of: llamaState.cacheCleared) { newValue in + if newValue { + downloadTask?.cancel() + let fileURL = DownloadButton.getFileURL(filename: filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + } + } +} + +// #Preview { +// DownloadButton( +// llamaState: LlamaState(), +// modelName: "TheBloke / TinyLlama-1.1B-1T-OpenOrca-GGUF (Q4_0)", +// modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", +// filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" +// ) +// } diff --git a/llama.cpp b/llama.cpp index f49214c13..fd9fd6ed9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2397,25 +2397,25 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { switch (ftype) { case LLAMA_FTYPE_ALL_F32: return "all F32"; - case LLAMA_FTYPE_MOSTLY_F16: return "mostly F16"; - case LLAMA_FTYPE_MOSTLY_Q4_0: return "mostly Q4_0"; - case LLAMA_FTYPE_MOSTLY_Q4_1: return "mostly Q4_1"; + case LLAMA_FTYPE_MOSTLY_F16: return "F16"; + case LLAMA_FTYPE_MOSTLY_Q4_0: return "Q4_0"; + case LLAMA_FTYPE_MOSTLY_Q4_1: return "Q4_1"; case LLAMA_FTYPE_MOSTLY_Q4_1_SOME_F16: - return "mostly Q4_1, some F16"; - case LLAMA_FTYPE_MOSTLY_Q5_0: return "mostly Q5_0"; - case LLAMA_FTYPE_MOSTLY_Q5_1: return "mostly Q5_1"; - case LLAMA_FTYPE_MOSTLY_Q8_0: return "mostly Q8_0"; + return "Q4_1, some F16"; + case LLAMA_FTYPE_MOSTLY_Q5_0: return "Q5_0"; + case LLAMA_FTYPE_MOSTLY_Q5_1: return "Q5_1"; + case LLAMA_FTYPE_MOSTLY_Q8_0: return "Q8_0"; // K-quants - case LLAMA_FTYPE_MOSTLY_Q2_K: return "mostly Q2_K"; - case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "mostly Q3_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "mostly Q3_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "mostly Q3_K - Large"; - case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "mostly Q4_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "mostly Q4_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "mostly Q5_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "mostly Q5_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q6_K: return "mostly Q6_K"; + case LLAMA_FTYPE_MOSTLY_Q2_K: return "Q2_K"; + case LLAMA_FTYPE_MOSTLY_Q3_K_S: 
return "Q3_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "Q3_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "Q3_K - Large"; + case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "Q4_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "Q4_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; default: return "unknown, may not work"; } @@ -2533,6 +2533,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); switch (hparams.n_layer) { + case 22: model.type = e_model::MODEL_1B; break; case 26: model.type = e_model::MODEL_3B; break; case 32: model.type = e_model::MODEL_7B; break; case 40: model.type = e_model::MODEL_13B; break; From b1306c439490c7fa4ec33594500d980d1e9e15e6 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 17 Dec 2023 20:16:23 +0200 Subject: [PATCH 100/811] readme : update hot topics --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index edbe6ba57..01aef2afc 100644 --- a/README.md +++ b/README.md @@ -10,11 +10,11 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- Collecting Apple Silicon performance stats: + - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167 + - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508 - Added Mixtral support: https://github.com/ggerganov/llama.cpp/pull/4406 -- **llama.h API change for handling KV cache offloading and data type: https://github.com/ggerganov/llama.cpp/pull/4309** -- Using `llama.cpp` with AWS instances: https://github.com/ggerganov/llama.cpp/discussions/4225 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 -- Collecting Apple Silicon performance stats: https://github.com/ggerganov/llama.cpp/discussions/4167 ---- From 2994f0c5a2e8c96955b422dedc93ec2595d16b82 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 17 Dec 2023 19:39:02 -0500 Subject: [PATCH 101/811] decode : fix logits_valid for legacy API (#4516) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index fd9fd6ed9..d6d575f9e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6184,7 +6184,7 @@ static int llama_decode_internal( logits_out.resize(n_vocab); memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab); #ifndef NDEBUG - logits_valid[n_tokens - 1] = true; + logits_valid[0] = true; #endif } } From 3c04bf6da89eaf4c7d317e0518f0687dfcbf2de7 Mon Sep 17 00:00:00 2001 From: hankcs Date: Mon, 18 Dec 2023 05:14:58 -0800 Subject: [PATCH 102/811] llama : fix try_override for bool_value which always return true (#4519) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index d6d575f9e..99facbf77 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1937,7 +1937,7 @@ namespace GGUFMeta { target = override->bool_value; return true; } - return true; + return false; } template From b9e74f9bca5fdf7d0a22ed25e7a9626335fdfa48 Mon Sep 17 00:00:00 2001 From: Ebey Abraham Date: Mon, 18 Dec 2023 17:27:47 +0000 Subject: [PATCH 103/811] llama : add phi-2 + fix NeoX rope + ggml_mul_mat_set_prec (#4490) * phi2 implementation * fix breaking change * phi-2 : various fixes * phi-2 : use layer norm eps * py : whitespaces * llama : fix meta KV override bug * convert : phi don't add 
BOS token * convert : revert "added_tokens_decoder" change * phi-2 : scale Q instead of KQ for better precision * ggml : fix NeoX rope to rotate just first n_dims * cuda : less diff in the rope_neox kernel * ggml : add ggml_mul_mat_set_prec ggml-ci * Update ggml-cuda.cu Co-authored-by: slaren * Update ggml-cuda.cu Co-authored-by: slaren * cuda : ggml_cuda_op_mul_mat_cublas support F32 precision * cuda : remove oboslete comment --------- Co-authored-by: Ebey Abraham Co-authored-by: Georgi Gerganov Co-authored-by: slaren --- convert-hf-to-gguf.py | 22 +++ ggml-cuda.cu | 117 +++++++++---- ggml-metal.metal | 13 +- ggml.c | 46 ++++- ggml.h | 12 ++ gguf-py/gguf/constants.py | 13 ++ gguf-py/gguf/tensor_mapping.py | 8 + llama.cpp | 307 +++++++++++++++++++++++++++++---- tests/test-backend-ops.cpp | 1 + 9 files changed, 463 insertions(+), 76 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index e46a7813a..e71a96c48 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -182,6 +182,8 @@ class Model: return QwenModel if model_architecture == "MixtralForCausalLM": return MixtralModel + if model_architecture == "PhiForCausalLM": + return Phi2Model return Model def _is_model_safetensors(self) -> bool: @@ -221,6 +223,8 @@ class Model: return gguf.MODEL_ARCH.QWEN if arch == "MixtralForCausalLM": return gguf.MODEL_ARCH.LLAMA + if arch == "PhiForCausalLM": + return gguf.MODEL_ARCH.PHI2 raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -980,6 +984,24 @@ class QwenModel(Model): print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") self.gguf_writer.add_tensor(new_name, data) + +class Phi2Model(Model): + def set_gguf_parameters(self): + block_count = self.hparams["n_layer"] + + self.gguf_writer.add_name("Phi2") + self.gguf_writer.add_context_length(self.hparams["n_positions"]) + self.gguf_writer.add_embedding_length(self.hparams["n_embd"]) + self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(self.hparams["n_head"]) + self.gguf_writer.add_head_count_kv(self.hparams["n_head"]) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"]) + self.gguf_writer.add_rope_dimension_count(self.hparams["rotary_dim"]) + self.gguf_writer.add_file_type(self.ftype) + self.gguf_writer.add_add_bos_token(False) + + ###### CONVERSION LOGIC ###### diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 0a63c1ecf..d0f3d8034 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -4998,7 +4998,16 @@ static __global__ void rope_neox( const int ib = col / n_dims; const int ic = col % n_dims; - const int i = row*ncols + ib*n_dims + ic/2; + if (ib > 0) { + const int i = row*ncols + ib*n_dims + ic; + + dst[i + 0] = x[i + 0]; + dst[i + 1] = x[i + 1]; + + return; + } + + const int i = row*ncols + ib*n_dims + ic/2; const int i2 = row/p_delta_rows; float cur_rot = inv_ndims * ic - ib; @@ -7057,6 +7066,7 @@ inline void ggml_cuda_op_upscale( (void) src1; (void) dst; + (void) src1_dd; } inline void ggml_cuda_op_pad( @@ -7073,6 +7083,7 @@ inline void ggml_cuda_op_pad( (void) src1; (void) dst; + (void) src1_dd; } inline void ggml_cuda_op_rms_norm( @@ -7376,7 +7387,7 @@ inline void ggml_cuda_op_mul_mat_cublas( const int compute_capability = g_compute_capabilities[id]; - if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1]) { + if (compute_capability >= CC_VOLTA && (src0->type == 
GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 half * src0_as_f16 = nullptr; size_t src0_as = 0; @@ -8300,27 +8311,27 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor } static __global__ void k_compute_batched_ptrs( - const half * src0_as_f16, const half * src1_as_f16, half * dst_f16, + const half * src0_as_f16, const half * src1_as_f16, char * dst, const void ** ptrs_src, void ** ptrs_dst, - int ne12, int ne13, - int ne23, - int nb02, int nb03, - int nb12, int nb13, - int nb2, int nb3, - int r2, int r3) { - int i13 = blockIdx.x * blockDim.x + threadIdx.x; - int i12 = blockIdx.y * blockDim.y + threadIdx.y; + int64_t ne12, int64_t ne13, + int64_t ne23, + size_t nb02, size_t nb03, + size_t nb12, size_t nb13, + size_t nbd2, size_t nbd3, + int64_t r2, int64_t r3) { + int64_t i13 = blockIdx.x * blockDim.x + threadIdx.x; + int64_t i12 = blockIdx.y * blockDim.y + threadIdx.y; if (i13 >= ne13 || i12 >= ne12) { return; } - int i03 = i13 / r3; - int i02 = i12 / r2; + int64_t i03 = i13 / r3; + int64_t i02 = i12 / r2; ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; - ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst_f16 + i12* nb2/2 + i13* nb3/2; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; } static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -8376,7 +8387,41 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + + half * dst_f16 = nullptr; + char * dst_t = nullptr; + + cublasComputeType_t cu_compute_type = CUBLAS_COMPUTE_16F; + cudaDataType_t cu_data_type = CUDA_R_16F; + + // dst strides + size_t nbd2 = dst->nb[2]; + size_t nbd3 = dst->nb[3]; + + const half alpha_f16 = 1.0f; + const half beta_f16 = 0.0f; + + const float alpha_f32 = 1.0f; + const float beta_f32 = 0.0f; + + const void * alpha = &alpha_f16; + const void * beta = &beta_f16; + + if (dst->op_params[0] == GGML_PREC_DEFAULT) { + dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + dst_t = (char *) dst_f16; + + nbd2 /= sizeof(float) / sizeof(half); + nbd3 /= sizeof(float) / sizeof(half); + } else { + dst_t = (char *) dst_ddf; + + cu_compute_type = CUBLAS_COMPUTE_32F; + cu_data_type = CUDA_R_32F; + + alpha = &alpha_f32; + beta = &beta_f32; + } GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -8385,9 +8430,6 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const int64_t r2 = ne12/ne02; const int64_t r3 = ne13/ne03; - const half alpha_f16 = 1.0f; - const half beta_f16 = 0.0f; - #if 0 // use cublasGemmEx { @@ -8397,12 +8439,12 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const int i02 = i12 / r2; CUBLAS_CHECK( - cublasGemmEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, + cublasGemmEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3] , CUDA_R_16F, nb01/sizeof(half), - (const char *) src1_as_f16 + i12*src1->nb[2]/2 + 
i13*src1->nb[3]/2, CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( char *) dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2, CUDA_R_16F, ne01, - CUBLAS_COMPUTE_16F, + alpha, (const char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3] , CUDA_R_16F, nb01/sizeof(half), + (const char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2, CUDA_R_16F, nb11/sizeof(float), + beta, ( char *) dst_t + i12*nbd2 + i13*nbd3, cu_data_type, ne01, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); } } @@ -8414,11 +8456,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA - (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB - &beta_f16, ( char *) dst_f16, CUDA_R_16F, ne01, dst->nb[2]/sizeof(float), // strideC + alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA + (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB + beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC ne12*ne13, - CUBLAS_COMPUTE_16F, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); } else { // use cublasGemmBatchedEx @@ -8435,24 +8477,24 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( - src0_as_f16, src1_as_f16, dst_f16, + src0_as_f16, src1_as_f16, dst_t, ptrs_src, ptrs_dst, ne12, ne13, ne23, nb02, nb03, nb12, nb13, - dst->nb[2], dst->nb[3], + nbd2, nbd3, r2, r3); CUDA_CHECK(cudaGetLastError()); CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( void **) (ptrs_dst + 0*ne23), CUDA_R_16F, ne01, + alpha, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + beta, ( void **) (ptrs_dst + 0*ne23), cu_data_type, ne01, ne23, - CUBLAS_COMPUTE_16F, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); if (ptrs_src_s != 0) { @@ -8464,11 +8506,14 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const } #endif - const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + if (dst->op_params[0] == GGML_PREC_DEFAULT) { + const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); + to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + + ggml_cuda_pool_free(dst_f16, dst_as); + } ggml_cuda_pool_free(src1_as_f16, src1_as); - ggml_cuda_pool_free(dst_f16, dst_as); } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { diff --git a/ggml-metal.metal b/ggml-metal.metal index fe0ada445..d5b54e112 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1702,8 +1702,9 @@ kernel void kernel_rope( dst_data[1] = x0*sin_theta + x1*cos_theta; } } else { - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 2*tiitg; ic < n_dims; ic += 2*tptg.x) { + for (int64_t ic = 2*tiitg; ic < ne0; ic += 2*tptg.x) { + if (ic < n_dims) { + const int64_t ib = 
0; // simplified from `(ib * n_dims + ic) * inv_ndims` const float cur_rot = inv_ndims*ic - ib; @@ -1722,6 +1723,14 @@ kernel void kernel_rope( dst_data[0] = x0*cos_theta - x1*sin_theta; dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta; + } else { + const int64_t i0 = ic; + + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } diff --git a/ggml.c b/ggml.c index ad546a731..6da65bd92 100644 --- a/ggml.c +++ b/ggml.c @@ -4098,6 +4098,14 @@ struct ggml_tensor * ggml_mul_mat( return result; } +void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec) { + const int32_t prec_i32 = (int32_t) prec; + + ggml_set_op_params_i32(a, 0, prec_i32); +} + // ggml_mul_mat_id struct ggml_tensor * ggml_mul_mat_id( @@ -9168,6 +9176,8 @@ static void ggml_compute_forward_norm_f32( float eps; memcpy(&eps, dst->op_params, sizeof(float)); + GGML_ASSERT(eps > 0.0f); + // TODO: optimize for (int64_t i03 = 0; i03 < ne03; i03++) { for (int64_t i02 = 0; i02 < ne02; i02++) { @@ -9237,6 +9247,8 @@ static void ggml_compute_forward_rms_norm_f32( float eps; memcpy(&eps, dst->op_params, sizeof(float)); + GGML_ASSERT(eps > 0.0f); + // TODO: optimize for (int64_t i03 = 0; i03 < ne03; i03++) { for (int64_t i02 = 0; i02 < ne02; i02++) { @@ -11562,10 +11574,13 @@ static void ggml_compute_forward_rope_f32( } } else { // TODO: this might be wrong for ne0 != n_dims - need double check - // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + // it seems we have to rope just the first n_dims elements and do nothing with the rest + // ref: https://github.com/ml-explore/mlx/blob/dc2edc762c797e3b8de50b1dad4dc0a131691033/benchmarks/python/llama_jax_bench.py#L11-L26 theta_base *= freq_scale; - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { + for (int64_t ic = 0; ic < ne0; ic += 2) { + if (ic < n_dims) { + const int64_t ib = 0; + // simplified from `(ib * n_dims + ic) * inv_ndims` float cur_rot = inv_ndims * ic - ib; @@ -11588,6 +11603,14 @@ static void ggml_compute_forward_rope_f32( dst_data[0] = x0*cos_theta - x1*sin_theta; dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta; + } else { + const int64_t i0 = ic; + + const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } @@ -11715,10 +11738,13 @@ static void ggml_compute_forward_rope_f16( } } else { // TODO: this might be wrong for ne0 != n_dims - need double check - // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + // it seems we have to rope just the first n_dims elements and do nothing with the rest + // ref: https://github.com/ml-explore/mlx/blob/dc2edc762c797e3b8de50b1dad4dc0a131691033/benchmarks/python/llama_jax_bench.py#L11-L26 theta_base *= freq_scale; - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { + for (int64_t ic = 0; ic < ne0; ic += 2) { + if (ic < n_dims) { + const int64_t ib = 0; + // simplified from `(ib * n_dims + ic) * inv_ndims` float cur_rot = inv_ndims * ic - ib; @@ -11741,6 +11767,14 @@ static void 
ggml_compute_forward_rope_f16( dst_data[0] = GGML_FP32_TO_FP16(x0*cos_theta - x1*sin_theta); dst_data[n_dims/2] = GGML_FP32_TO_FP16(x0*sin_theta + x1*cos_theta); + } else { + const int64_t i0 = ic; + + const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } diff --git a/ggml.h b/ggml.h index 68f7833b6..f1003984f 100644 --- a/ggml.h +++ b/ggml.h @@ -343,6 +343,12 @@ extern "C" { GGML_TYPE_COUNT, }; + // precision + enum ggml_prec { + GGML_PREC_DEFAULT, + GGML_PREC_F32, + }; + enum ggml_backend_type { GGML_BACKEND_CPU = 0, GGML_BACKEND_GPU = 10, @@ -1057,6 +1063,12 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + // change the precision of a matrix multiplication + // set to GGML_PREC_F32 for higher precision (useful for phi-2) + GGML_API void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec); + // indirect matrix multiplication // ggml_mul_mat_id(ctx, as, ids, id, b) ~= ggml_mul_mat(as[ids[id]], b) GGML_API struct ggml_tensor * ggml_mul_mat_id( diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 12133882b..390dca049 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -95,6 +95,7 @@ class MODEL_ARCH(IntEnum): BLOOM = auto() STABLELM = auto() QWEN = auto() + PHI2 = auto() class MODEL_TENSOR(IntEnum): @@ -140,6 +141,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.BLOOM: "bloom", MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", + MODEL_ARCH.PHI2: "phi2", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -350,6 +352,17 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_ARCH.GPT2: [ # TODO ], + MODEL_ARCH.PHI2: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ] # TODO } diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 0115ea1c6..6fcbdbc1c 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -17,6 +17,7 @@ class TensorNameMap: "tok_embeddings", # llama-pth "embeddings.word_embeddings", # bert "language_model.embedding.word_embeddings", # persimmon + "transformer.embd.wte", # phi2 ), # Token type embeddings @@ -41,6 +42,7 @@ class TensorNameMap: "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen "output", # llama-pth bloom "word_embeddings_for_head", # persimmon + "lm_head.linear", # phi2 ), # Output norm @@ -53,6 +55,7 @@ class TensorNameMap: "transformer.norm_f", # mpt "ln_f", # refact bloom qwen "language_model.encoder.final_layernorm", # persimmon + "lm_head.ln", # phi2 ), # Rope frequencies @@ -75,6 +78,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi + "transformer.h.{bid}.ln", # phi2 ), # Attention norm 2 @@ -90,6 +94,7 @@ class TensorNameMap: "transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "transformer.h.{bid}.mixer.Wqkv", # phi2 ), # Attention query @@ -128,6 +133,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert 
"transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "transformer.h.{bid}.mixer.out_proj", # phi2 ), # Rotary embeddings @@ -167,6 +173,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen + "transformer.h.{bid}.mlp.fc1", # phi2 ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -198,6 +205,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "transformer.h.{bid}.mlp.fc2", # phi2 ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index 99facbf77..edd2910b3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -195,6 +195,7 @@ enum llm_arch { LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, + LLM_ARCH_PHI2, LLM_ARCH_UNKNOWN, }; @@ -212,6 +213,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_BLOOM, "bloom" }, { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, + { LLM_ARCH_PHI2, "phi2" }, }; enum llm_kv { @@ -550,6 +552,19 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_PHI2, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, @@ -1420,6 +1435,7 @@ struct llama_model { struct ggml_tensor * output_norm; struct ggml_tensor * output_norm_b; struct ggml_tensor * output; + struct ggml_tensor * output_b; std::vector layers; @@ -2635,6 +2651,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_PHI2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_3B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -2987,7 +3012,7 @@ static void llm_load_tensors( (void) main_gpu; - enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; + enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; #ifdef GGML_USE_CUBLAS @@ -3630,7 +3655,73 @@ static void llm_load_tensors( } } } break; + case LLM_ARCH_PHI2: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); + + if (backend_norm == GGML_BACKEND_GPU) { + vram_weights += ggml_nbytes(model.output_norm); + vram_weights += 
ggml_nbytes(model.output_norm_b); + vram_weights += ggml_nbytes(model.output); + vram_weights += ggml_nbytes(model.output_b); + } + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + + if (backend == GGML_BACKEND_GPU) { + vram_weights += + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + + ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + + ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); + } + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -3991,6 +4082,7 @@ static struct ggml_tensor * llm_build_ffn( // if max_alibi_bias > 0 then apply ALiBi static struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, + const llama_model & model, const llama_hparams & hparams, const llama_kv_cache & kv, struct ggml_tensor * wo, @@ -4002,6 +4094,7 @@ static struct ggml_tensor * llm_build_kqv( int32_t n_tokens, int32_t n_kv, float max_alibi_bias, + float scale, const llm_build_cb & cb, int il) { const int64_t n_embd = hparams.n_embd; @@ -4024,6 +4117,12 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); cb(kq, "kq", il); + if (model.arch == LLM_ARCH_PHI2) { + // for this arch, we need to perform the KQ multiplication with F32 precision, otherwise we get NaNs + // ref: https://github.com/ggerganov/llama.cpp/pull/4490#issuecomment-1859055847 + ggml_mul_mat_set_prec(kq, GGML_PREC_F32); + } + if (max_alibi_bias > 0.0f) { // temporary branch until we figure out how to handle ggml_alibi through ggml_add kq = ggml_scale(ctx, kq, kq_scale); @@ -4043,7 +4142,7 @@ static struct ggml_tensor * llm_build_kqv( kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, 1.0f/sqrtf(float(n_embd_head))); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, scale); cb(kq, "kq_soft_max_ext", il); } @@ -4250,9 +4349,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = 
llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4433,9 +4532,9 @@ struct llm_build_context { // apply ALiBi for 13B model const float max_alibi_bias = model.type == MODEL_13B ? 8.0f : -1.0f; - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4557,9 +4656,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4657,9 +4756,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4866,9 +4965,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); // TODO: not tested, could be broken - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4957,9 +5056,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5054,9 +5153,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5148,9 +5247,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, cb, il); + Qcur, KQ_scale, KQ_mask, 
n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5261,9 +5360,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5320,15 +5419,15 @@ struct llm_build_context { cb(inpL, "inp_embd", -1); // inp_pos - contains the positions - struct ggml_tensor * inp_pos= ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); // KQ_scale - struct ggml_tensor * KQ_scale= ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask= ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed @@ -5378,9 +5477,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5422,6 +5521,122 @@ struct llm_build_context { ggml_build_forward_expand(gf, cur); + return gf; + } + struct ggml_cgraph * build_phi2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + struct ggml_tensor * cur; + struct ggml_tensor * attn_norm_output; + struct ggml_tensor * ffn_output; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // Q_scale + struct ggml_tensor * Q_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(Q_scale, "Q_scale", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + attn_norm_output = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(attn_norm_output, "attn_norm", il); + + // self-attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = 
ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + + Qcur = ggml_rope_custom( + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Qcur = ggml_scale(ctx0, Qcur, Q_scale); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, model, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + // FF + { + ffn_output = llm_build_ffn(ctx0, attn_norm_output, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(ffn_output, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_output); + cb(cur, "l_out", il); + + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + inpL = cur; + } + + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output_no_bias", -1); + + cur = ggml_add(ctx0, cur, model.output_b); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + return gf; } }; @@ -5437,7 +5652,7 @@ enum llm_offload_func_e { OFFLOAD_FUNC_FRC, // force offload OFFLOAD_FUNC_KQV, OFFLOAD_FUNC_NR, - OFFLOAD_FUNC_EMB, + OFFLOAD_FUNC_EMB, // embeddings OFFLOAD_FUNC_OUT, }; @@ -5522,6 +5737,7 @@ static const std::unordered_map k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) + { "Q_scale", OFFLOAD_FUNC_FRC }, { "KQ_scale", OFFLOAD_FUNC_FRC }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5606,6 +5822,7 @@ static const std::unordered_map k_offload_map { "l_out", OFFLOAD_FUNC }, { "result_norm", OFFLOAD_FUNC_EMB }, + { "result_output_no_bias", OFFLOAD_FUNC_EMB }, { "result_output", OFFLOAD_FUNC_OUT }, }; @@ -5623,6 +5840,7 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_tokens = false; bool alloc_inp_embd = false; bool alloc_inp_pos = false; + bool alloc_inp_Q_scale = false; bool alloc_inp_KQ_scale = false; bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; @@ -5690,7 +5908,7 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_pos = true; } - if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { + if (!alloc_inp_Q_scale && strcmp(name, "Q_scale") == 0) { ggml_allocr_alloc(lctx.alloc, cur); if (!ggml_allocr_is_measure(lctx.alloc)) { @@ -5698,6 +5916,23 @@ static struct ggml_cgraph * llama_build_graph( ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); } + alloc_inp_Q_scale = true; + } + + if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc)) { + const int64_t n_embd_head = model.hparams.n_embd_head(); + if (model.arch == LLM_ARCH_PHI2) { + // with phi2, we scale the Q to avoid precision issues + // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 + ggml_set_f32(cur, 1.0f); + } else { + ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + } + } + alloc_inp_KQ_scale = true; } @@ -5922,6 +6157,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_qwen(); } break; + case LLM_ARCH_PHI2: + { + result = llm.build_phi2(); + } break; default: GGML_ASSERT(false); } @@ -6055,12 +6294,16 @@ static int llama_decode_internal( ggml_allocr_alloc_graph(lctx.alloc, gf); - struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + // the output is always the last tensor in the graph + struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + GGML_ASSERT(strcmp(res->name, "result_output") == 0); + + // the embeddings could be the second to last tensor, or the third to last tensor struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; - - GGML_ASSERT(strcmp(res->name, "result_output") == 0); - GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); - + if (strcmp(embeddings->name, "result_norm") != 0) { + embeddings = gf->nodes[gf->n_nodes - 3]; + GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + } #ifdef GGML_USE_CUBLAS for (int i = 0; i < gf->n_leafs; i++) { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index df2c3fb6e..f04b9438a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1555,6 +1555,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_rope(type, { 64, 8, 10, 1}, 64, 2, 512)); // neox (falcon 40B) test_cases.emplace_back(new test_rope(type, { 64, 128, 10, 1}, 64, 2, 512)); // neox (falcon 40B) test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 20, 2, 512)); // neox (stablelm) + test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 32, 2, 512)); // neox (phi-2) } test_cases.emplace_back(new test_alibi()); From 6ff39b129d0281d045f83d515e51b7197b44b253 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 18 Dec 2023 20:05:12 +0200 Subject: [PATCH 104/811] llama.swiftui : add 
more models --- .../llama.cpp.swift/LibLlama.swift | 2 +- .../llama.swiftui/UI/ContentView.swift | 31 +++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 272e1fd8a..464fb3277 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -203,7 +203,7 @@ actor LlamaContext { var pp_std: Double = 0 var tg_std: Double = 0 - for r in 0.. Date: Mon, 18 Dec 2023 20:17:43 +0200 Subject: [PATCH 105/811] llama.swiftui : add tinyllama 1.1B F16 --- .../llama.swiftui/llama.swiftui/UI/ContentView.swift | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 9cbe8efd6..c78f107b3 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -91,6 +91,15 @@ struct ContentView: View { ) .font(.system(size: 12)) + DownloadButton( + llamaState: llamaState, + modelName: "TinyLlama-1.1B (F16, 2.2 GiB)", + modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", + filename: "tinyllama-1.1b-f16.gguf" + ) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) + DownloadButton( llamaState: llamaState, modelName: "Phi-2.7B (Q4_0, 1.6 GiB)", @@ -98,7 +107,6 @@ struct ContentView: View { filename: "phi-2-q4_0.gguf" ) .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -107,6 +115,7 @@ struct ContentView: View { filename: "phi-2-q8_0.gguf" ) .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -115,7 +124,6 @@ struct ContentView: View { filename: "mistral-7b-v0.1.Q4_0.gguf" ) .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) Button("Clear downloaded models") { ContentView.cleanupModelCaches() From a7aee47b98e45539d491071b25778b833b77e387 Mon Sep 17 00:00:00 2001 From: arlo-phoenix <140345165+arlo-phoenix@users.noreply.github.com> Date: Mon, 18 Dec 2023 22:33:45 +0100 Subject: [PATCH 106/811] ggml-cuda: Fix HIP build (#4528) regression of #4490 Adds defines for two new datatypes cublasComputeType_t, cudaDataType_t. Currently using deprecated hipblasDatatype_t since newer ones very recent. 
--- ggml-cuda.cu | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index d0f3d8034..f20846fef 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -31,6 +31,7 @@ #define CUDA_R_16F HIPBLAS_R_16F #define CUDA_R_32F HIPBLAS_R_32F #define __shfl_xor_sync(mask, var, laneMask, width) __shfl_xor(var, laneMask, width) +#define cublasComputeType_t hipblasDatatype_t //deprecated, new hipblasComputeType_t not in 5.6 #define cublasCreate hipblasCreate #define cublasGemmEx hipblasGemmEx #define cublasGemmBatchedEx hipblasGemmBatchedEx @@ -40,6 +41,7 @@ #define cublasSetStream hipblasSetStream #define cublasSgemm hipblasSgemm #define cublasStatus_t hipblasStatus_t +#define cudaDataType_t hipblasDatatype_t //deprecated, new hipblasDatatype not in 5.6 #define cudaDeviceCanAccessPeer hipDeviceCanAccessPeer #define cudaDeviceDisablePeerAccess hipDeviceDisablePeerAccess #define cudaDeviceEnablePeerAccess hipDeviceEnablePeerAccess From 328b83de23b33240e28f4e74900d1d06726f5eb1 Mon Sep 17 00:00:00 2001 From: Eric Sommerlade Date: Tue, 19 Dec 2023 16:17:01 +0000 Subject: [PATCH 107/811] ggml : fixed check for _MSC_VER (#4535) Co-authored-by: Eric Sommerlade --- ggml.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.h b/ggml.h index f1003984f..beacdc8be 100644 --- a/ggml.h +++ b/ggml.h @@ -303,7 +303,7 @@ extern "C" { #if defined(__ARM_NEON) && defined(__CUDACC__) typedef half ggml_fp16_t; -#elif defined(__ARM_NEON) +#elif defined(__ARM_NEON) && !defined(_MSC_VER) typedef __fp16 ggml_fp16_t; #else typedef uint16_t ggml_fp16_t; From 799fc2268989482054944c902874cca76337580f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 20 Dec 2023 15:41:22 +0100 Subject: [PATCH 108/811] CUDA: Faster Mixtral prompt processing (#4538) * CUDA: make MoE tensors contiguous for batch size>1 * Update ggml-cuda.cu Co-authored-by: slaren --------- Co-authored-by: slaren --- ggml-cuda.cu | 118 ++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 93 insertions(+), 25 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f20846fef..9f4b188cb 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7830,6 +7830,11 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { } #ifdef NDEBUG + for (int id = 0; id < g_device_count; ++id) { + CUDA_CHECK(ggml_cuda_set_device(id)); + CUDA_CHECK(cudaDeviceSynchronize()); + } + for (int id = 0; id < g_device_count; ++id) { CUDA_CHECK(ggml_cuda_set_device(id)); @@ -7881,8 +7886,6 @@ static void ggml_cuda_op_mul_mat( const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - ggml_cuda_set_peer_access(ne11); - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); @@ -8781,16 +8784,21 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + const int64_t nb11 = src1->nb[1]; + const int64_t nb1 = dst->nb[1]; + const struct ggml_tensor * ids = src0; const int32_t id = ((int32_t *) dst->op_params)[0]; const int32_t n_as = ((int32_t *) dst->op_params)[1]; std::vector ids_host(ggml_nbytes(ids)); + const cudaStream_t stream = g_cudaStreams[g_main_device][0]; + if (ids->backend == GGML_BACKEND_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; - CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - 
CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, stream)); + CUDA_CHECK(cudaStreamSynchronize(stream)); } else { memcpy(ids_host.data(), ids->data, ggml_nbytes(ids)); } @@ -8804,37 +8812,93 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.ne[1] = 1; - dst_row.ne[1] = 1; - - src1_row.nb[2] = src1_row.nb[1]; - dst_row.nb[2] = dst_row.nb[1]; - - src1_row.nb[3] = src1_row.nb[1]; - dst_row.nb[3] = dst_row.nb[1]; - src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; + char * src1_original = (char *) src1_extra->data_device[g_main_device]; + char * dst_original = (char *) dst_extra->data_device[g_main_device]; - for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { - //int32_t row_id; - //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - //CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + if (src1->ne[1] == 1) { + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + //int32_t row_id; + //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); + //CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); - const int32_t row_id = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + const int32_t row_id = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); - GGML_ASSERT(row_id >= 0 && row_id < n_as); + GGML_ASSERT(row_id >= 0 && row_id < n_as); - const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; - src1_row_extra.data_device[g_main_device] = (char *) src1_extra->data_device[g_main_device] + i01*src1->nb[1]; - src1_row.data = (char *) src1->data + i01*src1->nb[1]; + src1_row_extra.data_device[g_main_device] = src1_original + i01*src1->nb[1]; + src1_row.data = (char *) src1->data + i01*src1->nb[1]; // TODO why is this set? - dst_row_extra.data_device[g_main_device] = (char *) dst_extra->data_device[g_main_device] + i01*dst->nb[1]; - dst_row.data = (char *) dst->data + i01*dst->nb[1]; + dst_row_extra.data_device[g_main_device] = dst_original + i01*dst->nb[1]; + dst_row.data = (char *) dst->data + i01*dst->nb[1]; // TODO why is this set? 
- ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + } + } else { + size_t as_src1, as_dst; + char * src1_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(src1), &as_src1); + char * dst_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(dst), &as_dst); + + src1_row_extra.data_device[g_main_device] = src1_contiguous; + dst_row_extra.data_device[g_main_device] = dst_contiguous; + + for (int32_t row_id = 0; row_id < n_as; ++row_id) { + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + + int64_t num_src1_rows = 0; + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id_i = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + + if (row_id_i != row_id) { + continue; + } + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + + CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, + nb11, cudaMemcpyDeviceToDevice, stream)); + num_src1_rows++; + } + + if (num_src1_rows == 0) { + continue; + } + + src1_row.ne[1] = num_src1_rows; + dst_row.ne[1] = num_src1_rows; + + src1_row.nb[1] = nb11; + src1_row.nb[2] = num_src1_rows*nb11; + src1_row.nb[3] = num_src1_rows*nb11; + + dst_row.nb[1] = nb1; + dst_row.nb[2] = num_src1_rows*nb1; + dst_row.nb[3] = num_src1_rows*nb1; + + ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + + num_src1_rows = 0; + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id_i = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + + if (row_id_i != row_id) { + continue; + } + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + + CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, + nb1, cudaMemcpyDeviceToDevice, stream)); + num_src1_rows++; + } + } + + ggml_cuda_pool_free(src1_contiguous, as_src1); + ggml_cuda_pool_free(dst_contiguous, as_dst); } } @@ -9370,6 +9434,10 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ return false; } + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + ggml_cuda_set_peer_access(tensor->src[1]->ne[1]); + } + if (params->ith != 0) { return true; } From 1d7a1912cea2227f9a1a449758ed622c560542f9 Mon Sep 17 00:00:00 2001 From: LoganDark Date: Thu, 21 Dec 2023 01:59:27 -0800 Subject: [PATCH 109/811] Fix access violation in ggml_cuda_free_data if tensor->extra is NULL (#4554) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9f4b188cb..28d378784 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9091,7 +9091,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { } void ggml_cuda_free_data(struct ggml_tensor * tensor) { - if (!tensor || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { + if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { return; } From d3223afdad0ed2821a8ddf739c291cd410c92a11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 21 Dec 2023 17:34:17 +0100 Subject: [PATCH 110/811] llama : disable per-tensor info prints on model load (#4562) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index edd2910b3..90d860eb9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2083,7 +2083,7 @@ struct llama_model_loader { type_max = meta->type; } - LLAMA_LOG_INFO("%s: - tensor 
%4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); + // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); } switch (type_max) { From 139882392258671ffe5acdfcadc0bc08572d6eef Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 21 Dec 2023 18:02:30 +0100 Subject: [PATCH 111/811] cuda : replace asserts in wrong architecture checks with __trap (#4556) * cuda : replace asserts in wrong architecture checks with __trap * make bad_arch noreturn, remove returns --- ggml-cuda.cu | 82 +++++++++++++++++++++++----------------------------- 1 file changed, 36 insertions(+), 46 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 28d378784..e7c9dee45 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -512,6 +512,14 @@ static size_t g_scratch_offset = 0; static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr}; +[[noreturn]] +static __device__ void bad_arch() { + printf("ERROR: ggml-cuda was compiled without support for the current GPU architecture.\n"); + __trap(); + + (void) bad_arch; // suppress unused function warning +} + static __device__ __forceinline__ float warp_reduce_sum(float x) { #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { @@ -1972,8 +1980,7 @@ template static __device__ __forceinline__ float vec_dot_q4_0_q8_1_imp // second part effectively subtracts 8 from each quant value return d4 * (sumi * ds8f.x - (8*vdr/QI4_0) * ds8f.y); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2010,8 +2017,7 @@ template static __device__ __forceinline__ float vec_dot_q4_1_q8_1_imp // scale second part of sum by QI8_1/(vdr * QR4_1) to compensate for multiple threads adding it return sumi * d4d8 + m4s8 / (QI8_1 / (vdr * QR4_1)); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2046,8 +2052,7 @@ template static __device__ __forceinline__ float vec_dot_q5_0_q8_1_imp // second part effectively subtracts 16 from each quant value return d5 * (sumi * ds8f.x - (16*vdr/QI5_0) * ds8f.y); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2092,8 +2097,7 @@ template static __device__ __forceinline__ float vec_dot_q5_1_q8_1_imp return sumi*d5d8 + m5s8 / (QI5_1 / vdr); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2114,8 +2118,7 @@ template static __device__ __forceinline__ float vec_dot_q8_0_q8_1_imp return d8_0*d8_1 * sumi; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2145,8 +2148,7 @@ template static __device__ __forceinline__ float vec_dot_q8_1_q8_1_imp // scale second part of sum by QI8_1/ vdr to compensate for multiple threads adding it return sumi*d8d8 + m8s8 / (QI8_1 / vdr); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2181,8 +2183,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmvq( return dm2f.x*sumf_d - dm2f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2219,8 +2220,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmq( 
return d8 * (dm2f.x*sumi_d - dm2f.y*sumi_m); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2260,8 +2260,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmvq( return d3 * sumf; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2286,8 +2285,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmq( return d3*d8 * sumi; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2320,8 +2318,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_vmmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2354,8 +2351,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_mmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2395,8 +2391,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_impl_vmmq( return dm5f.x*sumf_d - dm5f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2429,8 +2424,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_impl_mmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2460,8 +2454,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmvq( return d*sumf; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2492,8 +2485,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmq( return d6 * sumf_d; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -3359,8 +3351,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1( return dall * sumf_d - dmin * sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A #endif @@ -3543,8 +3534,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1( return d * sumf_d; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A #endif @@ -3954,7 +3944,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4023,7 +4013,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_1_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4090,7 +4080,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4157,7 +4147,7 @@ mul_mat_q5_1( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_1_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4224,7 +4214,7 @@ template static __global__ void (vx, vy, dst, 
ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q8_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4291,7 +4281,7 @@ mul_mat_q2_K( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q2_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4360,7 +4350,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q3_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4429,7 +4419,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4496,7 +4486,7 @@ mul_mat_q5_K( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4565,7 +4555,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q6_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } From 66f35a2f48e1965a13835a523e677223dbf148be Mon Sep 17 00:00:00 2001 From: bobqianic <129547291+bobqianic@users.noreply.github.com> Date: Thu, 21 Dec 2023 17:06:44 +0000 Subject: [PATCH 112/811] cuda : better error message for ggml_get_rows (#4561) * Update ggml-cuda.cu * Update ggml-cuda.cu * Update ggml-cuda.cu --------- Co-authored-by: Georgi Gerganov --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e7c9dee45..1ca071d90 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6815,6 +6815,7 @@ static void ggml_cuda_op_get_rows( break; default: // TODO: k-quants + fprintf(stderr, "%s: unsupported type: %s\n", __func__, ggml_type_name(src0->type)); GGML_ASSERT(false); break; } From 880e352277fc017df4d5794f0c21c44e1eae2b84 Mon Sep 17 00:00:00 2001 From: howlger Date: Thu, 21 Dec 2023 18:07:34 +0100 Subject: [PATCH 113/811] py : open merges file as 'utf-8' (#4566) Otherwise, on Windows converting bling-phi-2-v0 () via convert-hf-to-gguf.py will fail with the following error: ``` Traceback (most recent call last): File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 1061, in model_instance.set_vocab() File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 52, in set_vocab self._set_vocab_gpt2() File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 264, in _set_vocab_gpt2 special_vocab = gguf.SpecialVocab(dir_model, load_merges=True) File "C:\Users\User\git\gguf\gguf\vocab.py", line 33, in __init__ self._load(Path(path)) File "C:\Users\User\git\gguf\gguf\vocab.py", line 81, in _load self._try_load_merges_txt(path) File "C:\Users\User\git\gguf\gguf\vocab.py", line 95, in _try_load_merges_txt for line in fp: File "C:\Users\User\miniconda3\envs\gguf\lib\encodings\cp1252.py", line 23, in decode return codecs.charmap_decode(input,self.errors,decoding_table)[0] UnicodeDecodeError: 'charmap' codec can't decode byte 0x81 in position 1415: character maps to ``` --- gguf-py/gguf/vocab.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index 76924d8f2..cd1942975 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -84,7 +84,7 @@ class SpecialVocab: merges_file = path / 'merges.txt' if not merges_file.is_file(): return False - with open(merges_file, 
'r') as fp: + with open(merges_file, 'r', encoding = 'utf-8') as fp: first_line = next(fp, '').strip() if not first_line.startswith('#'): fp.seek(0) From c083718c895b7c8c7fb2a4660643fb78d0c64dfd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 19:27:14 +0200 Subject: [PATCH 114/811] readme : update coding guidelines --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 01aef2afc..80ce194ca 100644 --- a/README.md +++ b/README.md @@ -982,6 +982,8 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m / - There are no strict rules for the code style, but try to follow the patterns in the code (indentation, spaces, etc.). Vertical alignment makes things more readable and easier to batch edit - Clean-up any trailing whitespaces, use 4 spaces for indentation, brackets on the same line, `void * ptr`, `int & a` - See [good first issues](https://github.com/ggerganov/llama.cpp/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) for tasks suitable for first contributions +- Tensors store data in row-major order. We refer to dimension 0 as columns, 1 as rows, 2 as matrices +- Matrix multiplication is unconventional: [`z = ggml_mul_mat(ctx, x, y)`](https://github.com/ggerganov/llama.cpp/blob/880e352277fc017df4d5794f0c21c44e1eae2b84/ggml.h#L1058-L1064) means `zT = x @ yT` ### Docs From 9154494808dc865475c59022c29060b4947a803b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 21 Dec 2023 18:42:59 +0100 Subject: [PATCH 115/811] CUDA: mul_mat_id always on GPU for batches >= 32 (#4553) --- ggml-cuda.cu | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 1ca071d90..036668bfd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8773,8 +8773,6 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s // TODO: mmq/mmv support #endif - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); - const int64_t nb11 = src1->nb[1]; const int64_t nb1 = dst->nb[1]; @@ -8803,13 +8801,21 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; + src1_row.backend = GGML_BACKEND_GPU; + dst_row.backend = GGML_BACKEND_GPU; + src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = (char *) src1_extra->data_device[g_main_device]; - char * dst_original = (char *) dst_extra->data_device[g_main_device]; + char * src1_original = src1->backend == GGML_BACKEND_CPU ? + (char *) src1->data : (char *) src1_extra->data_device[g_main_device]; + char * dst_original = dst->backend == GGML_BACKEND_CPU ? + (char *) dst->data : (char *) dst_extra->data_device[g_main_device]; if (src1->ne[1] == 1) { + GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); @@ -8837,6 +8843,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s src1_row_extra.data_device[g_main_device] = src1_contiguous; dst_row_extra.data_device[g_main_device] = dst_contiguous; + const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? 
+ cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? + cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; @@ -8851,7 +8862,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, - nb11, cudaMemcpyDeviceToDevice, stream)); + nb11, src1_kind, stream)); num_src1_rows++; } @@ -8883,7 +8894,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, - nb1, cudaMemcpyDeviceToDevice, stream)); + nb1, dst_kind, stream)); num_src1_rows++; } } @@ -8891,6 +8902,10 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_cuda_pool_free(src1_contiguous, as_src1); ggml_cuda_pool_free(dst_contiguous, as_dst); } + + if (dst->backend == GGML_BACKEND_CPU) { + CUDA_CHECK(cudaStreamSynchronize(stream)); + } } static void ggml_cuda_scale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -9289,7 +9304,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); - if (!any_on_device && tensor->op != GGML_OP_MUL_MAT) { + if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; } From 8fe03ffddaaa0ab5d48feaafe398151c9f22d4f6 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Thu, 21 Dec 2023 12:55:34 -0500 Subject: [PATCH 116/811] common : remove incorrect --model-draft default (#4568) --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 93d5483e4..b3425ab09 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -920,7 +920,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); printf(" -md FNAME, --model-draft FNAME\n"); - printf(" draft model for speculative decoding (default: %s)\n", params.model.c_str()); + printf(" draft model for speculative decoding\n"); printf(" -ld LOGDIR, --logdir LOGDIR\n"); printf(" path under which to save YAML logs (no logging if unset)\n"); printf(" --override-kv KEY=TYPE:VALUE\n"); From 562cf222b5129e40b312877e928eac3a02e4ec33 Mon Sep 17 00:00:00 2001 From: arlo-phoenix <140345165+arlo-phoenix@users.noreply.github.com> Date: Thu, 21 Dec 2023 20:13:25 +0100 Subject: [PATCH 117/811] ggml-cuda: Fix HIP build by adding define for __trap (#4569) Regression of 139882392258671ffe5acdfcadc0bc08572d6eef HIP doesn't have trap, only abort --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 036668bfd..61d92d7ef 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -80,6 +80,7 @@ #define cudaStreamWaitEvent(stream, event, flags) hipStreamWaitEvent(stream, event, flags) #define cudaStream_t hipStream_t #define cudaSuccess hipSuccess +#define __trap abort #else #include #include From 
0f630fbc924aaabeea6eaf466bb4b47d13015c3e Mon Sep 17 00:00:00 2001 From: Erik Garrison Date: Thu, 21 Dec 2023 13:45:32 -0600 Subject: [PATCH 118/811] cuda : ROCm AMD Unified Memory Architecture (UMA) handling (#4449) * AMD ROCm: handle UMA memory VRAM expansions This resolves #2797 by allowing ROCm AMD GPU users with a UMA to dynamically expand the VRAM allocated to the GPU. Without this, AMD ROCm users with shared CPU/GPU memory usually are stuck with the BIOS-set (or fixed) framebuffer VRAM, making it impossible to load more than 1-2 layers. Note that the model is duplicated in RAM because it's loaded once for the CPU and then copied into a second set of allocations that are managed by the HIP UMA system. We can fix this later. * clarify build process for ROCm on linux with cmake * avoid using deprecated ROCm hipMallocHost * keep simplifying the change required for UMA * cmake: enable UMA-compatible allocation when LLAMA_HIP_UMA=ON --- CMakeLists.txt | 4 ++++ README.md | 16 +++++++++------- ggml-cuda.cu | 5 +++++ 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e3cd43ab3..6fc6508c5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -91,6 +91,7 @@ set(LLAMA_CUDA_KQUANTS_ITER "2" CACHE STRING "llama: iters./thread per block for set(LLAMA_CUDA_PEER_MAX_BATCH_SIZE "128" CACHE STRING "llama: max. batch size for using peer access") option(LLAMA_HIPBLAS "llama: use hipBLAS" OFF) +option(LLAMA_HIP_UMA "llama: use HIP unified memory architecture" OFF) option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) @@ -377,6 +378,9 @@ if (LLAMA_HIPBLAS) if (${hipblas_FOUND} AND ${hip_FOUND}) message(STATUS "HIP and hipBLAS found") add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) + if (LLAMA_HIP_UMA) + add_compile_definitions(GGML_HIP_UMA) + endif() add_library(ggml-rocm OBJECT ggml-cuda.cu ggml-cuda.h) if (BUILD_SHARED_LIBS) set_target_properties(ggml-rocm PROPERTIES POSITION_INDEPENDENT_CODE ON) diff --git a/README.md b/README.md index 80ce194ca..73fe59bb4 100644 --- a/README.md +++ b/README.md @@ -432,14 +432,15 @@ Building the program with BLAS support may lead to some performance improvements ```bash make LLAMA_HIPBLAS=1 ``` - - Using `CMake` for Linux: + - Using `CMake` for Linux (assuming a gfx1030-compatible AMD GPU): ```bash - mkdir build - cd build - CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++ cmake .. -DLLAMA_HIPBLAS=ON - cmake --build . + CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++ \ + cmake -H. -Bbuild -DLLAMA_HIPBLAS=ON -DAMDGPU_TARGETS=gfx1030 -DCMAKE_BUILD_TYPE=Release \ + && cmake --build build -- -j 16 ``` - - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS): + On Linux it is also possible to use unified memory architecture (UMA) to share main memory between the CPU and integrated GPU by setting `-DLLAMA_HIP_UMA=ON"`. + However, this hurts performance for non-integrated GPUs. + - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS, and assuming a gfx1100-compatible AMD GPU): ```bash set PATH=%HIP_PATH%\bin;%PATH% mkdir build @@ -448,10 +449,11 @@ Building the program with BLAS support may lead to some performance improvements cmake --build . ``` Make sure that `AMDGPU_TARGETS` is set to the GPU arch you want to compile for. The above example uses `gfx1100` that corresponds to Radeon RX 7900XTX/XT/GRE. 
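 (For reference, the Linux example above targets `gfx1030`, which corresponds to Navi 21 cards such as the Radeon RX 6800/6800 XT/6900 XT.)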
You can find a list of targets [here](https://llvm.org/docs/AMDGPUUsage.html#processors) + Find your gpu version string by matching the most significant version information from `rocminfo | grep gfx | head -1 | awk '{print $2}'` with the list of processors, e.g. `gfx1035` maps to `gfx1030`. The environment variable [`HIP_VISIBLE_DEVICES`](https://rocm.docs.amd.com/en/latest/understand/gpu_isolation.html#hip-visible-devices) can be used to specify which GPU(s) will be used. - If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 or 11.0.0 on RDNA3. + If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 (e.g. gfx1030, gfx1031, or gfx1035) or 11.0.0 on RDNA3. The following compilation options are also available to tweak performance (yes, they refer to CUDA, not HIP, because it uses the same code as the cuBLAS version above): | Option | Legal values | Default | Description | diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 61d92d7ef..32603a8d1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -60,8 +60,13 @@ #define cudaGetDeviceProperties hipGetDeviceProperties #define cudaGetErrorString hipGetErrorString #define cudaGetLastError hipGetLastError +#ifdef GGML_HIP_UMA +#define cudaMalloc hipMallocManaged +#define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size) +#else #define cudaMalloc hipMalloc #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) +#endif #define cudaMemcpy hipMemcpy #define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyAsync hipMemcpyAsync From 56fa50819f7a3ca2128f63b81c17c08a4454479e Mon Sep 17 00:00:00 2001 From: Finn Voorhees Date: Thu, 21 Dec 2023 14:55:02 -0500 Subject: [PATCH 119/811] metal : fix `ggml_metal_log` vargs (#4373) From 31f27758faf4a4bd08101a57c7ec3a473f771f86 Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Thu, 21 Dec 2023 11:57:48 -0800 Subject: [PATCH 120/811] llama : allow getting n_batch from llama_context in c api (#4540) * allowed getting n_batch from llama_context in c api * changed to use `uint32_t` instead of `int` * changed to use `uint32_t` instead of `int` in `llama_n_ctx` * Update llama.h --------- Co-authored-by: Georgi Gerganov --- llama.cpp | 6 +++++- llama.h | 4 +++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 90d860eb9..63ebe581b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9532,10 +9532,14 @@ const llama_model * llama_get_model(const struct llama_context * ctx) { return &ctx->model; } -int llama_n_ctx(const struct llama_context * ctx) { +uint32_t llama_n_ctx(const struct llama_context * ctx) { return ctx->cparams.n_ctx; } +uint32_t llama_n_batch(const struct llama_context * ctx) { + return ctx->cparams.n_batch; +} + enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } diff --git a/llama.h b/llama.h index 15ab4f80e..0be4b1337 100644 --- a/llama.h +++ b/llama.h @@ -314,7 +314,9 @@ extern "C" { LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); - LLAMA_API int llama_n_ctx (const struct llama_context * ctx); + // TODO: become more consistent with returned int types across the API + LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); + LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); LLAMA_API enum 
llama_vocab_type llama_vocab_type(const struct llama_model * model); From d232aca5a73b290e218a2e48b91023d5e994203f Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 21 Dec 2023 21:07:46 +0100 Subject: [PATCH 121/811] llama : initial ggml-backend integration (#4520) * llama : initial ggml-backend integration * add ggml-metal * cuda backend can be used though ggml-backend with LLAMA_GGML_BACKEND_CUDA_TEST access all tensor data with ggml_backend_tensor_get/set * add ggml_backend_buffer_clear zero-init KV cache buffer * add ggml_backend_buffer_is_hos, used to avoid copies if possible when accesing tensor data * disable gpu backends with ngl 0 * more accurate mlock * unmap offloaded part of the model * use posix_fadvise64(.., POSIX_FADV_SEQUENTIAL) to improve performance with mmap * update quantize and lora * update session copy/set to use ggml-backend ggml-ci * use posix_fadvise instead of posix_fadvise64 * ggml_backend_alloc_ctx_tensors_from_buft : remove old print * llama_mmap::align_offset : use pointers instead of references for out parameters * restore progress_callback behavior * move final progress_callback call to load_all_data * cuda : fix fprintf format string (minor) * do not offload scales * llama_mmap : avoid unmapping the same fragments again in the destructor * remove unnecessary unmap * metal : add default log function that prints to stderr, cleanup code ggml-ci --------- Co-authored-by: Georgi Gerganov --- Makefile | 2 +- ggml-alloc.c | 16 +- ggml-backend-impl.h | 20 +- ggml-backend.c | 80 ++- ggml-backend.h | 7 + ggml-cuda.cu | 89 ++-- ggml-metal.h | 3 + ggml-metal.m | 228 +++++++-- ggml.c | 24 +- ggml.h | 13 +- llama.cpp | 1196 ++++++++++++++++++++----------------------- 11 files changed, 926 insertions(+), 752 deletions(-) diff --git a/Makefile b/Makefile index 8273f8400..512407a1d 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ test: $(TEST_TARGETS) ./$$test_target; \ fi; \ if [ $$? 
-ne 0 ]; then \ - printf 'Test $$test_target FAILED!\n\n' $$test_target; \ + printf 'Test %s FAILED!\n\n' $$test_target; \ failures=$$(( failures + 1 )); \ else \ printf 'Test %s passed.\n\n' $$test_target; \ diff --git a/ggml-alloc.c b/ggml-alloc.c index d3049efb4..a97436b17 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -449,11 +449,10 @@ static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool upd if (update_backend) { view->backend = view->view_src->backend; } - view->buffer = view->view_src->buffer; + // views are initialized in the alloc buffer rather than the view_src buffer + view->buffer = alloc->buffer; view->data = (char *)view->view_src->data + view->view_offs; - // FIXME: the view should be initialized by the owning buffer, but currently this breaks the CUDA backend - // due to the ggml_tensor_extra_gpu ring buffer overwriting the KV cache extras assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->buft == alloc->buffer->buft); if (!alloc->measure) { @@ -736,6 +735,10 @@ void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n) { } void ggml_allocr_free(ggml_allocr_t alloc) { + if (alloc == NULL) { + return; + } + ggml_gallocr_free(alloc->galloc); ggml_tallocr_free(alloc->talloc); free(alloc); @@ -775,7 +778,7 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (nbytes == 0) { - fprintf(stderr, "%s: no tensors to allocate\n", __func__); + // all the tensors in the context are already allocated return NULL; } @@ -789,6 +792,11 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } else { ggml_backend_view_init(buffer, t); } + } else { + if (t->view_src != NULL) { + // view of a pre-allocated tensor + ggml_backend_view_init(buffer, t); + } } } diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index f588af602..05859935a 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -20,6 +20,9 @@ extern "C" { size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend + // check if tensor data is in host memory + // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) + bool (*is_host) (ggml_backend_buffer_type_t buft); }; struct ggml_backend_buffer_type { @@ -31,15 +34,16 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - void (*free_buffer)(ggml_backend_buffer_t buffer); + void (*free_buffer) (ggml_backend_buffer_t buffer); //void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + void * (*get_base) (ggml_backend_buffer_t buffer); + void (*init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t 
size); + void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); // (optional) copy tensor between different buffer-type, allow for single-copy tranfers - void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); }; struct ggml_backend_buffer { @@ -78,7 +82,7 @@ extern "C" { void (*cpy_tensor_from_async)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); void (*cpy_tensor_to_async) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*synchronize) (ggml_backend_t backend); + void (*synchronize)(ggml_backend_t backend); // compute graph with a plan ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph); diff --git a/ggml-backend.c b/ggml-backend.c index 3a22cd085..0c8c9ec43 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -35,6 +35,13 @@ bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_ba return buft->iface.supports_backend(buft, backend); } +bool ggml_backend_buft_is_host(ggml_backend_buffer_type_t buft) { + if (buft->iface.is_host) { + return buft->iface.is_host(buft); + } + return false; +} + // backend buffer ggml_backend_buffer_t ggml_backend_buffer_init( @@ -94,6 +101,14 @@ size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct g return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type(buffer), tensor); } +void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + buffer->iface.clear(buffer, value); +} + +bool ggml_backend_buffer_is_host(ggml_backend_buffer_t buffer) { + return ggml_backend_buft_is_host(ggml_backend_buffer_type(buffer)); +} + ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer) { return buffer->buft; } @@ -378,7 +393,6 @@ static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { free(buffer->context); - GGML_UNUSED(buffer); } static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { @@ -411,6 +425,10 @@ static void ggml_backend_cpu_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, GGML_UNUSED(buffer); } +static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + memset(buffer->context, value, buffer->size); +} + static struct ggml_backend_buffer_i cpu_backend_buffer_i = { /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, /* .get_base = */ ggml_backend_cpu_buffer_get_base, @@ -419,6 +437,7 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i = { /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_cpu_buffer_clear, }; // for buffers from ptr, free is not called @@ -430,6 +449,7 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { /* 
.get_tensor = */ ggml_backend_cpu_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_cpu_buffer_clear, }; static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 @@ -455,20 +475,70 @@ static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_ty GGML_UNUSED(buft); } +static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + GGML_UNUSED(buft); +} + ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cpu = { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes /* .supports_backend = */ ggml_backend_cpu_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, }, /* .context = */ NULL, }; - return &ggml_backend_buffer_type_cpu; + return &ggml_backend_cpu_buffer_type; } +#ifdef GGML_USE_CPU_HBM + +// buffer type HBM + +#include + +static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { + hbw_free(buffer->context); +} + +static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + //void * ptr = hbw_malloc(size); + void * ptr; + int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); + if (result != 0) { + fprintf(stderr, "failed to allocate HBM buffer of size %zu\n", size); + return NULL; + } + + // FIXME: this is a hack to avoid having to implement a new buffer type + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .supports_backend = */ ggml_backend_cpu_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_cpu_buffer_type_hbm; +} +#endif + struct ggml_backend_cpu_context { int n_threads; void * work_data; @@ -505,7 +575,7 @@ static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); cpu_plan->cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); - cpu_plan->cgraph = *cgraph; + cpu_plan->cgraph = *cgraph; // FIXME: deep copy if (cpu_plan->cplan.work_size > 0) { cpu_plan->cplan.work_data = malloc(cpu_plan->cplan.work_size); @@ -1180,7 +1250,7 @@ void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml // utils void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - GGML_ASSERT(tensor->data == NULL); + //GGML_ASSERT(tensor->data == NULL); // views of pre-allocted tensors may have the data set, but still need to be initialized 
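// [Editorial sketch, not part of the patch series] The hunks above add ggml_backend_buffer_clear()
// and ggml_backend_buffer_is_host() to the backend buffer interface. Below is a minimal, hypothetical
// usage, assuming the ggml-backend/ggml-alloc headers from this series; it mirrors how the
// llama_kv_cache_init change later in this patch allocates and zero-initializes the KV cache buffer.
// The helper name alloc_ctx_buffer_zeroed is invented for illustration.

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"

static ggml_backend_buffer_t alloc_ctx_buffer_zeroed(struct ggml_context * ctx) {
    // allocate every tensor of the context in a single CPU backend buffer
    ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cpu_buffer_type());
    if (buf == NULL) {
        // per the ggml-alloc.c change above, NULL means the context's tensors are already allocated
        return NULL;
    }

    // zero-initialize the whole buffer, e.g. to avoid NaNs in padding regions
    ggml_backend_buffer_clear(buf, 0);

    // host buffers can be accessed directly through tensor->data, no staging copy needed
    if (ggml_backend_buffer_is_host(buf)) {
        // ... direct reads/writes of tensor data are safe here ...
    }

    return buf;
}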
GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); diff --git a/ggml-backend.h b/ggml-backend.h index 58d5ccae6..a9d2fddd7 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -21,6 +21,7 @@ extern "C" { GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); GGML_API size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); @@ -29,6 +30,8 @@ extern "C" { GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer); // @@ -76,6 +79,10 @@ extern "C" { GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); +#ifdef GGML_USE_CPU_HBM + GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); +#endif + // // Backend registry // diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 32603a8d1..f5e060d32 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9081,7 +9081,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { char * buf; CUDA_CHECK(cudaMalloc(&buf, size)); - char * buf_host = (char*)data + offset_split; + char * buf_host = (char *)data + offset_split; // set padding to 0 to avoid possible NaN values if (size > original_size) { @@ -9226,11 +9226,10 @@ void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset) ggml_tensor_extra_gpu * extra = ggml_cuda_alloc_temp_tensor_extra(); - const bool inplace = (tensor->src[0] != nullptr && tensor->src[0]->data == tensor->data) || - tensor->op == GGML_OP_VIEW; + const bool inplace = tensor->view_src != nullptr; - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; + if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { + ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; size_t view_offset = 0; if (tensor->op == GGML_OP_VIEW) { @@ -9317,7 +9316,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ if (tensor->op == GGML_OP_MUL_MAT) { if (tensor->src[0]->ne[3] != tensor->src[1]->ne[3]) { #ifndef NDEBUG - fprintf(stderr, "%s: cannot compute %s: src0->ne[3] = " PRId64 ", src1->ne[3] = " PRId64 " - fallback to CPU\n", __func__, tensor->name, tensor->src[0]->ne[3], tensor->src[1]->ne[3]); + fprintf(stderr, "%s: cannot compute %s: src0->ne[3] = %" PRId64 ", src1->ne[3] = %" PRId64 " - fallback to CPU\n", __func__, tensor->name, tensor->src[0]->ne[3], tensor->src[1]->ne[3]); #endif return false; } @@ -9523,7 +9522,7 @@ static void 
ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { - assert(tensor->view_src->buffer->buft == buffer->buft); // TODO + assert(tensor->view_src->buffer->buft == buffer->buft); tensor->backend = tensor->view_src->backend; tensor->extra = tensor->view_src->extra; return; @@ -9554,23 +9553,34 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g } static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - UNUSED(buffer); + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); } static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - UNUSED(buffer); + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); +} + +static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemset(ctx->dev_ptr, value, buffer->size)); } static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { @@ -9581,6 +9591,7 @@ static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { /* .get_tensor = */ ggml_backend_cuda_buffer_get_tensor, /* .cpy_tensor_from = */ NULL, /* .cpy_tensor_to = */ NULL, + /* .clear = */ ggml_backend_cuda_buffer_clear, }; // cuda buffer type @@ -9632,35 +9643,36 @@ static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_t UNUSED(buft); } -static ggml_backend_buffer_type_i cuda_backend_buffer_type_interface = { +static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { /* .alloc_buffer = */ ggml_backend_cuda_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cuda_buffer_type_get_alignment, /* .get_alloc_size = */ ggml_backend_cuda_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_cuda_buffer_type_supports_backend, + /* .is_host = */ nullptr, }; ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda[GGML_CUDA_MAX_DEVICES]; - static bool ggml_backend_buffer_type_cuda_initialized = false; - if 
(!ggml_backend_buffer_type_cuda_initialized) { + static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES]; + + static bool ggml_backend_cuda_buffer_type_initialized = false; + + if (!ggml_backend_cuda_buffer_type_initialized) { for (int i = 0; i < GGML_CUDA_MAX_DEVICES; i++) { - ggml_backend_buffer_type_cuda[i] = { - /* .iface = */ cuda_backend_buffer_type_interface, + ggml_backend_cuda_buffer_types[i] = { + /* .iface = */ ggml_backend_cuda_buffer_type_interface, /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i, }; } - ggml_backend_buffer_type_cuda_initialized = true; + ggml_backend_cuda_buffer_type_initialized = true; } - return &ggml_backend_buffer_type_cuda[device]; + return &ggml_backend_cuda_buffer_types[device]; } // host buffer type static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - CUDA_CHECK(cudaFreeHost(ctx->dev_ptr)); - delete ctx; + CUDA_CHECK(cudaFreeHost(buffer->context)); } static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { @@ -9673,24 +9685,21 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm buffer->iface.free_buffer = ggml_backend_cuda_host_buffer_free_buffer; return buffer; - - UNUSED(buft); } -struct ggml_backend_buffer_type_i cuda_backend_host_buffer_type_interface = { - /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, - /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, - /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, - /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, -}; - ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda_host = { - /* .iface = */ cuda_backend_host_buffer_type_interface, + static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, + /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, + /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type()->iface.is_host, + }, /* .context = */ nullptr, }; - return &ggml_backend_buffer_type_cuda_host; + return &ggml_backend_cuda_buffer_type_host; } // backend @@ -9722,8 +9731,6 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); @@ -9733,8 +9740,6 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) 
&& "unsupported buffer type"); - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); diff --git a/ggml-metal.h b/ggml-metal.h index bf52d9cd3..b5e02b668 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -98,7 +98,10 @@ GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); +GGML_API ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); + GGML_API void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb); + GGML_API ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); // helper to check if the device supports a specific family diff --git a/ggml-metal.m b/ggml-metal.m index 465679a6b..e60b93b36 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -180,7 +180,15 @@ struct ggml_metal_context { @implementation GGMLMetalClass @end -ggml_log_callback ggml_metal_log_callback = NULL; + +static void ggml_metal_default_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { + fprintf(stderr, "%s", msg); + + UNUSED(level); + UNUSED(user_data); +} + +ggml_log_callback ggml_metal_log_callback = ggml_metal_default_log_callback; void * ggml_metal_log_user_data = NULL; void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { @@ -607,12 +615,24 @@ int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx) { } // temporarily defined here for compatibility between ggml-backend and the old API -struct ggml_backend_metal_buffer_context { - void * data; + +struct ggml_backend_metal_buffer { + void * data; + size_t size; id metal; }; +struct ggml_backend_metal_buffer_context { + void * all_data; + size_t all_size; + bool owned; + + // multiple buffers are used only to avoid the maximum buffer size limitation when using mmap + int n_buffers; + struct ggml_backend_metal_buffer buffers[GGML_METAL_MAX_BUFFERS]; +}; + // finds the Metal buffer that contains the tensor data on the GPU device // the assumption is that there is 1-to-1 mapping between the host and device memory buffers, so we can find the // Metal buffer based on the host memory pointer @@ -622,17 +642,29 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru const int64_t tsize = ggml_nbytes(t); + ggml_backend_buffer_t buffer = t->view_src ? 
t->view_src->buffer : t->buffer; + // compatibility with ggml-backend - if (t->buffer && t->buffer->buft == ggml_backend_metal_buffer_type()) { - struct ggml_backend_metal_buffer_context * buf_ctx = (struct ggml_backend_metal_buffer_context *) t->buffer->context; + if (buffer && buffer->buft == ggml_backend_metal_buffer_type()) { + struct ggml_backend_metal_buffer_context * buf_ctx = (struct ggml_backend_metal_buffer_context *) buffer->context; - const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->data; + // find the view that contains the tensor fully + for (int i = 0; i < buf_ctx->n_buffers; ++i) { + const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->buffers[i].data; - GGML_ASSERT(ioffs >= 0 && ioffs + tsize <= (int64_t) t->buffer->size); + //GGML_METAL_LOG_INFO("ioffs = %10ld, tsize = %10ld, sum = %10ld, buf_ctx->buffers[%d].size = %10ld\n", ioffs, tsize, ioffs + tsize, i, buf_ctx->buffers[i].size); + if (ioffs >= 0 && ioffs + tsize <= (int64_t) buf_ctx->buffers[i].size) { + *offs = (size_t) ioffs; - *offs = (size_t) ioffs; + //GGML_METAL_LOG_INFO("%s: tensor '%16s', offs = %8ld\n", __func__, t->name, *offs); - return buf_ctx->metal; + return buf_ctx->buffers[i].metal; + } + } + + GGML_METAL_LOG_ERROR("%s: error: tensor '%s' buffer is nil\n", __func__, t->name); + + return nil; } // find the view that contains the tensor fully @@ -2361,6 +2393,7 @@ void ggml_metal_graph_compute( // backend interface +// default buffer static id g_backend_device = nil; static int g_backend_device_ref_count = 0; @@ -2388,34 +2421,31 @@ static void ggml_backend_metal_free_device(void) { static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; - return ctx->data; + return ctx->all_data; } static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; - [ctx->metal release]; + for (int i = 0; i < ctx->n_buffers; i++) { + [ctx->buffers[i].metal release]; + } ggml_backend_metal_free_device(); - free(ctx->data); - free(ctx); + if (ctx->owned) { + free(ctx->all_data); + } - UNUSED(buffer); + free(ctx); } static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy((char *)tensor->data + offset, data, size); UNUSED(buffer); } static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy(data, (const char *)tensor->data + offset, size); UNUSED(buffer); @@ -2433,7 +2463,13 @@ static void ggml_backend_metal_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer UNUSED(buffer); } -static struct ggml_backend_buffer_i metal_backend_buffer_i = { +static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + memset(ctx->all_data, value, ctx->all_size); +} + +static struct ggml_backend_buffer_i 
ggml_backend_metal_buffer_i = { /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, /* .get_base = */ ggml_backend_metal_buffer_get_base, /* .init_tensor = */ NULL, @@ -2441,8 +2477,11 @@ static struct ggml_backend_buffer_i metal_backend_buffer_i = { /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_metal_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_metal_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_metal_buffer_clear, }; +// default buffer type + static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2453,13 +2492,46 @@ static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_ba size_aligned += (size_page - (size_aligned % size_page)); } - ctx->data = ggml_metal_host_malloc(size); - ctx->metal = [ggml_backend_metal_get_device() newBufferWithBytesNoCopy:ctx->data + id device = ggml_backend_metal_get_device(); + + ctx->all_data = ggml_metal_host_malloc(size_aligned); + ctx->all_size = size_aligned; + ctx->owned = true; + ctx->n_buffers = 1; + + ctx->buffers[0].data = ctx->all_data; + ctx->buffers[0].size = size; + ctx->buffers[0].metal = [device newBufferWithBytesNoCopy:ctx->all_data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; - return ggml_backend_buffer_init(buft, metal_backend_buffer_i, ctx, size); + if (ctx->buffers[0].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + free(ctx); + ggml_backend_metal_free_device(); + return NULL; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); + + +#if TARGET_OS_OSX + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } +#else + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); +#endif + + + return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { @@ -2470,7 +2542,13 @@ static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_t static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_metal(backend) || ggml_backend_is_cpu(backend); - GGML_UNUSED(buft); + UNUSED(buft); +} + +static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + UNUSED(buft); } ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { @@ -2480,6 +2558,7 @@ ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes /* .supports_backend = */ ggml_backend_metal_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_metal_buffer_type_is_host, }, /* .context = */ NULL, }; @@ -2487,6 +2566,87 @@ ggml_backend_buffer_type_t 
ggml_backend_metal_buffer_type(void) { return &ggml_backend_buffer_type_metal; } +// buffer from ptr + +ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { + struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); + + ctx->all_data = data; + ctx->all_size = size; + ctx->owned = false; + ctx->n_buffers = 0; + + const size_t size_page = sysconf(_SC_PAGESIZE); + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += (size_page - (size_aligned % size_page)); + } + + id device = ggml_backend_metal_get_device(); + + // the buffer fits into the max buffer size allowed by the device + if (size_aligned <= device.maxBufferLength) { + ctx->buffers[ctx->n_buffers].data = data; + ctx->buffers[ctx->n_buffers].size = size; + + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + return false; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); + + ++ctx->n_buffers; + } else { + // this overlap between the views will guarantee that the tensor with the maximum size will fully fit into + // one of the views + const size_t size_ovlp = ((max_size + size_page - 1) / size_page + 1) * size_page; // round-up 2 pages just in case + const size_t size_step = device.maxBufferLength - size_ovlp; + const size_t size_view = device.maxBufferLength; + + for (size_t i = 0; i < size; i += size_step) { + const size_t size_step_aligned = (i + size_view <= size) ? 
size_view : (size_aligned - i); + + ctx->buffers[ctx->n_buffers].data = (void *) ((uint8_t *) data + i); + ctx->buffers[ctx->n_buffers].size = size_step_aligned; + + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:(void *) ((uint8_t *) data + i) length:size_step_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_step_aligned / 1024.0 / 1024.0); + return false; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, offs = %12ld", __func__, size_step_aligned / 1024.0 / 1024.0, i); + if (i + size_step < size) { + GGML_METAL_LOG_INFO("\n"); + } + + ++ctx->n_buffers; + } + } + +#if TARGET_OS_OSX + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } +#else + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); +#endif + + return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size); +} + +// backend + static const char * ggml_backend_metal_name(ggml_backend_t backend) { return "Metal"; @@ -2499,10 +2659,6 @@ static void ggml_backend_metal_free(ggml_backend_t backend) { free(backend); } -static void ggml_backend_metal_synchronize(ggml_backend_t backend) { - UNUSED(backend); -} - static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_metal_buffer_type(); @@ -2529,25 +2685,15 @@ static struct ggml_backend_i metal_backend_i = { /* .get_tensor_async = */ NULL, /* .cpy_tensor_from_async = */ NULL, /* .cpy_tensor_to_async = */ NULL, - /* .synchronize = */ ggml_backend_metal_synchronize, - /* .graph_plan_create = */ NULL, // the metal implementation does not require creating graph plans atm + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, /* .graph_plan_free = */ NULL, /* .graph_plan_compute = */ NULL, /* .graph_compute = */ ggml_backend_metal_graph_compute, /* .supports_op = */ ggml_backend_metal_supports_op, }; -// TODO: make a common log callback for all backends in ggml-backend -static void ggml_backend_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { - fprintf(stderr, "%s", msg); - - UNUSED(level); - UNUSED(user_data); -} - ggml_backend_t ggml_backend_metal_init(void) { - ggml_metal_log_set_callback(ggml_backend_log_callback, NULL); - struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); if (ctx == NULL) { diff --git a/ggml.c b/ggml.c index 6da65bd92..236148514 100644 --- a/ggml.c +++ b/ggml.c @@ -2383,20 +2383,8 @@ size_t ggml_get_mem_size(const struct ggml_context * ctx) { size_t ggml_get_max_tensor_size(const struct ggml_context * ctx) { size_t max_size = 0; - struct ggml_object * obj = ctx->objects_begin; - - while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { - struct ggml_tensor * tensor = (struct ggml_tensor *) ((char *) ctx->mem_buffer + obj->offs); - - const size_t size = ggml_nbytes(tensor); - - if (max_size < size) { - max_size = size; - } - } - - obj = obj->next; + for (struct ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor != 
NULL; tensor = ggml_get_next_tensor(ctx, tensor)) { + max_size = MAX(max_size, ggml_nbytes(tensor)); } return max_size; @@ -3093,7 +3081,7 @@ struct ggml_tensor * ggml_view_tensor( return result; } -struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx) { +struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx) { struct ggml_object * obj = ctx->objects_begin; char * const mem_buffer = ctx->mem_buffer; @@ -3109,7 +3097,7 @@ struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx) { return NULL; } -struct ggml_tensor * ggml_get_next_tensor(struct ggml_context * ctx, struct ggml_tensor * tensor) { +struct ggml_tensor * ggml_get_next_tensor(const struct ggml_context * ctx, struct ggml_tensor * tensor) { struct ggml_object * obj = (struct ggml_object *) ((char *)tensor - GGML_OBJECT_SIZE); obj = obj->next; @@ -19213,6 +19201,10 @@ char * gguf_get_tensor_name(const struct gguf_context * ctx, int i) { return ctx->infos[i].name.data; } +enum ggml_type gguf_get_tensor_type(const struct gguf_context * ctx, int i) { + return ctx->infos[i].type; +} + // returns the index static int gguf_get_or_add_key(struct gguf_context * ctx, const char * key) { const int idx = gguf_find_key(ctx, key); diff --git a/ggml.h b/ggml.h index beacdc8be..b17314897 100644 --- a/ggml.h +++ b/ggml.h @@ -735,8 +735,8 @@ extern "C" { GGML_API struct ggml_tensor * ggml_view_tensor(struct ggml_context * ctx, struct ggml_tensor * src); // Context tensor enumeration and lookup - GGML_API struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx); - GGML_API struct ggml_tensor * ggml_get_next_tensor (struct ggml_context * ctx, struct ggml_tensor * tensor); + GGML_API struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx); + GGML_API struct ggml_tensor * ggml_get_next_tensor (const struct ggml_context * ctx, struct ggml_tensor * tensor); GGML_API struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name); GGML_API struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); @@ -2135,10 +2135,11 @@ extern "C" { GGML_API const void * gguf_get_arr_data(const struct gguf_context * ctx, int key_id); GGML_API const char * gguf_get_arr_str (const struct gguf_context * ctx, int key_id, int i); - GGML_API int gguf_get_n_tensors (const struct gguf_context * ctx); - GGML_API int gguf_find_tensor (const struct gguf_context * ctx, const char * name); - GGML_API size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i); - GGML_API char * gguf_get_tensor_name (const struct gguf_context * ctx, int i); + GGML_API int gguf_get_n_tensors (const struct gguf_context * ctx); + GGML_API int gguf_find_tensor (const struct gguf_context * ctx, const char * name); + GGML_API size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i); + GGML_API char * gguf_get_tensor_name (const struct gguf_context * ctx, int i); + GGML_API enum ggml_type gguf_get_tensor_type (const struct gguf_context * ctx, int i); // overrides existing values or adds a new one GGML_API void gguf_set_val_u8 (struct gguf_context * ctx, const char * key, uint8_t val); diff --git a/llama.cpp b/llama.cpp index 63ebe581b..ba970ce8d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1,11 +1,12 @@ #define LLAMA_API_INTERNAL +//#define LLAMA_GGML_BACKEND_CUDA_TEST // for testing only - enables ggml-cuda through ggml-backend, disables partial offloading #include "llama.h" #include "unicode.h" #include "ggml.h" - #include "ggml-alloc.h" +#include "ggml-backend.h" #ifdef 
GGML_USE_CUBLAS # include "ggml-cuda.h" @@ -32,6 +33,7 @@ #include #if defined(_POSIX_MAPPED_FILES) #include + #include #endif #if defined(_POSIX_MEMLOCK_RANGE) #include @@ -712,38 +714,6 @@ static void ggml_graph_compute_helper(std::vector & buf, ggml_cgraph * // llama helpers // -inline void * llama_host_malloc(size_t n) { -#ifdef GGML_USE_CUBLAS - if (ggml_cublas_loaded()) { - return ggml_cuda_host_malloc(n); - } else { - return malloc(n); - } -#elif GGML_USE_METAL - return ggml_metal_host_malloc(n); -#elif GGML_USE_CPU_HBM - return hbw_malloc(n); -#else - return malloc(n); -#endif -} - -inline void llama_host_free(void * ptr) { -#ifdef GGML_USE_CUBLAS - if (ggml_cublas_loaded()) { - return ggml_cuda_host_free(ptr); - } else { - return free(ptr); - } -#elif GGML_USE_METAL - return ggml_metal_host_free(ptr); -#elif GGML_USE_CPU_HBM - return hbw_free(ptr); -#else - return free(ptr); -#endif -} - #if defined(_WIN32) static std::string llama_format_win_err(DWORD err) { LPSTR buf; @@ -758,40 +728,10 @@ static std::string llama_format_win_err(DWORD err) { } #endif -struct llama_buffer { - void * data = NULL; - size_t size = 0; - - // fallback to malloc / free - // useful in cases where CUDA can try to allocate PINNED memory - bool fallback = false; - - void resize(size_t n) { - llama_host_free(data); - - data = llama_host_malloc(n); - if (!data) { - fallback = true; - data = malloc(n); - } else { - fallback = false; - } - - GGML_ASSERT(data); - size = n; - } - - ~llama_buffer() { - if (data) { - if (fallback) { // NOLINT - free(data); - } else { - llama_host_free(data); - } - } - - data = NULL; - } +template +struct no_init { + T value; + no_init() { /* do nothing */ } }; struct llama_file { @@ -879,6 +819,9 @@ struct llama_mmap { #ifdef _POSIX_MAPPED_FILES static constexpr bool SUPPORTED = true; + // list of mapped fragments (first_offset, last_offset) + std::vector> mapped_fragments; + llama_mmap(struct llama_file * file, size_t prefetch = (size_t) -1 /* -1 = max value */, bool numa = false) { size = file->size; int fd = fileno(file->fp); @@ -886,17 +829,22 @@ struct llama_mmap { // prefetch/readahead impairs performance on NUMA systems if (numa) { prefetch = 0; } #ifdef __linux__ + // advise the kernel to read the file sequentially (increases readahead) + if (posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL)) { + LLAMA_LOG_WARN("warning: posix_fadvise(.., POSIX_FADV_SEQUENTIAL) failed: %s\n", + strerror(errno)); + } if (prefetch) { flags |= MAP_POPULATE; } #endif addr = mmap(NULL, file->size, PROT_READ, flags, fd, 0); - if (addr == MAP_FAILED) { + if (addr == MAP_FAILED) { // NOLINT throw std::runtime_error(format("mmap failed: %s", strerror(errno))); } if (prefetch > 0) { - // Advise the kernel to preload the mapped memory + // advise the kernel to preload the mapped memory if (posix_madvise(addr, std::min(file->size, prefetch), POSIX_MADV_WILLNEED)) { - fprintf(stderr, "warning: posix_madvise(.., POSIX_MADV_WILLNEED) failed: %s\n", + LLAMA_LOG_WARN("warning: posix_madvise(.., POSIX_MADV_WILLNEED) failed: %s\n", strerror(errno)); } } @@ -904,14 +852,81 @@ struct llama_mmap { // advise the kernel not to use readahead // (because the next page might not belong on the same node) if (posix_madvise(addr, file->size, POSIX_MADV_RANDOM)) { - fprintf(stderr, "warning: posix_madvise(.., POSIX_MADV_RANDOM) failed: %s\n", + LLAMA_LOG_WARN("warning: posix_madvise(.., POSIX_MADV_RANDOM) failed: %s\n", strerror(errno)); } } + + // initialize list of mapped_fragments + mapped_fragments.emplace_back(0, 
file->size); + } + + static void align_range(size_t * first, size_t * last, size_t page_size) { + // align first to the next page + size_t offset_in_page = *first & (page_size - 1); + size_t offset_to_page = offset_in_page == 0 ? 0 : page_size - offset_in_page; + *first += offset_to_page; + + // align last to the previous page + *last = *last & ~(page_size - 1); + + if (*last <= *first) { + *last = *first; + } + } + + // partially unmap the file in the range [first, last) + void unmap_fragment(size_t first, size_t last) { + // note: this function must not be called multiple times with overlapping ranges + // otherwise, there is a risk of invalidating addresses that have been repurposed for other mappings + int page_size = sysconf(_SC_PAGESIZE); + align_range(&first, &last, page_size); + size_t len = last - first; + + if (len == 0) { + return; + } + + GGML_ASSERT(first % page_size == 0); + GGML_ASSERT(last % page_size == 0); + GGML_ASSERT(last > first); + + void * next_page_start = (uint8_t *) addr + first; + + // unmap the range + if (munmap(next_page_start, len)) { + LLAMA_LOG_WARN("warning: munmap failed: %s\n", strerror(errno)); + } + + // update the list of mapped fragments to avoid unmapping the same range again in the destructor + std::vector> new_mapped_fragments; + for (const auto & frag : mapped_fragments) { + if (frag.first < first && frag.second > last) { + // the range is in the middle of the fragment, split it + new_mapped_fragments.emplace_back(frag.first, first); + new_mapped_fragments.emplace_back(last, frag.second); + } else if (frag.first < first && frag.second > first) { + // the range starts in the middle of the fragment + new_mapped_fragments.emplace_back(frag.first, first); + } else if (frag.first < last && frag.second > last) { + // the range ends in the middle of the fragment + new_mapped_fragments.emplace_back(last, frag.second); + } else if (frag.first >= first && frag.second <= last) { + // the range covers the entire fragment + } else { + // the range is outside the fragment + new_mapped_fragments.push_back(frag); + } + } + mapped_fragments = std::move(new_mapped_fragments); } ~llama_mmap() { - munmap(addr, size); + for (const auto & frag : mapped_fragments) { + if (munmap((char *) addr + frag.first, frag.second - frag.first)) { + LLAMA_LOG_WARN("warning: munmap failed: %s\n", strerror(errno)); + } + } } #elif defined(_WIN32) static constexpr bool SUPPORTED = true; @@ -959,6 +974,12 @@ struct llama_mmap { } } + void unmap_fragment(size_t first, size_t last) { + // not supported + GGML_UNUSED(first); + GGML_UNUSED(last); + } + ~llama_mmap() { if (!UnmapViewOfFile(addr)) { fprintf(stderr, "warning: UnmapViewOfFile failed: %s\n", @@ -975,6 +996,13 @@ struct llama_mmap { throw std::runtime_error(std::string("mmap not supported")); } + + void unmap(size_t offset, size_t len) { + (void) offset; + (void) len; + + throw std::runtime_error(std::string("mmap not supported")); + } #endif }; @@ -1148,6 +1176,26 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ return std::string(result.data(), result.size()); } +static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { +#ifdef GGML_USE_METAL + if (n_gpu_layers > 0) { + return ggml_backend_metal_buffer_type(); + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + if (n_gpu_layers > 0) { + return ggml_backend_cuda_buffer_type(0); + } +#elif defined(GGML_USE_CUBLAS) + return ggml_backend_cuda_host_buffer_type(); +#elif defined(GGML_USE_CPU_HBM) + 
return ggml_backend_cpu_hbm_buffer_type(); +#endif + + return ggml_backend_cpu_buffer_type(); + + GGML_UNUSED(n_gpu_layers); +} + // // globals // @@ -1348,14 +1396,10 @@ struct llama_kv_cache { struct ggml_context * ctx = NULL; - llama_buffer buf; + ggml_backend_buffer_t buf = NULL; ~llama_kv_cache() { - if (ctx) { - ggml_free(ctx); - } - -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { for (size_t i = 0; i < k_l.size(); ++i) { ggml_cuda_free_data(k_l[i]); @@ -1363,6 +1407,11 @@ struct llama_kv_cache { } } #endif + if (ctx) { + ggml_free(ctx); + } + + ggml_backend_buffer_free(buf); } }; @@ -1402,11 +1451,11 @@ struct llama_vocab { id special_suffix_id = 32008; id special_eot_id = 32010; - int find_bpe_rank(std::string token_left, std::string token_right) const { - GGML_ASSERT(token_left.find(" ") == std::string::npos); - GGML_ASSERT(token_left.find("\n") == std::string::npos); - GGML_ASSERT(token_right.find(" ") == std::string::npos); - GGML_ASSERT(token_right.find("\n") == std::string::npos); + int find_bpe_rank(const std::string & token_left, const std::string & token_right) const { + GGML_ASSERT(token_left.find(' ') == std::string::npos); + GGML_ASSERT(token_left.find('\n') == std::string::npos); + GGML_ASSERT(token_right.find(' ') == std::string::npos); + GGML_ASSERT(token_right.find('\n') == std::string::npos); auto it = bpe_ranks.find(std::make_pair(token_left, token_right)); if (it == bpe_ranks.end()) { @@ -1448,7 +1497,7 @@ struct llama_model { struct ggml_context * ctx = NULL; // the model memory buffer - llama_buffer buf; + ggml_backend_buffer_t buf = NULL; // model memory mapped file std::unique_ptr mapping; @@ -1464,11 +1513,7 @@ struct llama_model { int64_t t_start_us = 0; ~llama_model() { - if (ctx) { - ggml_free(ctx); - } - -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { for (size_t i = 0; i < tensors_by_name.size(); ++i) { ggml_cuda_free_data(tensors_by_name[i].second); @@ -1482,24 +1527,26 @@ struct llama_model { ggml_cl_free_data(tensors_by_name[i].second); } #endif + if (ctx) { + ggml_free(ctx); + } + + ggml_backend_buffer_free(buf); } }; struct llama_context { llama_context(const llama_model & model) : model(model), t_start_us(model.t_start_us), t_load_us(model.t_load_us) {} ~llama_context() { -#ifdef GGML_USE_METAL - if (ctx_metal) { - ggml_metal_free(ctx_metal); - } -#endif - if (alloc) { - ggml_allocr_free(alloc); - } + ggml_allocr_free(alloc); + ggml_backend_buffer_free(buf_alloc); + ggml_backend_free(backend); } llama_cparams cparams; + ggml_backend_t backend = nullptr; + const llama_model & model; // key + value cache for the self attention @@ -1530,18 +1577,13 @@ struct llama_context { // input embedding (1-dimensional array: [n_embd]) std::vector embedding; - // reusable buffer for `struct ggml_graph_plan.work_data` - std::vector work_buffer; - // memory buffers used to evaluate the model - llama_buffer buf_compute; - - llama_buffer buf_alloc; + std::vector buf_compute_meta; + ggml_backend_buffer_t buf_alloc = NULL; ggml_allocr * alloc = NULL; -#ifdef GGML_USE_METAL - ggml_metal_context * ctx_metal = NULL; -#endif + // temporary buffer for copying data to/from the backend + std::vector> buf_copy; #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -1563,9 +1605,6 @@ static bool llama_kv_cache_init( const uint32_t n_embd = hparams.n_embd_gqa(); const uint32_t n_layer = hparams.n_layer; - const int64_t n_mem 
= n_layer*n_ctx; - const int64_t n_elements = n_embd*n_mem; - cache.has_shift = false; cache.head = 0; @@ -1575,13 +1614,10 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - cache.buf.resize(ggml_row_size(ktype, n_elements) + ggml_row_size(vtype, n_elements) + 2u*n_layer*ggml_tensor_overhead()); - memset(cache.buf.data, 0, cache.buf.size); - struct ggml_init_params params; - params.mem_size = cache.buf.size; - params.mem_buffer = cache.buf.data; - params.no_alloc = false; + params.mem_size = 2u*n_layer*ggml_tensor_overhead(); + params.mem_buffer = NULL; + params.no_alloc = true; cache.ctx = ggml_init(params); @@ -1595,9 +1631,7 @@ static bool llama_kv_cache_init( cache.k_l.reserve(n_layer); cache.v_l.reserve(n_layer); - const int i_gpu_start = (int) n_layer - n_gpu_layers; GGML_UNUSED(i_gpu_start); - - GGML_UNUSED(offload); + const int i_gpu_start = (int) n_layer - n_gpu_layers; for (int i = 0; i < (int) n_layer; i++) { ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd*n_ctx); @@ -1606,23 +1640,35 @@ static bool llama_kv_cache_init( ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); cache.v_l.push_back(v); -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (i >= i_gpu_start) { if (offload) { ggml_cuda_assign_buffers_no_scratch(k); - vram_kv_cache += ggml_nbytes(k); ggml_cuda_assign_buffers_no_scratch(v); + vram_kv_cache += ggml_nbytes(k); vram_kv_cache += ggml_nbytes(v); + // HACK: mark tensor as allocated + k->data = v->data = (void *)(uintptr_t)1; } } #endif // GGML_USE_CUBLAS } + // allocate tensors + cache.buf = ggml_backend_alloc_ctx_tensors_from_buft(cache.ctx, llama_default_buffer_type(n_gpu_layers)); + + // buf may be NULL with full offload + if (cache.buf) { + // initialize the buffer to avoid NaNs in the padding + ggml_backend_buffer_clear(cache.buf, 0); + } + if (vram_kv_cache > 0) { LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); } - GGML_UNUSED(n_gpu_layers); + GGML_UNUSED(i_gpu_start); + GGML_UNUSED(offload); return true; } @@ -2073,14 +2119,13 @@ struct llama_model_loader { enum ggml_type type_max = GGML_TYPE_F32; for (int i = 0; i < n_tensors; i++) { - const char * name = gguf_get_tensor_name(ctx_gguf, i); - struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, name); + enum ggml_type type = gguf_get_tensor_type(ctx_gguf, i); - n_type[meta->type]++; + n_type[type]++; - if (n_type_max < n_type[meta->type]) { - n_type_max = n_type[meta->type]; - type_max = meta->type; + if (n_type_max < n_type[type]) { + n_type_max = n_type[type]; + type_max = type; } // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); @@ -2221,34 +2266,19 @@ struct llama_model_loader { return gguf_get_tensor_name(ctx_gguf, i); } - struct ggml_tensor * get_tensor_meta(int i) const { - return ggml_get_tensor(ctx_meta, get_tensor_name(i)); + struct ggml_tensor * get_tensor_meta(const char * name) const { + return ggml_get_tensor(ctx_meta, name); } - void calc_sizes(size_t & ctx_size_p, size_t & mmapped_size_p) const { - ctx_size_p = 0; - mmapped_size_p = 0; - - for (int i = 0; i < n_tensors; i++) { - struct ggml_tensor * meta = get_tensor_meta(i); - ctx_size_p += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE; - (use_mmap ? 
mmapped_size_p : ctx_size_p) += ggml_nbytes_pad(meta); - } + struct ggml_tensor * get_tensor_meta(int i) const { + return get_tensor_meta(get_tensor_name(i)); } struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta, ggml_backend_type backend) { - if (backend != GGML_BACKEND_CPU) { - ggml_set_no_alloc(ctx, true); - } - struct ggml_tensor * tensor = ggml_dup_tensor(ctx, meta); tensor->backend = backend; // TODO: ggml_set_backend ggml_set_name(tensor, ggml_get_name(meta)); - if (backend != GGML_BACKEND_CPU) { - ggml_set_no_alloc(ctx, use_mmap); - } - n_created++; return tensor; @@ -2306,90 +2336,137 @@ struct llama_model_loader { return gguf_get_data_offset(ctx_gguf) + gguf_get_tensor_offset(ctx_gguf, idx); } + void init_mapping(bool prefetch = true) { + /* + // prefetch only CPU tensors + if (use_mmap) { + size_t size_pref = 0; // prefetch + + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { + struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); + if (cur->backend == GGML_BACKEND_CPU) { + size_t tensor_end = gguf_get_tensor_offset(ctx_gguf, i) + ggml_nbytes(cur); + size_pref = std::max(size_pref, tensor_end); + } + } + mapping.reset(new llama_mmap(&file, gguf_get_data_offset(ctx_gguf) + size_pref, ggml_is_numa())); + } + */ + // prefetch the whole file - all the data is needed anyway + if (use_mmap) { + mapping.reset(new llama_mmap(&file, prefetch ? -1 : 0, ggml_is_numa())); + } + } + + // for backwards compatibility, does not support ggml-backend void load_data_for(struct ggml_tensor * cur) const { const size_t offs = file_offset(ggml_get_name(cur)); - if (use_mmap) { - cur->data = (uint8_t *) mapping->addr + offs; + if (use_mmap && mapping) { + GGML_ASSERT(cur->data == nullptr); + cur->data = (uint8_t *)mapping->addr + offs; } else { + GGML_ASSERT(cur->data != nullptr); file.seek(offs, SEEK_SET); file.read_raw(cur->data, ggml_nbytes(cur)); } } - void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, llama_mlock * lmlock) { + void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { size_t size_data = 0; - size_t size_lock = 0; - size_t size_pref = 0; // prefetch for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); size_data += ggml_nbytes(cur); - if (cur->backend == GGML_BACKEND_CPU) { - size_pref += ggml_nbytes(cur); - } } - if (use_mmap) { - mapping.reset(new llama_mmap(&file, size_pref, ggml_is_numa())); + if (use_mmap && buf_mmap) { if (lmlock) { lmlock->init(mapping->addr); } } - size_t done_size = 0; +#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) + const bool legacy_offload = true; +#else + const bool legacy_offload = false; +#endif + + std::vector> read_buf; + + size_t size_done = 0; + + size_t mmap_first = -1; + size_t mmap_last = 0; + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); GGML_ASSERT(cur); // unused tensors should have been caught by load_data already if (progress_callback) { - progress_callback((float) done_size / size_data, progress_callback_user_data); + progress_callback((float) size_done / size_data, progress_callback_user_data); } - // allocate temp buffer if 
not using mmap - if (!use_mmap && cur->data == NULL) { - GGML_ASSERT(cur->backend != GGML_BACKEND_CPU); - #ifdef GGML_USE_CPU_HBM - cur->data = (uint8_t*)hbw_malloc(ggml_nbytes(cur)); - #else - cur->data = (uint8_t*)malloc(ggml_nbytes(cur)); - #endif - } + const size_t offs = file_offset(ggml_get_name(cur)); - load_data_for(cur); - - switch (cur->backend) { - case GGML_BACKEND_CPU: - if (use_mmap && lmlock) { - size_lock += ggml_nbytes(cur); - lmlock->grow_to(size_lock); + if (!legacy_offload || cur->backend == GGML_BACKEND_CPU) { + if (use_mmap && mapping) { + if (buf_mmap) { + ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); + if (lmlock) { + lmlock->grow_to(offs + ggml_nbytes(cur)); + } + mmap_first = std::min(mmap_first, offs); + mmap_last = std::max(mmap_last, offs + ggml_nbytes(cur)); + } else { + ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, ggml_nbytes(cur)); } - break; -#ifdef GGML_USE_CUBLAS - case GGML_BACKEND_GPU: - case GGML_BACKEND_GPU_SPLIT: - // old code: - //ggml_cuda_transform_tensor(lt.data, lt.ggml_tensor); - - // TODO: test if this works !! - ggml_cuda_transform_tensor(cur->data, cur); - if (!use_mmap) { - free(cur->data); + } else { + if (ggml_backend_buffer_is_host(cur->buffer)) { + file.seek(offs, SEEK_SET); + file.read_raw(cur->data, ggml_nbytes(cur)); + } else { + read_buf.resize(ggml_nbytes(cur)); + file.seek(offs, SEEK_SET); + file.read_raw(read_buf.data(), ggml_nbytes(cur)); + ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); } - break; + } + } else { + // HACK: mark tensor as allocated + cur->data = (void *)(uintptr_t)1; + void * data; + if (use_mmap && mapping) { + data = (uint8_t *) mapping->addr + offs; + } else { + read_buf.resize(ggml_nbytes(cur)); + file.seek(offs, SEEK_SET); + file.read_raw(read_buf.data(), ggml_nbytes(cur)); + data = read_buf.data(); + } + +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + ggml_cuda_transform_tensor(data, cur); #elif defined(GGML_USE_CLBLAST) - case GGML_BACKEND_GPU: - ggml_cl_transform_tensor(cur->data, cur); - if (!use_mmap) { - free(cur->data); - } - break; + GGML_ASSERT(cur->backend == GGML_BACKEND_GPU); + ggml_cl_transform_tensor(data, cur); +#else + GGML_ASSERT(!"GPU tensor without a GPU backend"); + GGML_UNUSED(data); #endif - default: - continue; } - done_size += ggml_nbytes(cur); + size_done += ggml_nbytes(cur); + } + + // unmap offloaded tensors and metadata + if (use_mmap && mapping) { + mapping->unmap_fragment(0, mmap_first); + mapping->unmap_fragment(mmap_last, mapping->size); + } + + if (progress_callback) { + progress_callback(1.0f, progress_callback_user_data); } } }; @@ -2983,25 +3060,16 @@ static void llm_load_tensors( model.n_gpu_layers = n_gpu_layers; - size_t ctx_size; - size_t mmapped_size; + size_t ctx_size = ggml_tensor_overhead() * ml.n_tensors; - ml.calc_sizes(ctx_size, mmapped_size); - - LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); + LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); // create the ggml context { - model.buf.resize(ctx_size); - if (use_mlock) { - model.mlock_buf.init (model.buf.data); - model.mlock_buf.grow_to(model.buf.size); - } - struct ggml_init_params params = { - /*.mem_size =*/ model.buf.size, - /*.mem_buffer =*/ model.buf.data, - /*.no_alloc =*/ ml.use_mmap, + /*.mem_size =*/ ctx_size, + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, }; model.ctx = ggml_init(params); @@ -3015,22 +3083,21 @@ static void 
llm_load_tensors( enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); ggml_cuda_set_main_device(main_gpu); - llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload = GGML_BACKEND_GPU; llama_backend_offload_split = GGML_BACKEND_GPU_SPLIT; } #elif defined(GGML_USE_CLBLAST) LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); - llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload = GGML_BACKEND_GPU; llama_backend_offload_split = GGML_BACKEND_GPU; #endif - // prepare memory for the weights - size_t vram_weights = 0; + // create tensors for the weights { const int64_t n_embd = hparams.n_embd; const int64_t n_embd_gqa = hparams.n_embd_gqa(); @@ -3059,13 +3126,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3115,28 +3175,6 @@ static void llm_load_tensors( layer.ffn_up_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); } } - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + - (layer.bq ? ggml_nbytes(layer.bq) : 0) + - (layer.bk ? ggml_nbytes(layer.bk) : 0) + - (layer.bv ? ggml_nbytes(layer.bv) : 0) + - (layer.bo ? 
ggml_nbytes(layer.bo) : 0) + - ggml_nbytes(layer.ffn_norm); - - if (layer.ffn_gate_inp == nullptr) { - vram_weights += - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } else { - vram_weights += ggml_nbytes(layer.ffn_gate_inp); - for (uint32_t x = 0; x < hparams.n_expert; ++x) { - vram_weights += - ggml_nbytes(layer.ffn_gate_exp[x]) + ggml_nbytes(layer.ffn_down_exp[x]) + ggml_nbytes(layer.ffn_up_exp[x]); - } - } - } } } break; case LLM_ARCH_BAICHUAN: @@ -3156,13 +3194,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3189,19 +3220,10 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_FALCON: { - // TODO: CPU-only for now - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); // output @@ -3220,14 +3242,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3248,11 +3262,6 @@ static void llm_load_tensors( if (gguf_find_tensor(ml.ctx_gguf, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i).c_str()) >= 0) { layer.attn_norm_2 = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}, backend); layer.attn_norm_2_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(layer.attn_norm_2); - vram_weights += ggml_nbytes(layer.attn_norm_2_b); - } } layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); @@ -3260,13 +3269,6 @@ static void llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - 
ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.wo) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_STARCODER: @@ -3290,14 +3292,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3329,16 +3323,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b); - } } } break; case LLM_ARCH_PERSIMMON: @@ -3360,14 +3344,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3397,8 +3373,6 @@ static void llm_load_tensors( } break; case LLM_ARCH_BLOOM: { - // TODO: CPU-only for now - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); @@ -3419,14 +3393,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3458,16 +3424,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) 
{ - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); - } } } break; case LLM_ARCH_MPT: @@ -3489,13 +3445,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3518,16 +3467,6 @@ static void llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + - ggml_nbytes(layer.wqkv) + - ggml_nbytes(layer.wo) + - ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_down) + - ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_STABLELM: @@ -3550,13 +3489,6 @@ static void llm_load_tensors( model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3588,13 +3520,6 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_QWEN: @@ -3614,14 +3539,7 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } - } + } const uint32_t n_ff = hparams.n_ff / 2; @@ -3646,13 +3564,6 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, 
backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_gate) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_PHI2: @@ -3676,13 +3587,6 @@ static void llm_load_tensors( model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - vram_weights += ggml_nbytes(model.output); - vram_weights += ggml_nbytes(model.output_b); - } } const uint32_t n_ff = hparams.n_ff; @@ -3711,15 +3615,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); - } } } break; default: @@ -3729,16 +3624,78 @@ static void llm_load_tensors( ml.done_getting_tensors(); + ml.init_mapping(); + + // allocate tensors + size_t vram_weights = 0; + size_t buf_size = 0; + + ggml_backend_buffer_type_t buft = llama_default_buffer_type(n_gpu_layers); + + for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { + // GGML_BACKEND_GPU tensors are for CUDA and OpenCL only, which are handled separately without ggml-backend + if (t->backend == GGML_BACKEND_CPU) { + buf_size += GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), ggml_backend_buft_get_alignment(buft)); + } else { + vram_weights += ggml_nbytes(t); + } + } + + // create backend buffer + ggml_backend_buffer_t buf_mmap = nullptr; + +#ifdef GGML_USE_METAL + if (n_gpu_layers > 0) { + if (ml.use_mmap) { + const size_t max_size = ggml_get_max_tensor_size(ctx); + model.buf = ggml_backend_metal_buffer_from_ptr(ml.mapping->addr, ml.mapping->size, max_size); + buf_mmap = model.buf; + } else { + model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_metal_buffer_type()); + } + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + // for testing only + if (n_gpu_layers > 0) { + model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cuda_buffer_type(0)); + } +#endif + + if (model.buf == nullptr) { + // CPU backend, and indirectly CUDA and OpenCL + if (ml.use_mmap) { + model.buf = ggml_backend_cpu_buffer_from_ptr(ml.mapping->addr, ml.mapping->size); + buf_mmap = model.buf; + } else { + // allocate only CPU tensors + model.buf = ggml_backend_buft_alloc_buffer(buft, buf_size); + ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(model.buf); + for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { + if (t->backend 
== GGML_BACKEND_CPU) { + ggml_tallocr_alloc(alloc, t); + } + } + ggml_tallocr_free(alloc); + } + } + + if (use_mlock && ggml_backend_buffer_is_host(model.buf)) { + model.mlock_buf.init (ggml_backend_buffer_get_base(model.buf)); + model.mlock_buf.grow_to(ggml_backend_buffer_get_size(model.buf)); + } + // print memory requirements { - // this is the total memory required to run the inference - size_t mem_required = - ctx_size + - mmapped_size - vram_weights; // weights in VRAM not in memory + size_t sys_mem_required = ctx_size + buf_size; - LLAMA_LOG_INFO("%s: mem required = %7.2f MiB\n", __func__, mem_required / 1024.0 / 1024.0); + if (sys_mem_required > 0) { + LLAMA_LOG_INFO("%s: system memory used = %7.2f MiB\n", __func__, sys_mem_required / 1024.0 / 1024.0); + } + if (vram_weights > 0) { + LLAMA_LOG_INFO("%s: VRAM used = %7.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); + } -#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) +#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); @@ -3746,39 +3703,26 @@ static void llm_load_tensors( LLAMA_LOG_INFO("%s: offloading non-repeating layers to GPU\n", __func__); } -#ifdef GGML_USE_CUBLAS const int max_backend_supported_layers = hparams.n_layer + 1; const int max_offloadable_layers = hparams.n_layer + 1; -#elif GGML_USE_CLBLAST - const int max_backend_supported_layers = hparams.n_layer + 1; - const int max_offloadable_layers = hparams.n_layer + 1; -#endif // GGML_USE_CUBLAS LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); - LLAMA_LOG_INFO("%s: VRAM used: %.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); -#else - (void) n_gpu_layers; #endif // defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) } - // populate `tensors_by_name` +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + ggml_cuda_set_tensor_split(tensor_split); +#else + GGML_UNUSED(tensor_split); +#endif // GGML_USE_CUBLAS + + // populate tensors_by_name for (int i = 0; i < ml.n_tensors; ++i) { struct ggml_tensor * cur = ggml_get_tensor(ctx, ml.get_tensor_name(i)); model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); } - (void) tensor_split; -#ifdef GGML_USE_CUBLAS - { - ggml_cuda_set_tensor_split(tensor_split); - } -#endif - - ml.load_all_data(ctx, progress_callback, progress_callback_user_data, use_mlock ? &model.mlock_mmap : NULL); - - if (progress_callback) { - progress_callback(1.0f, progress_callback_user_data); - } + ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? 
&model.mlock_mmap : NULL); model.mapping = std::move(ml.mapping); @@ -4211,7 +4155,7 @@ struct llm_build_context { const llm_build_cb & cb; - llama_buffer & buf_compute; + std::vector & buf_compute_meta; struct ggml_context * ctx0 = nullptr; @@ -4221,35 +4165,35 @@ struct llm_build_context { const llama_batch & batch, const llm_build_cb & cb, bool worst_case) : - model (lctx.model), - hparams (model.hparams), - cparams (lctx.cparams), - batch (batch), - kv_self (lctx.kv_self), - n_embd (hparams.n_embd), - n_layer (hparams.n_layer), - n_ctx (cparams.n_ctx), - n_head (hparams.n_head), - n_head_kv (hparams.n_head_kv), - n_embd_head (hparams.n_embd_head()), - n_embd_gqa (hparams.n_embd_gqa()), - n_expert (hparams.n_expert), - n_expert_used (hparams.n_expert_used), - freq_base (cparams.rope_freq_base), - freq_scale (cparams.rope_freq_scale), - ext_factor (cparams.yarn_ext_factor), - attn_factor (cparams.yarn_attn_factor), - beta_fast (cparams.yarn_beta_fast), - beta_slow (cparams.yarn_beta_slow), - norm_eps (hparams.f_norm_eps), - norm_rms_eps (hparams.f_norm_rms_eps), - n_tokens (batch.n_tokens), - n_kv (worst_case ? n_ctx : kv_self.n), - kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), - n_orig_ctx (cparams.n_yarn_orig_ctx), - do_rope_shift (worst_case || kv_self.has_shift), - cb (cb), - buf_compute (lctx.buf_compute) { + model (lctx.model), + hparams (model.hparams), + cparams (lctx.cparams), + batch (batch), + kv_self (lctx.kv_self), + n_embd (hparams.n_embd), + n_layer (hparams.n_layer), + n_ctx (cparams.n_ctx), + n_head (hparams.n_head), + n_head_kv (hparams.n_head_kv), + n_embd_head (hparams.n_embd_head()), + n_embd_gqa (hparams.n_embd_gqa()), + n_expert (hparams.n_expert), + n_expert_used (hparams.n_expert_used), + freq_base (cparams.rope_freq_base), + freq_scale (cparams.rope_freq_scale), + ext_factor (cparams.yarn_ext_factor), + attn_factor (cparams.yarn_attn_factor), + beta_fast (cparams.yarn_beta_fast), + beta_slow (cparams.yarn_beta_slow), + norm_eps (hparams.f_norm_eps), + norm_rms_eps (hparams.f_norm_rms_eps), + n_tokens (batch.n_tokens), + n_kv (worst_case ? n_ctx : kv_self.n), + kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), + n_orig_ctx (cparams.n_yarn_orig_ctx), + do_rope_shift (worst_case || kv_self.has_shift), + cb (cb), + buf_compute_meta (lctx.buf_compute_meta) { GGML_ASSERT(!!kv_self.ctx); // all initializations should be done in init() @@ -4257,8 +4201,8 @@ struct llm_build_context { void init() { struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, + /*.mem_size =*/ buf_compute_meta.size(), + /*.mem_buffer =*/ buf_compute_meta.data(), /*.no_alloc =*/ true, }; @@ -5737,8 +5681,8 @@ static const std::unordered_map k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) - { "Q_scale", OFFLOAD_FUNC_FRC }, - { "KQ_scale", OFFLOAD_FUNC_FRC }, + { "Q_scale", OFFLOAD_FUNC_NOP }, + { "KQ_scale", OFFLOAD_FUNC_NOP }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5845,7 +5789,7 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) const bool do_offload = true; #else const bool do_offload = true; // TODO: set to false after finishing refactoring @@ -5873,7 +5817,7 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc) && batch.token) { const int64_t n_tokens = cur->ne[0]; - memcpy(cur->data, batch.token, n_tokens*ggml_element_size(cur)); + ggml_backend_tensor_set(cur, batch.token, 0, n_tokens*ggml_element_size(cur)); } alloc_inp_tokens = true; @@ -5886,7 +5830,7 @@ static struct ggml_cgraph * llama_build_graph( const int64_t n_embd = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; - memcpy(cur->data, batch.embd, n_tokens*n_embd*ggml_element_size(cur)); + ggml_backend_tensor_set(cur, batch.embd, 0, n_tokens*n_embd*ggml_element_size(cur)); } alloc_inp_embd = true; @@ -5898,11 +5842,8 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc) && batch.pos) { const int64_t n_tokens = cur->ne[0]; - int32_t * data = (int32_t *) cur->data; - - for (int i = 0; i < n_tokens; ++i) { - data[i] = batch.pos[i]; - } + static_assert(std::is_same::value, "llama_pos must be int32_t"); + ggml_backend_tensor_set(cur, batch.pos, 0, n_tokens*ggml_element_size(cur)); } alloc_inp_pos = true; @@ -5913,7 +5854,8 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_embd_head = model.hparams.n_embd_head(); - ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + float f = 1.0f/sqrtf(float(n_embd_head)); + ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); } alloc_inp_Q_scale = true; @@ -5924,13 +5866,15 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_embd_head = model.hparams.n_embd_head(); + float f; if (model.arch == LLM_ARCH_PHI2) { // with phi2, we scale the Q to avoid precision issues // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 - ggml_set_f32(cur, 1.0f); + f = 1.0f; } else { - ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + f = 1.0f/sqrtf(float(n_embd_head)); } + ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); } alloc_inp_KQ_scale = true; @@ -5943,8 +5887,13 @@ static struct ggml_cgraph * llama_build_graph( const int64_t n_kv = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; - float * data = (float *) cur->data; - memset(data, 0, ggml_nbytes(cur)); + float * data; + if (ggml_backend_buffer_is_host(cur->buffer)) { + data = (float *) cur->data; + } else { + lctx.buf_copy.resize(ggml_nbytes(cur)); + data = (float *) lctx.buf_copy.data(); + } for (int h = 0; h < 1; ++h) { for (int j = 0; j < n_tokens; ++j) { @@ -5952,12 +5901,20 @@ static struct ggml_cgraph * llama_build_graph( const llama_seq_id seq_id = batch.seq_id[j][0]; for (int i = 0; i < n_kv; ++i) { + float f; if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; + f = -INFINITY; + } else { + f = 0; } + data[h*(n_kv*n_tokens) + j*n_kv + i] = f; } } } + + if (data != cur->data) { + ggml_backend_tensor_set(cur, data, 
0, ggml_nbytes(cur)); + } } alloc_inp_KQ_mask = true; @@ -5969,11 +5926,21 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_ctx = cur->ne[0]; - int32_t * data = (int32_t *) cur->data; + int32_t * data; + if (ggml_backend_buffer_is_host(cur->buffer)) { + data = (int32_t *) cur->data; + } else { + lctx.buf_copy.resize(ggml_nbytes(cur)); + data = (int32_t *) lctx.buf_copy.data(); + } for (int i = 0; i < n_ctx; ++i) { data[i] = lctx.kv_self.cells[i].delta; } + + if (data != cur->data) { + ggml_backend_tensor_set(cur, data, 0, ggml_nbytes(cur)); + } } alloc_inp_K_shift = true; @@ -6010,7 +5977,7 @@ static struct ggml_cgraph * llama_build_graph( static const std::unordered_map> k_offload_func_name = { { OFFLOAD_FUNC_NOP, "CPU" }, { OFFLOAD_FUNC_OUT, "CPU" }, -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) { OFFLOAD_FUNC, "GPU (CUDA)" }, { OFFLOAD_FUNC_FRC, "GPU (CUDA) FRC" }, { OFFLOAD_FUNC_KQV, "GPU (CUDA) KQV" }, @@ -6083,7 +6050,7 @@ static struct ggml_cgraph * llama_build_graph( offload_func_t func = ggml_offload_nop; // this is needed for compatibility with Metal for example -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) static offload_func_t ggml_offload_gpu = ggml_cuda_assign_buffers_no_alloc; #else static offload_func_t ggml_offload_gpu = ggml_offload_nop; @@ -6305,11 +6272,12 @@ static int llama_decode_internal( GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + char * buf_alloc_base = (char *)ggml_backend_buffer_get_base(lctx.buf_alloc); for (int i = 0; i < gf->n_leafs; i++) { ggml_tensor * node = gf->leafs[i]; if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char*)node->data - (char *) lctx.buf_alloc.data); + ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); ggml_cuda_copy_to_device(node); } } @@ -6317,7 +6285,7 @@ static int llama_decode_internal( for (int i = 0; i < gf->n_nodes; i++) { ggml_tensor * node = gf->nodes[i]; if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char*)node->data - (char *) lctx.buf_alloc.data); + ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); } } @@ -6344,23 +6312,23 @@ static int llama_decode_internal( n_threads = 1; } -#if GGML_USE_MPI +#ifdef GGML_USE_MPI const int64_t n_layer = hparams.n_layer; ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); #endif #ifdef GGML_USE_METAL - if (lctx.ctx_metal) { - ggml_metal_set_n_cb (lctx.ctx_metal, n_threads); - ggml_metal_graph_compute(lctx.ctx_metal, gf); - } else { - ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); + if (ggml_backend_is_metal(lctx.backend)) { + ggml_backend_metal_set_n_cb(lctx.backend, n_threads); } -#else - ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); #endif -#if GGML_USE_MPI + if (ggml_backend_is_cpu(lctx.backend)) { + ggml_backend_cpu_set_n_threads(lctx.backend, n_threads); + } + ggml_backend_graph_compute(lctx.backend, gf); + +#ifdef GGML_USE_MPI ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); #endif @@ -6412,20 +6380,20 @@ static int llama_decode_internal( if (batch.logits[i] == 0) { continue; } - memcpy(logits_out.data() + (n_vocab*i), (float *) ggml_get_data(res) + (n_vocab*i), sizeof(float)*n_vocab); + ggml_backend_tensor_get(res, 
logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[i] = true; #endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); - memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*n_tokens); + ggml_backend_tensor_get(res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); #ifndef NDEBUG std::fill(logits_valid.begin(), logits_valid.end(), true); #endif } else { logits_out.resize(n_vocab); - memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab); + ggml_backend_tensor_get(res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[0] = true; #endif @@ -6437,7 +6405,7 @@ static int llama_decode_internal( auto & embedding_out = lctx.embedding; embedding_out.resize(n_embd); - memcpy(embedding_out.data(), (float *) ggml_get_data(embeddings) + (n_embd*(n_tokens - 1)), sizeof(float)*n_embd); + ggml_backend_tensor_get(embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); } // measure the performance only for the single-token evals @@ -8395,12 +8363,6 @@ void llama_beam_search(llama_context * ctx, // quantization // -template -struct no_init { - T value; - no_init() { /* do nothing */ } -}; - struct quantize_state_internal { const llama_model & model; const llama_model_quantize_params * params; @@ -8643,9 +8605,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s #endif llama_model_loader ml(fname_inp, use_mmap, NULL); - if (ml.use_mmap) { - ml.mapping.reset(new llama_mmap(&ml.file, /* prefetch */ 0, ggml_is_numa())); - } + ml.init_mapping(false); // no prefetching? llama_model model; llm_load_arch(ml, model); @@ -8944,29 +8904,10 @@ static int llama_apply_lora_from_file_internal( // load base model std::unique_ptr ml; - unique_context base_ctx(nullptr, ggml_free); - std::vector base_buf; - if (path_base_model) { + if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); - ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ NULL)); - - size_t ctx_size; - size_t mmapped_size; - ml->calc_sizes(ctx_size, mmapped_size); - - base_buf.resize(ctx_size); - - ggml_init_params base_params; - base_params.mem_size = base_buf.size(); - base_params.mem_buffer = base_buf.data(); - base_params.no_alloc = ml->use_mmap; - - base_ctx.reset(ggml_init(base_params)); - - // maybe this should be in llama_model_loader - if (ml->use_mmap) { - ml->mapping.reset(new llama_mmap(&ml->file, /* prefetch */ 0, ggml_is_numa())); - } + ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ nullptr)); + ml->init_mapping(false); // no prefetching } // read tensors and apply @@ -9058,7 +8999,7 @@ static int llama_apply_lora_from_file_internal( offload_func_t offload_func = ggml_offload_nop; offload_func_t offload_func_force_inplace = ggml_offload_nop; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { if (dest_t->type != GGML_TYPE_F16) { throw std::runtime_error(format( @@ -9079,7 +9020,7 @@ static int llama_apply_lora_from_file_internal( return 1; } - base_t = ml->create_tensor(base_ctx.get(), base_name, { dest_t->ne[0], dest_t->ne[1] }, GGML_BACKEND_CPU); + base_t = ml->get_tensor_meta(base_name.c_str()); 
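// Illustrative sketch, not from the patch: base_t is obtained here as tensor metadata and
// filled by load_data_for() on the next line. The LoRA merge that follows conceptually
// computes W' = W + scaling * (low-rank delta from the A/B pair); a minimal ggml sketch of
// that step, assuming lora_ctx/lora_a/lora_b/scaling are already set up (the same pattern,
// using the new float overload of ggml_scale, appears in examples/export-lora/export-lora.cpp
// in the ggml_scale patch later in this series):
//
//     struct ggml_tensor * ba = ggml_mul_mat(lora_ctx, lora_a, lora_b);   // low-rank delta
//     if (scaling != 1.0f) {
//         ba = ggml_scale(lora_ctx, ba, scaling);                         // typically alpha/r
//     }
//     struct ggml_tensor * r  = ggml_add_inplace(lora_ctx, base_t, ba);   // W_base + delta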
ml->load_data_for(base_t); } else { base_t = dest_t; @@ -9364,7 +9305,39 @@ struct llama_context * llama_new_context_with_model( // reserve memory for context buffers if (!hparams.vocab_only) { - if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { + // initialize backend +#ifdef GGML_USE_METAL + if (model->n_gpu_layers > 0) { + ctx->backend = ggml_backend_metal_init(); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize Metal backend\n", __func__); + } + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + // for testing only + if (model->n_gpu_layers > 0) { + ctx->backend = ggml_backend_cuda_init(0); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA backend\n", __func__); + } + } +#endif + + if (ctx->backend == nullptr && ggml_backend_buffer_is_host(model->buf)) { + ctx->backend = ggml_backend_cpu_init(); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); + } + } + + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize a backend\n", __func__); + delete ctx; + return nullptr; + } + + if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, + cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; @@ -9400,12 +9373,11 @@ struct llama_context * llama_new_context_with_model( } { - static const size_t tensor_alignment = 32; // the compute buffer is used to store the tensor and graph structs, while the allocator buffer is used for the tensor data - ctx->buf_compute.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); + ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); // create measure allocator - ctx->alloc = ggml_allocr_new_measure(tensor_alignment); + ctx->alloc = ggml_allocr_new_measure_from_backend(ctx->backend); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); @@ -9413,98 +9385,50 @@ struct llama_context * llama_new_context_with_model( llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); -#ifdef GGML_USE_METAL - if (model->n_gpu_layers > 0) { - ctx->ctx_metal = ggml_metal_init(1); - if (!ctx->ctx_metal) { - LLAMA_LOG_ERROR("%s: ggml_metal_init() failed\n", __func__); - llama_free(ctx); - return NULL; - } - //ggml_metal_graph_find_concurrency(ctx->ctx_metal, gf, false); - //ggml_allocr_set_parse_seq(ctx->alloc, ggml_metal_get_concur_list(ctx->ctx_metal), ggml_metal_if_optimized(ctx->ctx_metal)); - } -#endif // measure memory requirements for the graph - size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf) + tensor_alignment; + size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf); - LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute.size + alloc_size) / 1024.0 / 1024.0); + LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute_meta.size() + alloc_size) / 1024.0 / 1024.0); - // recreate allocator with exact memory requirements + // create allocator again with exact memory requirements ggml_allocr_free(ctx->alloc); - 
ctx->buf_alloc.resize(alloc_size); - ctx->alloc = ggml_allocr_new(ctx->buf_alloc.data, ctx->buf_alloc.size, tensor_alignment); -#ifdef GGML_USE_METAL - if (ctx->ctx_metal) { - //ggml_allocr_set_parse_seq(ctx->alloc, ggml_metal_get_concur_list(ctx->ctx_metal), ggml_metal_if_optimized(ctx->ctx_metal)); - } -#endif -#ifdef GGML_USE_CUBLAS - ggml_cuda_set_scratch_size(alloc_size); - LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); + ctx->buf_alloc = ggml_backend_alloc_buffer(ctx->backend, alloc_size); + ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + if (model->n_gpu_layers > 0) { + ggml_cuda_set_scratch_size(alloc_size); + LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); - // calculate total VRAM usage - auto add_tensor = [](const ggml_tensor * t, size_t & size) { - if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { - size += ggml_nbytes(t); + // calculate total VRAM usage + auto add_tensor = [](const ggml_tensor * t, size_t & size) { + if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { + size += ggml_nbytes(t); + } + }; + size_t model_vram_size = 0; + for (const auto & kv : model->tensors_by_name) { + add_tensor(kv.second, model_vram_size); } - }; - size_t model_vram_size = 0; - for (const auto & kv : model->tensors_by_name) { - add_tensor(kv.second, model_vram_size); - } - size_t kv_vram_size = 0; - for (auto & k : ctx->kv_self.k_l) { - add_tensor(k, kv_vram_size); - } - for (auto & v : ctx->kv_self.v_l) { - add_tensor(v, kv_vram_size); - } + size_t kv_vram_size = 0; + for (auto & k : ctx->kv_self.k_l) { + add_tensor(k, kv_vram_size); + } + for (auto & v : ctx->kv_self.v_l) { + add_tensor(v, kv_vram_size); + } - size_t ctx_vram_size = alloc_size + kv_vram_size; - size_t total_vram_size = model_vram_size + ctx_vram_size; + size_t ctx_vram_size = alloc_size + kv_vram_size; + size_t total_vram_size = model_vram_size + ctx_vram_size; - LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, - total_vram_size / 1024.0 / 1024.0, - model_vram_size / 1024.0 / 1024.0, - ctx_vram_size / 1024.0 / 1024.0); + LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, + total_vram_size / 1024.0 / 1024.0, + model_vram_size / 1024.0 / 1024.0, + ctx_vram_size / 1024.0 / 1024.0); + } #endif } - -#ifdef GGML_USE_METAL - if (model->n_gpu_layers > 0) { - // this allocates all Metal resources and memory buffers - - void * data_ptr = NULL; - size_t data_size = 0; - - if (ctx->model.mapping) { - data_ptr = ctx->model.mapping->addr; - data_size = ctx->model.mapping->size; - } else { - data_ptr = ggml_get_mem_buffer(ctx->model.ctx); - data_size = ggml_get_mem_size (ctx->model.ctx); - } - - const size_t max_size = ggml_get_max_tensor_size(ctx->model.ctx); - - LLAMA_LOG_INFO("%s: max tensor size = %8.2f MiB\n", __func__, max_size/1024.0/1024.0); - -#define LLAMA_METAL_CHECK_BUF(result) \ - if (!(result)) { \ - LLAMA_LOG_ERROR("%s: failed to add buffer\n", __func__); \ - llama_free(ctx); \ - return NULL; \ - } - - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "data", data_ptr, data_size, max_size)); - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "kv", ctx->kv_self.buf.data, ctx->kv_self.buf.size, 0)); - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "alloc", 
ctx->buf_alloc.data, ctx->buf_alloc.size, 0)); -#undef LLAMA_METAL_CHECK_BUF - } -#endif } #ifdef GGML_USE_MPI @@ -9796,7 +9720,7 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_embedding = ctx->embedding.size() * sizeof(float); const size_t s_kv_size = sizeof(size_t); const size_t s_kv_ntok = sizeof(int); - const size_t s_kv = ctx->kv_self.buf.size; + const size_t s_kv = ggml_backend_buffer_get_size(ctx->kv_self.buf); const size_t s_total = ( + s_rng_size @@ -9924,7 +9848,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto n_embd = hparams.n_embd_gqa(); const auto n_ctx = cparams.n_ctx; - const size_t kv_buf_size = kv_self.buf.size; + const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); const uint32_t kv_head = kv_self.head; const uint32_t kv_size = kv_self.size; const uint32_t kv_used = kv_self.used; @@ -9940,17 +9864,12 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - std::vector> kout2d_data(n_layer); - std::vector> vout2d_data(n_layer); + std::vector kout2d(n_layer); + std::vector vout2d(n_layer); for (int il = 0; il < (int) n_layer; ++il) { - ggml_tensor * kout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - kout2d_data[il].resize(ggml_nbytes(kout2d)); - kout2d->data = kout2d_data[il].data(); - - ggml_tensor * vout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); - vout2d_data[il].resize(ggml_nbytes(vout2d)); - vout2d->data = vout2d_data[il].data(); + kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], n_embd, kv_head, @@ -9960,20 +9879,28 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat kv_head, n_embd, elt_size*n_ctx, 0); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d[il])); } - ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); + + ggml_backend_graph_compute(ctx->backend, gf); + + std::vector tmp_buf; + for (int il = 0; il < (int) n_layer; ++il) { + tmp_buf.resize(ggml_nbytes(kout2d[il])); + ggml_backend_tensor_get(kout2d[il], tmp_buf.data(), 0, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + + tmp_buf.resize(ggml_nbytes(vout2d[il])); + ggml_backend_tensor_get(vout2d[il], tmp_buf.data(), 0, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + } ggml_free(cpy_ctx); - // our data is now in the kout2d_data and vout2d_data buffers - // write them to file - for (uint32_t il = 0; il < n_layer; ++il) { - data_ctx->write(kout2d_data[il].data(), kout2d_data[il].size()); - data_ctx->write(vout2d_data[il].data(), vout2d_data[il].size()); - } + ggml_backend_buffer_free(buf); } for (uint32_t i = 0; i < kv_size; ++i) { @@ -10071,21 +9998,19 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { memcpy(&kv_used, inp, sizeof(kv_used)); inp += sizeof(kv_used); if (kv_buf_size) { - 
GGML_ASSERT(kv_self.buf.size == kv_buf_size); + GGML_ASSERT(ggml_backend_buffer_get_size(kv_self.buf) == kv_buf_size); const size_t elt_size = ggml_element_size(kv_self.k_l[0]); ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - for (int il = 0; il < n_layer; ++il) { - ggml_tensor * kin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - kin2d->data = (void *) inp; - inp += ggml_nbytes(kin2d); + std::vector kin2d(n_layer); + std::vector vin2d(n_layer); - ggml_tensor * vin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); - vin2d->data = (void *) inp; - inp += ggml_nbytes(vin2d); + for (int il = 0; il < n_layer; ++il) { + kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], n_embd, kv_head, @@ -10095,13 +10020,26 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { kv_head, n_embd, elt_size*n_ctx, 0); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d, k2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d, v2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d[il], v2d)); } - ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); + + // load data into the tensors + for (int il = 0; il < n_layer; ++il) { + ggml_backend_tensor_set(kin2d[il], inp, 0, ggml_nbytes(kin2d[il])); + inp += ggml_nbytes(kin2d[il]); + + ggml_backend_tensor_set(vin2d[il], inp, 0, ggml_nbytes(vin2d[il])); + inp += ggml_nbytes(vin2d[il]); + } + + ggml_backend_graph_compute(ctx->backend, gf); ggml_free(cpy_ctx); + + ggml_backend_buffer_free(buf); } ctx->kv_self.head = kv_head; From 4a5f9d629ecfd0a53afdddbaf54a4fa02d9a9ce9 Mon Sep 17 00:00:00 2001 From: Samuel Maynard Date: Thu, 21 Dec 2023 22:36:26 +0200 Subject: [PATCH 122/811] ci : add `jlumbroso/free-disk-space` to docker workflow (#4150) * [github][workflows][docker]: removes hardcoded `ggerganov` from `ghcr` repo * [github][workflows][docker]: adds `jlumbroso/free-disk-space` --- .github/workflows/docker.yml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 9c90c77ac..a7165a38f 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -52,6 +52,23 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + # https://github.com/jlumbroso/free-disk-space/tree/54081f138730dfa15788a46383842cd2f914a1be#example + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + # this might remove tools that are actually needed, + # if set to "true" but frees about 6 GB + tool-cache: false + + # all of these default to true, but feel free to set to + # "false" if necessary for your workflow + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + - name: Build and push Docker image (versioned) if: github.event_name == 'push' uses: docker/build-push-action@v4 @@ -59,7 +76,7 @@ jobs: context: . 
push: true platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/ggerganov/llama.cpp:${{ matrix.config.tag }}-${{ env.COMMIT_SHA }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ env.COMMIT_SHA }}" file: ${{ matrix.config.dockerfile }} - name: Build and push Docker image (tagged) @@ -68,5 +85,5 @@ jobs: context: . push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/ggerganov/llama.cpp:${{ matrix.config.tag }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" file: ${{ matrix.config.dockerfile }} From 32259b2dade6f6856739bf7ba0a4ff7b474dc760 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:07:58 +0200 Subject: [PATCH 123/811] gguf : simplify example dependencies --- Makefile | 2 +- examples/gguf/CMakeLists.txt | 2 +- examples/gguf/gguf.cpp | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 512407a1d..68df7702a 100644 --- a/Makefile +++ b/Makefile @@ -606,7 +606,7 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual -gguf: examples/gguf/gguf.cpp ggml.o llama.o $(OBJS) +gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) diff --git a/examples/gguf/CMakeLists.txt b/examples/gguf/CMakeLists.txt index 7d1806af3..6481f087b 100644 --- a/examples/gguf/CMakeLists.txt +++ b/examples/gguf/CMakeLists.txt @@ -1,5 +1,5 @@ set(TARGET gguf) add_executable(${TARGET} gguf.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE ggml ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp index 9e24bf24c..e67be4fb2 100644 --- a/examples/gguf/gguf.cpp +++ b/examples/gguf/gguf.cpp @@ -1,5 +1,4 @@ #include "ggml.h" -#include "llama.h" #include #include From 769a7bc85eaa44e3d7eadf39abfeff7bb0b9cc2f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:20:36 +0200 Subject: [PATCH 124/811] gguf-py : fix broken link --- gguf-py/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gguf-py/README.md b/gguf-py/README.md index a27d2fc0e..22d7ffa52 100644 --- a/gguf-py/README.md +++ b/gguf-py/README.md @@ -3,7 +3,7 @@ This is a Python package for writing binary files in the [GGUF](https://github.com/ggerganov/ggml/pull/302) (GGML Universal File) format. -See [convert-llama-hf-to-gguf.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-llama-hf-to-gguf.py) +See [convert-llama-hf-to-gguf.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-hf-to-gguf.py) as an example for its usage. 
## Installation From afefa319f1f59b002dfa0d1ef407a2c74bd9770b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:20:49 +0200 Subject: [PATCH 125/811] ggml : change ggml_scale to take a float instead of tensor (#4573) * ggml : change ggml_scale to take a float instead of tensor * ggml : fix CPU implementation * tests : fix test-grad0 ggml-ci --- examples/baby-llama/baby-llama.cpp | 15 +-- examples/export-lora/export-lora.cpp | 2 +- examples/finetune/finetune.cpp | 42 +++---- examples/llava/clip.cpp | 8 +- .../train-text-from-scratch.cpp | 14 +-- ggml-cuda.cu | 14 +-- ggml-metal.m | 6 +- ggml.c | 42 +++---- ggml.h | 4 +- llama.cpp | 119 +++--------------- tests/test-backend-ops.cpp | 9 +- tests/test-grad0.cpp | 12 +- 12 files changed, 82 insertions(+), 205 deletions(-) diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index 2dc2988d3..e7d2ad592 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -575,10 +575,7 @@ static struct ggml_tensor * forward( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, 1] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); // KQ_masked = mask_past(KQ_scaled) // KQ_masked shape [n_past + N, N, n_head, 1] @@ -844,10 +841,7 @@ static struct ggml_tensor * forward_batch( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, n_batch] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); assert_shape_4d(KQ_scaled, n_past + N, N, n_head, n_batch); // KQ_masked = mask_past(KQ_scaled) @@ -1131,10 +1125,7 @@ static struct ggml_tensor * forward_lora( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, 1] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); // KQ_masked = mask_past(KQ_scaled) // KQ_masked shape [n_past + N, N, n_head, 1] diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index c8754ce70..58fbe204d 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -309,7 +309,7 @@ static struct ggml_cgraph * build_graph_lora( ) { struct ggml_tensor * ab = ggml_mul_mat(ctx, lora_a, lora_b); if (scaling != 1.0f) { - ab = ggml_scale(ctx, ab, ggml_new_f32(ctx, scaling)); + ab = ggml_scale(ctx, ab, scaling); } struct ggml_tensor * res = ggml_add_inplace(ctx, tensor, ab); diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 6a668d764..7b1333a9d 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -269,7 +269,7 @@ static void load_model_hparams_gguf(struct gguf_context * ctx, struct my_llama_h float rope_freq_scale = 1.0f; GGUF_GET_KEY(ctx, hparams->f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); GGUF_GET_KEY(ctx, hparams->rope_freq_base, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_FREQ_BASE)); - GGUF_GET_KEY(ctx, rope_freq_scale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); + GGUF_GET_KEY(ctx, 
rope_freq_scale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); if (rope_freq_scale != 1.0f) { hparams->rope_freq_scale = 1.0f / rope_freq_scale; } @@ -612,6 +612,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( const int n_rot = hparams.n_embd_head(); const int n_embd_head = hparams.n_embd_head(); const int n_embd_gqa = hparams.n_embd_gqa(); + const float rms_norm_eps = hparams.f_norm_rms_eps; const float rope_freq_base = hparams.rope_freq_base; const float rope_freq_scale = hparams.rope_freq_scale; @@ -680,10 +681,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( checkpoints.push_back(t01); } - struct ggml_tensor * kv_scale = NULL; - if (!enable_flash_attn) { - kv_scale = ggml_new_f32(ctx, 1.0f/sqrtf(float(n_embd)/n_head)); - } + const float kv_scale = 1.0f/sqrtf(float(n_embd)/n_head); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; @@ -781,32 +779,32 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // make sure some tensors are not reallocated by inserting new temporary nodes depending on them int n_leafs_before = gb->n_leafs; int n_nodes_before = gb->n_nodes; - struct ggml_tensor * one = ggml_new_f32(ctx, 1.0f); + // output tensors - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, 1.0f)); // input gradient - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); ggml_allocr_alloc(alloc, t36->grad); // KQ_pos - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); // make sure base model tensors data cannot be used in viewable operations - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->tok_embeddings, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->output, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->tok_embeddings, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->output, 1.0f)); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.attention_norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wq, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.attention_norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wq, 1.0f)); + 
ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, 1.0f)); } // allocating checkpoints in one block to reduce memory fragmentation diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 112465968..f06ec400d 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -330,12 +330,6 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima ggml_repeat(ctx0, model.pre_ln_b, embeddings)); } - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_allocr_alloc(ctx->alloc, KQ_scale); - if (!ggml_allocr_is_measure(ctx->alloc)) { - ggml_set_f32(KQ_scale, 1.0f / sqrt((float)d_head)); - } - // loop over layers for (int il = 0; il < n_layer - 1; il++) { struct ggml_tensor * cur = embeddings; // embeddings = residual, cur = hidden_states @@ -356,7 +350,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima struct ggml_tensor * Q = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].q_b, cur), ggml_mul_mat(ctx0, model.layers[il].q_w, cur)); - Q = ggml_scale_inplace(ctx0, Q, KQ_scale); + Q = ggml_scale_inplace(ctx0, Q, 1.0f / sqrt((float)d_head)); Q = ggml_reshape_4d(ctx0, Q, d_head, n_head, num_positions, batch_size); Q = ggml_cont(ctx0, ggml_permute(ctx0, Q, 0, 2, 1, 3)); Q = ggml_reshape_3d(ctx0, Q, d_head, num_positions, n_head * batch_size); diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index f7ed63365..4a9a2340b 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -369,10 +369,7 @@ static struct ggml_tensor * llama_build_train_graphs( checkpoints.push_back(t00); checkpoints.push_back(t01); - struct ggml_tensor * kv_scale = NULL; - if (!enable_flash_attn) { - kv_scale = ggml_new_f32(ctx, 1.0f/sqrtf(float(n_embd)/n_head)); - } + const float kv_scale = 1.0f/sqrtf(float(n_embd)/n_head); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; @@ -444,14 +441,13 @@ static struct ggml_tensor * llama_build_train_graphs( // make sure some tensors are not reallocated by inserting new temporary nodes depending on them int n_leafs_before = gb->n_leafs; int n_nodes_before = gb->n_nodes; - struct ggml_tensor * one = ggml_new_f32(ctx, 1.0f); // output tensors - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, 1.0f)); // input gradient - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); // KQ_pos - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); ggml_allocr_alloc(alloc, t36->grad); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f5e060d32..ac91ee12e 100644 
--- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7700,17 +7700,9 @@ inline void ggml_cuda_op_scale( const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - float scale; - // HACK: support for ggml backend interface - if (src1->backend == GGML_BACKEND_CPU) { - scale = ((float *) src1->data)[0]; - } else { - // TODO: pass pointer to kernel instead of copying to host - CUDA_CHECK(cudaMemcpy(&scale, src1->data, sizeof(float), cudaMemcpyDeviceToHost)); - } + const float scale = ((float *) dst->op_params)[0]; scale_f32_cuda(src0_dd, dst_dd, scale, ggml_nelements(src0), main_stream); CUDA_CHECK(cudaGetLastError()); @@ -7757,8 +7749,6 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; - const bool src1_stays_on_host = use_src1 && dst->op == GGML_OP_SCALE; - // dd = data device float * src0_ddf = nullptr; float * src1_ddf = nullptr; @@ -7779,7 +7769,7 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_ddf, src0, 0, 0, 0, nrows0, main_stream)); } - if (use_src1 && !src1_stays_on_host) { + if (use_src1) { if (src1_on_device) { src1_ddf = (float *) src1_extra->data_device[g_main_device]; } else { diff --git a/ggml-metal.m b/ggml-metal.m index e60b93b36..51a72ae33 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1293,7 +1293,7 @@ void ggml_metal_graph_compute( { GGML_ASSERT(ggml_is_contiguous(src0)); - const float scale = *(const float *) src1->data; + const float scale = *(const float *) dst->op_params; int64_t n = ggml_nelements(dst); @@ -1304,8 +1304,8 @@ void ggml_metal_graph_compute( [encoder setComputePipelineState:ctx->pipeline_scale]; } - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; diff --git a/ggml.c b/ggml.c index 236148514..f27920a2d 100644 --- a/ggml.c +++ b/ggml.c @@ -4171,23 +4171,23 @@ struct ggml_tensor * ggml_out_prod( static struct ggml_tensor * ggml_scale_impl( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b, + float s, bool inplace) { - GGML_ASSERT(ggml_is_scalar(b)); GGML_ASSERT(ggml_is_padded_1d(a)); bool is_node = false; - if (a->grad || b->grad) { + if (a->grad) { is_node = true; } struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + ggml_set_op_params(result, &s, sizeof(s)); + result->op = GGML_OP_SCALE; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = b; return result; } @@ -4195,15 +4195,15 @@ static struct ggml_tensor * ggml_scale_impl( struct ggml_tensor * ggml_scale( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_scale_impl(ctx, a, b, false); + float s) { + return ggml_scale_impl(ctx, a, s, false); } struct ggml_tensor * ggml_scale_inplace( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_scale_impl(ctx, a, b, true); + float s) { + return ggml_scale_impl(ctx, a, s, true); } // ggml_set @@ -10325,19 +10325,17 @@ static void ggml_compute_forward_out_prod( static void ggml_compute_forward_scale_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - GGML_ASSERT(ggml_is_scalar(src1)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } // scale factor - const float v = *(float *) src1->data; + const float v = *(float *) dst->op_params; const int ith = params->ith; const int nth = params->nth; @@ -10368,12 +10366,11 @@ static void ggml_compute_forward_scale_f32( static void ggml_compute_forward_scale( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_scale_f32(params, src0, src1, dst); + ggml_compute_forward_scale_f32(params, src0, dst); } break; default: { @@ -14383,7 +14380,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_SCALE: { - ggml_compute_forward_scale(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_scale(params, tensor->src[0], tensor); } break; case GGML_OP_SET: { @@ -14839,7 +14836,7 @@ static struct ggml_tensor * ggml_add_or_set(struct ggml_context * ctx, struct gg static struct ggml_tensor * ggml_acc_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, size_t nb1, size_t nb2, size_t nb3, size_t offset, struct ggml_hash_set zero_table) { if (ggml_hash_contains(zero_table, a)) { - struct ggml_tensor * a_zero = ggml_scale(ctx, a, ggml_new_f32(ctx, 0)); + struct ggml_tensor * a_zero = ggml_scale(ctx, a, 0.0f); return ggml_acc_impl(ctx, a_zero, b, nb1, nb2, nb3, offset, false); } else { return ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, false); @@ -14975,7 +14972,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor src0->grad, ggml_scale(ctx, ggml_mul(ctx, src0, tensor->grad), - ggml_new_f32(ctx, 2.0f)), + 2.0f), zero_table); } } break; @@ -14989,7 +14986,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor ggml_div(ctx, tensor->grad, tensor), - ggml_new_f32(ctx, 0.5f)), + 0.5f), zero_table); } } break; @@ -15155,17 +15152,12 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { // necessary for llama if (src0->grad) { + const float s = ((float *) tensor->op_params)[0]; + src0->grad = ggml_add_or_set(ctx, src0->grad, - ggml_scale_impl(ctx, tensor->grad, src1, false), - zero_table); - } - if (src1->grad) { - src1->grad = - ggml_add_or_set(ctx, - src1->grad, - ggml_sum(ctx, ggml_mul_impl(ctx, tensor->grad, src0, false)), + ggml_scale_impl(ctx, tensor->grad, s, 
false), zero_table); } } break; diff --git a/ggml.h b/ggml.h index b17314897..75918502b 100644 --- a/ggml.h +++ b/ggml.h @@ -1094,13 +1094,13 @@ extern "C" { GGML_API struct ggml_tensor * ggml_scale( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b); + float s); // in-place, returns view(a) GGML_API struct ggml_tensor * ggml_scale_inplace( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b); + float s); // b -> view(a,offset,nb1,nb2,3), return modified a GGML_API struct ggml_tensor * ggml_set( diff --git a/llama.cpp b/llama.cpp index ba970ce8d..d6c192441 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4032,13 +4032,12 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * wo, struct ggml_tensor * wo_b, struct ggml_tensor * q_cur, - struct ggml_tensor * kq_scale, struct ggml_tensor * kq_mask, int64_t n_ctx, int32_t n_tokens, int32_t n_kv, float max_alibi_bias, - float scale, + float kq_scale, const llm_build_cb & cb, int il) { const int64_t n_embd = hparams.n_embd; @@ -4086,7 +4085,7 @@ static struct ggml_tensor * llm_build_kqv( kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, scale); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); cb(kq, "kq_soft_max_ext", il); } @@ -4231,10 +4230,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4295,7 +4290,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4416,10 +4411,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4478,7 +4469,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4536,10 +4527,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4602,7 +4589,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, 
NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4659,10 +4646,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4702,7 +4685,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4759,10 +4742,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4911,7 +4890,7 @@ struct llm_build_context { // TODO: not tested, could be broken cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Q, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4965,10 +4944,6 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5002,7 +4977,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5056,10 +5031,6 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5099,7 +5070,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5150,10 +5121,6 @@ struct llm_build_context { inpL 
= llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5193,7 +5160,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5253,10 +5220,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5306,7 +5269,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5366,10 +5329,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5423,7 +5382,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5482,14 +5441,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // Q_scale - struct ggml_tensor * Q_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(Q_scale, "Q_scale", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5531,7 +5482,9 @@ struct llm_build_context { ); cb(Qcur, "Qcur", il); - Qcur = ggml_scale(ctx0, Qcur, Q_scale); + // with phi2, we scale the Q to avoid precision issues + // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 + Qcur = ggml_scale(ctx0, Qcur, 1.0f/sqrtf(float(n_embd_head))); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( @@ -5544,7 +5497,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, 
KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); cb(cur, "kqv_out", il); } @@ -5681,8 +5634,6 @@ static const std::unordered_map k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. rope) - { "Q_scale", OFFLOAD_FUNC_NOP }, - { "KQ_scale", OFFLOAD_FUNC_NOP }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5784,8 +5735,6 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_tokens = false; bool alloc_inp_embd = false; bool alloc_inp_pos = false; - bool alloc_inp_Q_scale = false; - bool alloc_inp_KQ_scale = false; bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; @@ -5849,37 +5798,6 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_pos = true; } - if (!alloc_inp_Q_scale && strcmp(name, "Q_scale") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); - - if (!ggml_allocr_is_measure(lctx.alloc)) { - const int64_t n_embd_head = model.hparams.n_embd_head(); - float f = 1.0f/sqrtf(float(n_embd_head)); - ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); - } - - alloc_inp_Q_scale = true; - } - - if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); - - if (!ggml_allocr_is_measure(lctx.alloc)) { - const int64_t n_embd_head = model.hparams.n_embd_head(); - float f; - if (model.arch == LLM_ARCH_PHI2) { - // with phi2, we scale the Q to avoid precision issues - // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 - f = 1.0f; - } else { - f = 1.0f/sqrtf(float(n_embd_head)); - } - ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); - } - - alloc_inp_KQ_scale = true; - } - if (!alloc_inp_KQ_mask && strcmp(name, "KQ_mask") == 0) { ggml_allocr_alloc(lctx.alloc, cur); @@ -9054,10 +8972,7 @@ static int llama_apply_lora_from_file_internal( ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx.get(), scaling); - ggml_set_name(scale_tensor, "scale_tensor"); - - BA = ggml_scale_inplace(lora_ctx.get(), BA, scale_tensor); + BA = ggml_scale_inplace(lora_ctx.get(), BA, scaling); offload_func(BA); ggml_set_name(BA, "BA_scaled"); } diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f04b9438a..f3df8a8c6 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -766,18 +766,19 @@ struct test_bin_bcast : public test_case { struct test_scale : public test_case { const ggml_type type; const std::array ne; + float scale; std::string vars() override { - return VARS_TO_STR2(type, ne); + return VARS_TO_STR3(type, ne, scale); } test_scale(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 10}) - : type(type), ne(ne) {} + std::array ne = {10, 10, 10, 10}, + float scale = 2.0f) + : type(type), ne(ne), scale(scale) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * scale = ggml_new_tensor_1d(ctx, type, 1); ggml_tensor * out = ggml_scale(ctx, a, scale); return out; } diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 81c20a89c..14914def5 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -881,19 +881,19 @@ int main(int argc, const char ** argv) { // scale { srand(seed); - const int nargs = 2; + const int nargs = 1; int64_t ne2[4]; ne2[0] = 1; for (int ndims = 1; ndims <= 2; ++ndims) { - x[1] = get_random_tensor_f32(ctx0, 1, ne2, -1.0f, 
1.0f); x[0] = get_random_tensor_f32(ctx0, ndims, ne, -1.0f, 1.0f); - ggml_set_param(ctx0, x[0]); - ggml_set_param(ctx0, x[1]); + const float s = -1.0f + 2.0f*frand(); - struct ggml_tensor * f = ggml_sum(ctx0, ggml_scale(ctx0, x[0], x[1])); + ggml_set_param(ctx0, x[0]); + + struct ggml_tensor * f = ggml_sum(ctx0, ggml_scale(ctx0, x[0], s)); check_gradient("scale", ctx0, x, f, ndims, nargs, 1e-3f, 1e-3f, INFINITY); } @@ -1395,7 +1395,7 @@ int main(int argc, const char ** argv) { ggml_add1(ctx0, ggml_scale(ctx0, ggml_soft_max(ctx0, x[0]), - ggml_new_f32(ctx0, 1.0f - eps)), + 1.0f - eps), ggml_new_f32(ctx0, eps)))); check_gradient("softmax", ctx0, x, f, ndims, nargs, 1e-3f, 2e-1f, INFINITY); From c7e9701f86564088350209d2f9d71c96ea00527f Mon Sep 17 00:00:00 2001 From: crasm Date: Fri, 22 Dec 2023 01:19:36 -0500 Subject: [PATCH 126/811] llama : add ability to cancel model loading (#4462) * llama : Add ability to cancel model load Updated llama_progress_callback so that if it returns false, the model loading is aborted. * llama : Add test for model load cancellation * Fix bool return in llama_model_load, remove std::ignore use * Update llama.cpp Co-authored-by: Jared Van Bortel * Fail test if model file is missing * Revert "Fail test if model file is missing" This reverts commit 32ebd525bf7e5a87ee8a3dbaab3d92ce79fbf23d. * Add test-model-load-cancel to Makefile * Revert "Revert "Fail test if model file is missing"" This reverts commit 2796953257ee5383fa7c8fe8fa8fc888c048fb0b. * Simplify .gitignore for tests, clang-tidy fixes * Label all ctest tests * ci : ctest uses -L main * Attempt at writing ctest_with_model * ci : get ci/run.sh working with test-model-load-cancel * ci : restrict .github/workflows/build.yml ctest to -L main * update requirements.txt * Disable test-model-load-cancel in make * Remove venv before creation * Restructure requirements.txt Top-level now imports the specific additional requirements for each python file. Using `pip install -r requirements.txt` will fail if versions become mismatched in the per-file requirements. * Make per-python-script requirements work alone This doesn't break the main requirements.txt. 
* Add comment * Add convert-persimmon-to-gguf.py to new requirements.txt scheme * Add check-requirements.sh script and GitHub workflow * Remove shellcheck installation step from workflow * Add nocleanup special arg * Fix merge see: https://github.com/ggerganov/llama.cpp/pull/4462#discussion_r1434593573 * reset to upstream/master * Redo changes for cancelling model load --------- Co-authored-by: Georgi Gerganov Co-authored-by: Jared Van Bortel --- llama.cpp | 46 +++++++++++++++++++++++++++++++++------------- llama.h | 6 ++++-- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/llama.cpp b/llama.cpp index d6c192441..cb0546c95 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2372,7 +2372,8 @@ struct llama_model_loader { } } - void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { + // Returns false if cancelled by progress_callback + bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { size_t size_data = 0; for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { @@ -2404,7 +2405,9 @@ struct llama_model_loader { GGML_ASSERT(cur); // unused tensors should have been caught by load_data already if (progress_callback) { - progress_callback((float) size_done / size_data, progress_callback_user_data); + if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { + return false; + } } const size_t offs = file_offset(ggml_get_name(cur)); @@ -2466,8 +2469,11 @@ struct llama_model_loader { } if (progress_callback) { - progress_callback(1.0f, progress_callback_user_data); + // Even though the model is done loading, we still honor + // cancellation since we need to free allocations. + return progress_callback(1.0f, progress_callback_user_data); } + return true; } }; @@ -3044,7 +3050,8 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { if (vocab.linefeed_id != -1) { LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } } -static void llm_load_tensors( +// Returns false if cancelled by progress_callback +static bool llm_load_tensors( llama_model_loader & ml, llama_model & model, int n_gpu_layers, @@ -3722,16 +3729,20 @@ static void llm_load_tensors( model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); } - ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? &model.mlock_mmap : NULL); + if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? 
&model.mlock_mmap : NULL)) { + return false; + } model.mapping = std::move(ml.mapping); // loading time will be recalculate after the first eval, so // we take page faults deferred by mmap() into consideration model.t_load_us = ggml_time_us() - model.t_start_us; + return true; } -static bool llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { +// Returns 0 on success, -1 on error, and -2 on cancellation via llama_progress_callback +static int llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { try { llama_model_loader ml(fname, params.use_mmap, params.kv_overrides); @@ -3749,19 +3760,21 @@ static bool llama_model_load(const std::string & fname, llama_model & model, con if (params.vocab_only) { LLAMA_LOG_INFO("%s: vocab only - skipping tensors\n", __func__); - return true; + return 0; } - llm_load_tensors( + if (!llm_load_tensors( ml, model, params.n_gpu_layers, params.main_gpu, params.tensor_split, params.use_mlock, params.progress_callback, params.progress_callback_user_data - ); + )) { + return -2; + } } catch (const std::exception & err) { LLAMA_LOG_ERROR("error loading model: %s\n", err.what()); - return false; + return -1; } - return true; + return 0; } // @@ -9141,11 +9154,18 @@ struct llama_model * llama_load_model_from_file( LLAMA_LOG_INFO("\n"); } } + return true; }; } - if (!llama_model_load(path_model, *model, params)) { - LLAMA_LOG_ERROR("%s: failed to load model\n", __func__); + int status = llama_model_load(path_model, *model, params); + GGML_ASSERT(status <= 0); + if (status < 0) { + if (status == -1) { + LLAMA_LOG_ERROR("%s: failed to load model\n", __func__); + } else if (status == -2) { + LLAMA_LOG_INFO("%s: cancelled model load\n", __func__); + } delete model; return nullptr; } diff --git a/llama.h b/llama.h index 0be4b1337..af76bae2d 100644 --- a/llama.h +++ b/llama.h @@ -127,7 +127,7 @@ extern "C" { bool sorted; } llama_token_data_array; - typedef void (*llama_progress_callback)(float progress, void *ctx); + typedef bool (*llama_progress_callback)(float progress, void *ctx); // Input data for llama_decode // A llama_batch object can contain input about one or many sequences @@ -180,7 +180,9 @@ extern "C" { int32_t main_gpu; // the GPU that is used for scratch and small tensors const float * tensor_split; // how to split layers across multiple GPUs (size: LLAMA_MAX_DEVICES) - // called with a progress value between 0 and 1, pass NULL to disable + // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. + // If the provided progress_callback returns true, model loading continues. + // If it returns false, model loading is immediately aborted. 
llama_progress_callback progress_callback; // context pointer passed to the progress callback From 0137ef88ea9f8fd837a065700814329d24adeec3 Mon Sep 17 00:00:00 2001 From: bobqianic <129547291+bobqianic@users.noreply.github.com> Date: Fri, 22 Dec 2023 06:47:01 +0000 Subject: [PATCH 127/811] ggml : extend `enum ggml_log_level` with `GGML_LOG_LEVEL_DEBUG` (#4579) --- ggml.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ggml.h b/ggml.h index 75918502b..338f355a4 100644 --- a/ggml.h +++ b/ggml.h @@ -484,7 +484,8 @@ extern "C" { enum ggml_log_level { GGML_LOG_LEVEL_ERROR = 2, GGML_LOG_LEVEL_WARN = 3, - GGML_LOG_LEVEL_INFO = 4 + GGML_LOG_LEVEL_INFO = 4, + GGML_LOG_LEVEL_DEBUG = 5 }; // ggml object From 2bb98279c5a087d62949972b35cf63ff974ffe6a Mon Sep 17 00:00:00 2001 From: Deins Date: Fri, 22 Dec 2023 08:49:54 +0200 Subject: [PATCH 128/811] readme : add zig bindings (#4581) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 73fe59bb4..8e17d5ba4 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,7 @@ as the main playground for developing new features for the [ggml](https://github - Clojure: [phronmophobic/llama.clj](https://github.com/phronmophobic/llama.clj) - React Native: [mybigday/llama.rn](https://github.com/mybigday/llama.rn) - Java: [kherud/java-llama.cpp](https://github.com/kherud/java-llama.cpp) +- Zig: [deins/llama.cpp.zig](https://github.com/Deins/llama.cpp.zig) **UI:** From f31b98489824a86c937fa62ccf5dfd4bb0327b86 Mon Sep 17 00:00:00 2001 From: rhuddleston Date: Thu, 21 Dec 2023 23:56:34 -0700 Subject: [PATCH 129/811] ci : tag docker image with build number (#4584) --- .github/workflows/docker.yml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index a7165a38f..7f4de50ea 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -69,6 +69,19 @@ jobs: docker-images: true swap-storage: true + - name: Determine tag name + id: tag + shell: bash + run: | + BUILD_NUMBER="$(git rev-list --count HEAD)" + SHORT_HASH="$(git rev-parse --short=7 HEAD)" + if [[ "${{ env.BRANCH_NAME }}" == "master" ]]; then + echo "name=b${BUILD_NUMBER}" >> $GITHUB_OUTPUT + else + SAFE_NAME=$(echo "${{ env.BRANCH_NAME }}" | tr '/' '-') + echo "name=${SAFE_NAME}-b${BUILD_NUMBER}-${SHORT_HASH}" >> $GITHUB_OUTPUT + fi + - name: Build and push Docker image (versioned) if: github.event_name == 'push' uses: docker/build-push-action@v4 @@ -85,5 +98,5 @@ jobs: context: . 
push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" , "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" file: ${{ matrix.config.dockerfile }} From 28cb35a0ecb9852adc3494aa51dde60141939d64 Mon Sep 17 00:00:00 2001 From: Michael Kesper Date: Fri, 22 Dec 2023 09:03:25 +0100 Subject: [PATCH 130/811] make : add LLAMA_HIP_UMA option (#4587) NB: LLAMA_HIP_UMA=1 (or any value) adds MK_CPPFLAG -DGGML_HIP_UMA --- Makefile | 3 +++ README.md | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 68df7702a..42686ce71 100644 --- a/Makefile +++ b/Makefile @@ -452,6 +452,9 @@ ifdef LLAMA_HIPBLAS LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 MK_CPPFLAGS += -DGGML_USE_HIPBLAS -DGGML_USE_CUBLAS +ifdef LLAMA_HIP_UMA + MK_CPPFLAGS += -DGGML_HIP_UMA +endif # LLAMA_HIP_UMA MK_LDFLAGS += -L$(ROCM_PATH)/lib -Wl,-rpath=$(ROCM_PATH)/lib MK_LDFLAGS += -lhipblas -lamdhip64 -lrocblas HIPFLAGS += $(addprefix --offload-arch=,$(GPU_TARGETS)) diff --git a/README.md b/README.md index 8e17d5ba4..377d3928b 100644 --- a/README.md +++ b/README.md @@ -440,7 +440,13 @@ Building the program with BLAS support may lead to some performance improvements && cmake --build build -- -j 16 ``` On Linux it is also possible to use unified memory architecture (UMA) to share main memory between the CPU and integrated GPU by setting `-DLLAMA_HIP_UMA=ON"`. - However, this hurts performance for non-integrated GPUs. + However, this hurts performance for non-integrated GPUs (but enables working with integrated GPUs). 
+ + - Using `make` (example for target gfx1030, build with 16 CPU threads): + ```bash + make -j16 LLAMA_HIPBLAS=1 LLAMA_HIP_UMA=1 AMDGPU_TARGETS=gxf1030 + ``` + - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS, and assuming a gfx1100-compatible AMD GPU): ```bash set PATH=%HIP_PATH%\bin;%PATH% From 48b24b170e3b4f9dc28200306840cb07d1c123df Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 22 Dec 2023 09:26:49 +0000 Subject: [PATCH 131/811] ggml : add comment about backward GGML_OP_DIAG_MASK_INF (#4203) --- ggml.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.c b/ggml.c index f27920a2d..15e1984d1 100644 --- a/ggml.c +++ b/ggml.c @@ -15335,6 +15335,8 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor const int n_past = ((int32_t *) tensor->op_params)[0]; src0->grad = ggml_add_or_set(ctx, src0->grad, + /* ggml_diag_mask_inf_impl() shouldn't be here */ + /* ref: https://github.com/ggerganov/llama.cpp/pull/4203#discussion_r1412377992 */ ggml_diag_mask_zero_impl(ctx, tensor->grad, n_past, false), zero_table); } From 48b7ff193e64c97ab174280ba0eb8d14b47c49ba Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 22 Dec 2023 12:12:53 +0100 Subject: [PATCH 132/811] llama : fix platforms without mmap (#4578) * llama : fix platforms without mmap * win32 : limit prefetch size to the file size * fix win32 error clobber, unnecessary std::string in std::runtime_error --- ggml-cuda.cu | 3 ++- ggml.c | 6 ++++-- llama.cpp | 36 ++++++++++++++++++------------------ 3 files changed, 24 insertions(+), 21 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index ac91ee12e..37d7f2792 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7702,7 +7702,8 @@ inline void ggml_cuda_op_scale( GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - const float scale = ((float *) dst->op_params)[0]; + float scale; + memcpy(&scale, dst->op_params, sizeof(float)); scale_f32_cuda(src0_dd, dst_dd, scale, ggml_nelements(src0), main_stream); CUDA_CHECK(cudaGetLastError()); diff --git a/ggml.c b/ggml.c index 15e1984d1..3656422d7 100644 --- a/ggml.c +++ b/ggml.c @@ -10335,7 +10335,8 @@ static void ggml_compute_forward_scale_f32( } // scale factor - const float v = *(float *) dst->op_params; + float v; + memcpy(&v, dst->op_params, sizeof(float)); const int ith = params->ith; const int nth = params->nth; @@ -15152,7 +15153,8 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { // necessary for llama if (src0->grad) { - const float s = ((float *) tensor->op_params)[0]; + float s; + memcpy(&s, tensor->op_params, sizeof(float)); src0->grad = ggml_add_or_set(ctx, diff --git a/llama.cpp b/llama.cpp index cb0546c95..4e4495739 100644 --- a/llama.cpp +++ b/llama.cpp @@ -778,7 +778,7 @@ struct llama_file { throw std::runtime_error(format("read error: %s", strerror(errno))); } if (ret != 1) { - throw std::runtime_error(std::string("unexpectedly reached end of file")); + throw std::runtime_error("unexpectedly reached end of file"); } } @@ -931,29 +931,29 @@ struct llama_mmap { #elif defined(_WIN32) static constexpr bool SUPPORTED = true; - llama_mmap(struct llama_file * file, bool prefetch = true, bool numa = false) { - (void) numa; + llama_mmap(struct llama_file * file, size_t prefetch = (size_t) -1, bool numa = false) { + GGML_UNUSED(numa); size = file->size; HANDLE hFile = (HANDLE) _get_osfhandle(_fileno(file->fp)); HANDLE hMapping = CreateFileMappingA(hFile, NULL, PAGE_READONLY, 0, 0, NULL); - DWORD error = 
GetLastError(); if (hMapping == NULL) { + DWORD error = GetLastError(); throw std::runtime_error(format("CreateFileMappingA failed: %s", llama_format_win_err(error).c_str())); } addr = MapViewOfFile(hMapping, FILE_MAP_READ, 0, 0, 0); - error = GetLastError(); + DWORD error = GetLastError(); CloseHandle(hMapping); if (addr == NULL) { throw std::runtime_error(format("MapViewOfFile failed: %s", llama_format_win_err(error).c_str())); } - if (prefetch) { + if (prefetch > 0) { // PrefetchVirtualMemory is only present on Windows 8 and above, so we dynamically load it BOOL (WINAPI *pPrefetchVirtualMemory) (HANDLE, ULONG_PTR, PWIN32_MEMORY_RANGE_ENTRY, ULONG); HMODULE hKernel32 = GetModuleHandleW(L"kernel32.dll"); @@ -965,9 +965,9 @@ struct llama_mmap { // advise the kernel to preload the mapped memory WIN32_MEMORY_RANGE_ENTRY range; range.VirtualAddress = addr; - range.NumberOfBytes = (SIZE_T)size; + range.NumberOfBytes = (SIZE_T) std::min(size, prefetch); if (!pPrefetchVirtualMemory(GetCurrentProcess(), 1, &range, 0)) { - fprintf(stderr, "warning: PrefetchVirtualMemory failed: %s\n", + LLAMA_LOG_WARN("warning: PrefetchVirtualMemory failed: %s\n", llama_format_win_err(GetLastError()).c_str()); } } @@ -982,26 +982,26 @@ struct llama_mmap { ~llama_mmap() { if (!UnmapViewOfFile(addr)) { - fprintf(stderr, "warning: UnmapViewOfFile failed: %s\n", + LLAMA_LOG_WARN("warning: UnmapViewOfFile failed: %s\n", llama_format_win_err(GetLastError()).c_str()); } } #else static constexpr bool SUPPORTED = false; - llama_mmap(struct llama_file * file, bool prefetch = true, bool numa = false) { - (void) file; - (void) prefetch; - (void) numa; + llama_mmap(struct llama_file * file, size_t prefetch = -1, bool numa = false) { + GGML_UNUSED(file); + GGML_UNUSED(prefetch); + GGML_UNUSED(numa); - throw std::runtime_error(std::string("mmap not supported")); + throw std::runtime_error("mmap not supported"); } - void unmap(size_t offset, size_t len) { - (void) offset; - (void) len; + void unmap_fragment(size_t first, size_t last) { + GGML_UNUSED(first); + GGML_UNUSED(last); - throw std::runtime_error(std::string("mmap not supported")); + throw std::runtime_error("mmap not supported"); } #endif }; From 6724ef16573ec7ecce620be56cbbff145856b2fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrik=20Forst=C3=A9n?= Date: Fri, 22 Dec 2023 15:34:05 +0200 Subject: [PATCH 133/811] Fix CudaMemcpy direction (#4599) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 37d7f2792..da8fd1e09 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8843,7 +8843,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? 
- cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + cudaMemcpyDeviceToHost : cudaMemcpyDeviceToDevice; for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; From a55876955b1a83464171de8d578d3ab062a7b62d Mon Sep 17 00:00:00 2001 From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com> Date: Fri, 22 Dec 2023 23:11:12 +0800 Subject: [PATCH 134/811] cuda : fix jetson compile error (#4560) * fix old jetson compile error * Update Makefile * update jetson detect and cuda version detect * update cuda marco define * update makefile and cuda,fix some issue * Update README.md Co-authored-by: Georgi Gerganov * Update Makefile * Update README.md --------- Co-authored-by: Georgi Gerganov --- Makefile | 22 +++++++++++++++++++--- README.md | 3 +++ ggml-cuda.cu | 7 +++++++ ggml-quants.c | 4 ++-- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 42686ce71..6a998091b 100644 --- a/Makefile +++ b/Makefile @@ -282,8 +282,17 @@ endif ifneq ($(filter aarch64%,$(UNAME_M)),) # Apple M1, M2, etc. # Raspberry Pi 3, 4, Zero 2 (64-bit) + # Nvidia Jetson MK_CFLAGS += -mcpu=native MK_CXXFLAGS += -mcpu=native + JETSON_RELEASE_INFO = $(shell jetson_release) + ifdef JETSON_RELEASE_INFO + ifneq ($(filter TX2%,$(JETSON_RELEASE_INFO)),) + JETSON_EOL_MODULE_DETECT = 1 + CC = aarch64-unknown-linux-gnu-gcc + cxx = aarch64-unknown-linux-gnu-g++ + endif + endif endif ifneq ($(filter armv6%,$(UNAME_M)),) @@ -357,10 +366,13 @@ ifdef LLAMA_BLIS endif # LLAMA_BLIS ifdef LLAMA_CUBLAS - MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include - MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib + MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include + MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib OBJS += ggml-cuda.o - MK_NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math + MK_NVCCFLAGS = -use_fast_math +ifndef JETSON_EOL_MODULE_DETECT + MK_NVCCFLAGS += --forward-unknown-to-host-compiler +endif # JETSON_EOL_MODULE_DETECT ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo @@ -417,7 +429,11 @@ ifdef LLAMA_CUDA_CCBIN MK_NVCCFLAGS += -ccbin $(LLAMA_CUDA_CCBIN) endif ggml-cuda.o: ggml-cuda.cu ggml-cuda.h +ifdef JETSON_EOL_MODULE_DETECT + $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ +else $(NVCC) $(BASE_CXXFLAGS) $(NVCCFLAGS) -Wno-pedantic -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ +endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS ifdef LLAMA_CLBLAST diff --git a/README.md b/README.md index 377d3928b..649c3b333 100644 --- a/README.md +++ b/README.md @@ -396,6 +396,9 @@ Building the program with BLAS support may lead to some performance improvements - #### cuBLAS This provides BLAS acceleration using the CUDA cores of your Nvidia GPU. Make sure to have the CUDA toolkit installed. You can download it from your Linux distro's package manager (e.g. 
`apt install nvidia-cuda-toolkit`) or from here: [CUDA Toolkit](https://developer.nvidia.com/cuda-downloads). + + For Jetson user, if you have Jetson Orin, you can try this: [Offical Support](https://www.jetson-ai-lab.com/tutorial_text-generation.html). If you are using an old model(nano/TX2), need some additional operations before compiling. + - Using `make`: ```bash make LLAMA_CUBLAS=1 diff --git a/ggml-cuda.cu b/ggml-cuda.cu index da8fd1e09..b124774a9 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -90,6 +90,13 @@ #include #include #include +// CUDA 10.2 does not have these macro definitions. +#ifndef CUBLAS_TF32_TENSOR_OP_MATH +#define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH +#define CUBLAS_COMPUTE_16F CUDA_R_16F +#define CUBLAS_COMPUTE_32F CUDA_R_32F +#define cublasComputeType_t cudaDataType_t +#endif #endif // defined(GGML_USE_HIPBLAS) #include "ggml-cuda.h" diff --git a/ggml-quants.c b/ggml-quants.c index 0e8163a16..a15a24048 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3677,7 +3677,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t mins = vshrq_n_u8(mins_and_scales, 4); const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); - const ggml_int16x8x2_t mins16 = {vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}; + const ggml_int16x8x2_t mins16 = {{vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}}; const int32x4_t s0 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[0]), vget_low_s16 (q8sums.val[0])), vmull_s16(vget_high_s16(mins16.val[0]), vget_high_s16(q8sums.val[0]))); const int32x4_t s1 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[1]), vget_low_s16 (q8sums.val[1])), @@ -6626,7 +6626,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); const int8x16_t scales = vld1q_s8(scale); - const ggml_int16x8x2_t q6scales = {vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}; + const ggml_int16x8x2_t q6scales = {{vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}}; const int32x4_t prod = vaddq_s32(vaddq_s32(vmull_s16(vget_low_s16 (q8sums.val[0]), vget_low_s16 (q6scales.val[0])), vmull_s16(vget_high_s16(q8sums.val[0]), vget_high_s16(q6scales.val[0]))), From ba661751322a7c201fd3bef71af077c5aebfaa2a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 22 Dec 2023 17:53:43 +0200 Subject: [PATCH 135/811] sync : ggml (fix im2col) (#4591) * cuda : fix im2col_f32_f16 (ggml/#658) ggml-ci * ggml-alloc : fix ggml_tallocr_is_own --------- Co-authored-by: leejet --- ggml-alloc.c | 2 +- ggml-cuda.cu | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index a97436b17..a27dd54b0 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -72,7 +72,7 @@ static void remove_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * t // check if a tensor is allocated by this buffer static bool ggml_tallocr_is_own(ggml_tallocr_t alloc, const struct ggml_tensor * tensor) { - return tensor->buffer == alloc->buffer; + return tensor->buffer == alloc->buffer && (!tensor->view_src || tensor->view_src->buffer == alloc->buffer); } static bool ggml_is_view(struct ggml_tensor * t) { diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b124774a9..7c2a834e3 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5273,17 +5273,17 @@ static __global__ void im2col_f32_f16( const int ky = (i - kd) / 
OW; const int ix = i % OW; - const int iiw = ix * s0 + kx * d0 - p0; - const int iih = blockIdx.y * s1 + ky * d1 - p1; + const int64_t iiw = ix * s0 + kx * d0 - p0; + const int64_t iih = blockIdx.y * s1 + ky * d1 - p1; - const int offset_dst = + const int64_t offset_dst = (blockIdx.y * OW + ix) * CHW + (blockIdx.z * (KW * KH) + ky * KW + kx); if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { dst[offset_dst] = __float2half(0.0f); } else { - const int offset_src = blockIdx.z * offset_delta; + const int64_t offset_src = blockIdx.z * offset_delta; dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); } } From 7082d24cec35e9ce9147535a2224dfc67ee0a78c Mon Sep 17 00:00:00 2001 From: LeonEricsson <70749762+LeonEricsson@users.noreply.github.com> Date: Fri, 22 Dec 2023 17:05:56 +0100 Subject: [PATCH 136/811] lookup : add prompt lookup decoding example (#4484) * initial commit, going through initializations * main loop finished, starting to debug * BUG: generates gibberish/repeating tokens after a while * kv_cache management * Added colors to distinguish drafted tokens (--color). Updated README * lookup : fix token positions in the draft batch * lookup : use n_draft from CLI params * lookup : final touches --------- Co-authored-by: Leon Ericsson Co-authored-by: Georgi Gerganov --- .gitignore | 1 + Makefile | 5 +- common/common.h | 3 +- examples/CMakeLists.txt | 1 + examples/lookup/CMakeLists.txt | 5 + examples/lookup/README.md | 13 ++ examples/lookup/lookup.cpp | 230 +++++++++++++++++++++++++++++++++ 7 files changed, 256 insertions(+), 2 deletions(-) create mode 100644 examples/lookup/CMakeLists.txt create mode 100644 examples/lookup/README.md create mode 100644 examples/lookup/lookup.cpp diff --git a/.gitignore b/.gitignore index 76b3d2861..def74a1e9 100644 --- a/.gitignore +++ b/.gitignore @@ -48,6 +48,7 @@ models-mnt /llama-bench /llava-cli /lookahead +/lookup /main /metal /perplexity diff --git a/Makefile b/Makefile index 6a998091b..cb5a4e948 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ - speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead tests/test-c.o + speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup tests/test-c.o # Binaries only useful for tests TEST_TARGETS = \ @@ -664,6 +664,9 @@ parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + ifdef LLAMA_METAL metal: examples/metal/metal.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) diff --git a/common/common.h b/common/common.h index e87ce1133..9659aa045 100644 --- a/common/common.h +++ b/common/common.h @@ -51,7 +51,7 @@ struct gpt_params { int32_t n_ctx = 512; // context size int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 16; // number of tokens to draft during speculative decoding + int32_t n_draft = 8; // number of tokens to draft during speculative decoding int32_t n_chunks = 
-1; // max number of chunks to process (-1 = unlimited) int32_t n_parallel = 1; // number of parallel sequences to decode int32_t n_sequences = 1; // number of sequences to decode @@ -240,3 +240,4 @@ void dump_kv_cache_view(const llama_kv_cache_view & view, int row_size = 80); // Dump the KV cache view showing individual sequences in each cell (long output). void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size = 40); + diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 6744944fd..4cc13d6e9 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -33,6 +33,7 @@ else() add_subdirectory(simple) add_subdirectory(speculative) add_subdirectory(lookahead) + add_subdirectory(lookup) add_subdirectory(train-text-from-scratch) if (LLAMA_METAL) add_subdirectory(metal) diff --git a/examples/lookup/CMakeLists.txt b/examples/lookup/CMakeLists.txt new file mode 100644 index 000000000..c060b8f56 --- /dev/null +++ b/examples/lookup/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET lookup) +add_executable(${TARGET} lookup.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/lookup/README.md b/examples/lookup/README.md new file mode 100644 index 000000000..5bfb0de93 --- /dev/null +++ b/examples/lookup/README.md @@ -0,0 +1,13 @@ +# llama.cpp/examples/lookup + +Demonstration of Prompt Lookup Decoding + +https://github.com/apoorvumang/prompt-lookup-decoding + +The key parameters for lookup decoding are `ngram_min`, `ngram_max` and `n_draft`. The first two determine the size of the ngrams to search for in the prompt for a match. The latter specifies how many subsequent tokens to draft if a match is found. 
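As a rough illustration of what these parameters control, here is a minimal sketch (not the code added by this patch; the real implementation is in `lookup.cpp` further down): the drafting step searches the tokens seen so far for the longest recent n-gram that already occurred earlier, and proposes the tokens that followed that earlier occurrence as the draft.

```cpp
// Simplified sketch of prompt-lookup drafting (illustrative only):
// find an earlier occurrence of the n-gram that ends the sequence and
// propose the tokens that followed it as the draft.
#include <vector>

std::vector<int> draft_from_lookup(const std::vector<int> & inp,
                                   int ngram_min, int ngram_max, int n_draft) {
    const int n = (int) inp.size();
    for (int ngram_size = ngram_max; ngram_size >= ngram_min; --ngram_size) {
        if (n < 2*ngram_size) {
            continue; // not enough context for a non-overlapping match
        }
        const int * tail = inp.data() + n - ngram_size; // n-gram ending the sequence
        for (int i = 0; i + 2*ngram_size <= n; ++i) {
            bool match = true;
            for (int j = 0; j < ngram_size; ++j) {
                if (inp[i + j] != tail[j]) { match = false; break; }
            }
            if (match && i + ngram_size + n_draft <= n) {
                // the tokens that followed the earlier occurrence become the draft
                return std::vector<int>(inp.begin() + i + ngram_size,
                                        inp.begin() + i + ngram_size + n_draft);
            }
        }
    }
    return {}; // no usable match, so no draft this step
}
```

In the actual example below, the drafted tokens are additionally appended to the target batch with `llama_batch_add`, so the target model can verify the whole draft in a single `llama_decode` call.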
+ +More info: + +https://github.com/ggerganov/llama.cpp/pull/4484 +https://github.com/ggerganov/llama.cpp/issues/4226 + diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp new file mode 100644 index 000000000..d8de7dd38 --- /dev/null +++ b/examples/lookup/lookup.cpp @@ -0,0 +1,230 @@ +#include "common.h" +#include "llama.h" + +#include +#include +#include +#include + +int main(int argc, char ** argv){ + gpt_params params; + + if (!gpt_params_parse(argc, argv, params)) { + return 1; + } + + // max/min n-grams size to search for in prompt + const int ngram_max = 4; + const int ngram_min = 1; + + // length of the candidate / draft sequence, if match is found + const int n_draft = params.n_draft; + + const bool dump_kv_cache = params.dump_kv_cache; + +#ifndef LOG_DISABLE_LOGS + log_set_target(log_filename_generator("lookup", "log")); + LOG_TEE("Log start\n"); + log_dump_cmdline(argc, argv); +#endif // LOG_DISABLE_LOGS + + // init llama.cpp + llama_backend_init(params.numa); + + llama_model * model = NULL; + llama_context * ctx = NULL; + + // load the model + std::tie(model, ctx) = llama_init_from_gpt_params(params); + + // tokenize the prompt + const bool add_bos = llama_should_add_bos_token(model); + LOG("add_bos tgt: %d\n", add_bos); + + std::vector inp; + inp = ::llama_tokenize(ctx, params.prompt, add_bos, true); + + const int max_context_size = llama_n_ctx(ctx); + const int max_tokens_list_size = max_context_size - 4; + + if ((int) inp.size() > max_tokens_list_size) { + fprintf(stderr, "%s: error: prompt too long (%d tokens, max %d)\n", __func__, (int) inp.size(), max_tokens_list_size); + return 1; + } + + fprintf(stderr, "\n\n"); + + for (auto id : inp) { + fprintf(stderr, "%s", llama_token_to_piece(ctx, id).c_str()); + } + + fflush(stderr); + + const int n_input = inp.size(); + + const auto t_enc_start = ggml_time_us(); + + llama_decode(ctx, llama_batch_get_one( inp.data(), n_input - 1, 0, 0)); + llama_decode(ctx, llama_batch_get_one(&inp.back(), 1, n_input - 1, 0)); + + const auto t_enc_end = ggml_time_us(); + + int n_predict = 0; + int n_drafted = 0; + int n_accept = 0; + + int n_past = inp.size(); + + bool has_eos = false; + + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params.sparams); + + std::vector draft; + + llama_batch batch_tgt = llama_batch_init(params.n_ctx, 0, 1); + + // debug + struct llama_kv_cache_view kvc_view = llama_kv_cache_view_init(ctx, 1); + + const auto t_dec_start = ggml_time_us(); + + while (true) { + // debug + if (dump_kv_cache) { + llama_kv_cache_view_update(ctx, &kvc_view); + dump_kv_cache_view_seqs(kvc_view, 40); + } + + // print current draft sequence + LOG("drafted %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, draft).c_str()); + + int i_dft = 0; + while (true) { + // sample from the target model + llama_token id = llama_sampling_sample(ctx_sampling, ctx, NULL, i_dft); + + llama_sampling_accept(ctx_sampling, ctx, id, true); + + const std::string token_str = llama_token_to_piece(ctx, id); + + if (!params.use_color) { + printf("%s", token_str.c_str()); + } + + if (id == llama_token_eos(model)) { + has_eos = true; + } + + ++n_predict; + + // check if the target token matches the draft + if (i_dft < (int) draft.size() && id == draft[i_dft]) { + LOG("the sampled target token matches the %dth drafted token (%d, '%s') - accepted\n", i_dft, id, token_str.c_str()); + ++n_accept; + ++n_past; + ++i_dft; + inp.push_back(id); + + if (params.use_color) { + // color accepted draft token + printf("\033[34m%s\033[0m", token_str.c_str()); + 
fflush(stdout); + } + continue; + } + + if (params.use_color) { + printf("%s", token_str.c_str()); + } + fflush(stdout); + + + LOG("the sampled target token (%d, '%s') did not match, or we ran out of drafted tokens\n", id, token_str.c_str()); + + draft.clear(); + draft.push_back(id); + inp.push_back(id); + break; + } + + if ((params.n_predict > 0 && n_predict > params.n_predict) || has_eos) { + break; + } + + // KV cache management + // clean the cache of draft tokens that weren't accepted + llama_kv_cache_seq_rm(ctx, 0, n_past, -1); + + llama_batch_clear(batch_tgt); + llama_batch_add(batch_tgt, draft[0], n_past, { 0 }, true); + + // generate n_pred tokens through prompt lookup + auto prompt_lookup = [&]() -> void { + int inp_size = inp.size(); + for (int ngram_size = ngram_max ; ngram_size > ngram_min; --ngram_size){ + const llama_token * ngram = &inp[inp_size - ngram_size]; + + for (int i = 0; i <= (int) inp_size - (ngram_size * 2); ++i) { + bool match = true; + for (int j = 0; j < ngram_size; ++j) { + if (inp[i + j] != ngram[j]) { + match = false; + break; + } + } + + if (match) { + const int startIdx = i + ngram_size; + const int endIdx = startIdx + n_draft; + if (endIdx < inp_size) { + for (int j = startIdx; j < endIdx; ++j) { + LOG(" - draft candidate %d: %d\n", j, inp[j]); + draft.push_back(inp[j]); + llama_batch_add(batch_tgt, inp[j], n_past + (j - startIdx) + 1, { 0 }, true); + ++n_drafted; + } + return; + } + } + } + } + return; + }; + + prompt_lookup(); + + llama_decode(ctx, batch_tgt); + ++n_past; + + draft.erase(draft.begin()); + } + + auto t_dec_end = ggml_time_us(); + + LOG_TEE("\n\n"); + + LOG_TEE("encoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_input, (t_enc_end - t_enc_start) / 1e6f, inp.size() / ((t_enc_end - t_enc_start) / 1e6f)); + LOG_TEE("decoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_predict, (t_dec_end - t_dec_start) / 1e6f, n_predict / ((t_dec_end - t_dec_start) / 1e6f)); + + LOG_TEE("\n"); + LOG_TEE("n_draft = %d\n", n_draft); + LOG_TEE("n_predict = %d\n", n_predict); + LOG_TEE("n_drafted = %d\n", n_drafted); + LOG_TEE("n_accept = %d\n", n_accept); + LOG_TEE("accept = %.3f%%\n", 100.0f * n_accept / n_drafted); + + LOG_TEE("\ntarget:\n"); + llama_print_timings(ctx); + + llama_sampling_free(ctx_sampling); + llama_batch_free(batch_tgt); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + fprintf(stderr, "\n\n"); + + return 0; +} From e0a4002273907b2c414b6b5442d99e08bfe2df35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 23 Dec 2023 09:16:33 +0100 Subject: [PATCH 137/811] CUDA: fixed row rounding for 0 tensor splits (#4594) --- ggml-cuda.cu | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 7c2a834e3..490081cac 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7937,12 +7937,16 @@ static void ggml_cuda_op_mul_mat( if (id != 0) { row_low[id] = ne01*g_tensor_split[id]; - row_low[id] -= row_low[id] % rounding; + if (row_low[id] < ne01) { + row_low[id] -= row_low[id] % rounding; + } } if (id != g_device_count - 1) { row_high[id] = ne01*g_tensor_split[id + 1]; - row_high[id] -= row_high[id] % rounding; + if (row_high[id] < ne01) { + row_high[id] -= row_high[id] % rounding; + } } } } From b9ec82d262cb20d7f0a8a1157bfa9aace40e2625 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Sat, 23 Dec 2023 03:27:07 -0600 Subject: [PATCH 138/811] grammar : check the full vocab only if necessary (opt) 
(#4306) * Check the full vocab for grammar only if necessary * Fix missing logit restoration step (?) Does this matter, actually? * Fix whitespace / formatting * Adjust comment * Didn't mean to push test gbnf * Split sampling into the helper function (?) And also revert the changes made to the header * common : fix final newline --------- Co-authored-by: Georgi Gerganov --- common/sampling.cpp | 48 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index f4e76df31..5b15204be 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -149,11 +149,12 @@ static void sampler_queue( } } -llama_token llama_sampling_sample( +static llama_token llama_sampling_sample_impl( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, struct llama_context * ctx_cfg, - const int idx) { + const int idx, + bool is_resampling) { // Add a parameter to indicate if we are resampling const llama_sampling_params & params = ctx_sampling->params; const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); @@ -173,8 +174,17 @@ llama_token llama_sampling_sample( llama_token id = 0; + // Get a pointer to the logits float * logits = llama_get_logits_ith(ctx_main, idx); + // Declare original_logits at the beginning of the function scope + std::vector original_logits; + + if (!is_resampling) { + // Only make a copy of the original logits if we are not in the resampling phase, not sure if I actually have to do this. + original_logits = std::vector(logits, logits + llama_n_vocab(llama_get_model(ctx_main))); + } + // apply params.logit_bias map for (auto it = params.logit_bias.begin(); it != params.logit_bias.end(); it++) { logits[it->first] += it->second; @@ -210,7 +220,8 @@ llama_token llama_sampling_sample( } } - if (ctx_sampling->grammar != NULL) { + // If we are in the resampling phase, apply grammar checks before sampling logic + if (is_resampling && ctx_sampling->grammar != NULL) { llama_sample_grammar(ctx_main, &cur_p, ctx_sampling->grammar); } @@ -252,9 +263,40 @@ llama_token llama_sampling_sample( } } + if (ctx_sampling->grammar != NULL && !is_resampling) { + // Create an array with a single token data element for the sampled id + llama_token_data single_token_data = {id, logits[id], 0.0f}; + llama_token_data_array single_token_data_array = { &single_token_data, 1, false }; + + // Apply grammar constraints to the single token + llama_sample_grammar(ctx_main, &single_token_data_array, ctx_sampling->grammar); + + // Check if the token is valid according to the grammar by seeing if its logit has been set to -INFINITY + bool is_valid = single_token_data_array.data[0].logit != -INFINITY; + + // If the token is not valid according to the grammar, perform resampling + if (!is_valid) { + LOG("Resampling because token %d: '%s' does not meet grammar rules\n", id, llama_token_to_piece(ctx_main, id).c_str()); + + // Restore logits from the copy + std::copy(original_logits.begin(), original_logits.end(), logits); + + return llama_sampling_sample_impl(ctx_sampling, ctx_main, ctx_cfg, idx, true); // Pass true for is_resampling + } + } + return id; } +llama_token llama_sampling_sample( + struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_main, + struct llama_context * ctx_cfg, + const int idx) { + // Call the implementation function with is_resampling set to false by default + return llama_sampling_sample_impl(ctx_sampling, ctx_main, ctx_cfg, idx, false); +} + void 
llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, From 6123979952385847d8348e295d77d6e01da8aa84 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sat, 23 Dec 2023 09:31:49 +0000 Subject: [PATCH 139/811] server : allow to specify custom prompt for penalty calculation (#3727) --- common/sampling.cpp | 8 ++++--- common/sampling.h | 3 +++ examples/server/README.md | 2 ++ examples/server/server.cpp | 44 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 54 insertions(+), 3 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 5b15204be..8e45909f1 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -203,12 +203,14 @@ static llama_token llama_sampling_sample_impl( } // apply penalties - if (!prev.empty()) { + const auto& penalty_tokens = params.use_penalty_prompt_tokens ? params.penalty_prompt_tokens : prev; + const int penalty_tokens_used_size = std::min((int)penalty_tokens.size(), penalty_last_n); + if (penalty_tokens_used_size) { const float nl_logit = logits[llama_token_nl(llama_get_model(ctx_main))]; llama_sample_repetition_penalties(ctx_main, &cur_p, - prev.data() + prev.size() - penalty_last_n, - penalty_last_n, penalty_repeat, penalty_freq, penalty_present); + penalty_tokens.data() + penalty_tokens.size() - penalty_tokens_used_size, + penalty_tokens_used_size, penalty_repeat, penalty_freq, penalty_present); if (!penalize_nl) { for (size_t idx = 0; idx < cur_p.size; idx++) { diff --git a/common/sampling.h b/common/sampling.h index fdfa9eed1..f16ef97e3 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -36,6 +36,9 @@ typedef struct llama_sampling_params { float cfg_scale = 1.f; // how strong is guidance std::unordered_map logit_bias; // logit bias for specific tokens + + std::vector penalty_prompt_tokens; + bool use_penalty_prompt_tokens = false; } llama_sampling_params; // general sampler context diff --git a/examples/server/README.md b/examples/server/README.md index 0751b9612..f1e586a1c 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -148,6 +148,8 @@ node index.js `frequency_penalty`: Repeat alpha frequency penalty (default: 0.0, 0.0 = disabled); + `penalty_prompt`: This will replace the `prompt` for the purpose of the penalty evaluation. Can be either `null`, a string or an array of numbers representing tokens (default: `null` = use the original `prompt`). + `mirostat`: Enable Mirostat sampling, controlling perplexity during text generation (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). `mirostat_tau`: Set the Mirostat target entropy, parameter tau (default: 5.0). 
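For illustration only (not part of this patch), a sketch of how a client might set the new field, using the JSON field names documented above and the nlohmann::json library that the server example already depends on; the token ids in the commented-out variant are placeholders:

```cpp
// Illustrative only: building a /completion request body that overrides the text
// used for repetition-penalty bookkeeping via "penalty_prompt".
#include <nlohmann/json.hpp>

nlohmann::json make_request_with_penalty_prompt() {
    nlohmann::json req;
    req["prompt"]         = "Write a short poem about the sea.";
    req["n_predict"]      = 64;
    req["repeat_penalty"] = 1.2;

    // Variant 1: a string, which the server tokenizes and uses in place of the
    // original prompt when applying repetition penalties.
    req["penalty_prompt"] = "sea ocean waves";

    // Variant 2: explicit token ids (values below are placeholders, not real ids):
    // req["penalty_prompt"] = nlohmann::json::array({ 15043, 29871, 3186 });

    return req;
}
```

Per the patch below, a string value is tokenized server-side, while an array is validated token by token against the model's vocabulary before being used.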
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 04038530f..72dfe452c 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -761,6 +761,42 @@ struct llama_server_context slot->prompt = ""; } + slot->sparams.penalty_prompt_tokens.clear(); + slot->sparams.use_penalty_prompt_tokens = false; + const auto &penalty_prompt = data.find("penalty_prompt"); + if (penalty_prompt != data.end()) + { + if (penalty_prompt->is_string()) + { + const auto penalty_prompt_string = penalty_prompt->get(); + auto penalty_tokens = llama_tokenize(model, penalty_prompt_string, false); + slot->sparams.penalty_prompt_tokens.swap(penalty_tokens); + if (slot->params.n_predict > 0) + { + slot->sparams.penalty_prompt_tokens.reserve(slot->sparams.penalty_prompt_tokens.size() + slot->params.n_predict); + } + slot->sparams.use_penalty_prompt_tokens = true; + } + else if (penalty_prompt->is_array()) + { + const auto n_tokens = penalty_prompt->size(); + slot->sparams.penalty_prompt_tokens.reserve(n_tokens + std::max(0, slot->params.n_predict)); + const int n_vocab = llama_n_vocab(model); + for (const auto &penalty_token : *penalty_prompt) + { + if (penalty_token.is_number_integer()) + { + const auto tok = penalty_token.get(); + if (tok >= 0 && tok < n_vocab) + { + slot->sparams.penalty_prompt_tokens.push_back(tok); + } + } + } + slot->sparams.use_penalty_prompt_tokens = true; + } + } + slot->sparams.logit_bias.clear(); if (json_value(data, "ignore_eos", false)) @@ -992,6 +1028,12 @@ struct llama_server_context slot.generated_text += token_str; slot.has_next_token = true; + if (slot.ctx_sampling->params.use_penalty_prompt_tokens && result.tok != -1) + { + // we can change penalty_prompt_tokens because it is always created from scratch each request + slot.ctx_sampling->params.penalty_prompt_tokens.push_back(result.tok); + } + // check if there is incomplete UTF-8 character at the end bool incomplete = false; for (unsigned i = 1; i < 5 && i <= slot.generated_text.size(); ++i) @@ -1183,6 +1225,8 @@ struct llama_server_context {"repeat_penalty", slot.sparams.penalty_repeat}, {"presence_penalty", slot.sparams.penalty_present}, {"frequency_penalty", slot.sparams.penalty_freq}, + {"penalty_prompt_tokens", slot.sparams.penalty_prompt_tokens}, + {"use_penalty_prompt_tokens", slot.sparams.use_penalty_prompt_tokens}, {"mirostat", slot.sparams.mirostat}, {"mirostat_tau", slot.sparams.mirostat_tau}, {"mirostat_eta", slot.sparams.mirostat_eta}, From 925e5584a058afb612f9c20bc472c130f5d0f891 Mon Sep 17 00:00:00 2001 From: Samuel Maynard Date: Sat, 23 Dec 2023 11:35:55 +0200 Subject: [PATCH 140/811] ci(docker): fix tags in "Build and push docker image (tagged)" (#4603) --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 7f4de50ea..87904b75e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -98,5 +98,5 @@ jobs: context: . 
push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" , "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }},ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" file: ${{ matrix.config.dockerfile }} From 708e179e8562c2604240df95a2241dea17fd808b Mon Sep 17 00:00:00 2001 From: slaren Date: Sat, 23 Dec 2023 16:10:51 +0100 Subject: [PATCH 141/811] fallback to CPU buffer if host buffer alloc fails (#4610) --- ggml-cuda.cu | 11 ++++++----- llama.cpp | 16 +++++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 490081cac..f9830328b 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6729,8 +6729,7 @@ void * ggml_cuda_host_malloc(size_t size) { void * ptr = nullptr; cudaError_t err = cudaMallocHost((void **) &ptr, size); if (err != cudaSuccess) { - // The allocation error can be bypassed. A null ptr will assigned out of this function. - // This can fixed the OOM error in WSL. + // clear the error cudaGetLastError(); fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory: %s\n", size/1024.0/1024.0, cudaGetErrorString(err)); @@ -9674,12 +9673,14 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { // host buffer type static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - CUDA_CHECK(cudaFreeHost(buffer->context)); + ggml_cuda_host_free(buffer->context); } static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - void * ptr; - CUDA_CHECK(cudaMallocHost(&ptr, size)); + void * ptr = ggml_cuda_host_malloc(size); + if (ptr == nullptr) { + return nullptr; + } // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); diff --git a/llama.cpp b/llama.cpp index 4e4495739..5699a0fcf 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1177,21 +1177,27 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ } static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { + ggml_backend_buffer_type_t buft = nullptr; + #ifdef GGML_USE_METAL if (n_gpu_layers > 0) { - return ggml_backend_metal_buffer_type(); + buft = ggml_backend_metal_buffer_type(); } #elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (n_gpu_layers > 0) { - return ggml_backend_cuda_buffer_type(0); + buft = ggml_backend_cuda_buffer_type(0); } #elif defined(GGML_USE_CUBLAS) - return ggml_backend_cuda_host_buffer_type(); + buft = ggml_backend_cuda_host_buffer_type(); #elif defined(GGML_USE_CPU_HBM) - return ggml_backend_cpu_hbm_buffer_type(); + buft = ggml_backend_cpu_hbm_buffer_type(); #endif - return ggml_backend_cpu_buffer_type(); + if (buft == nullptr) { + buft = ggml_backend_cpu_buffer_type(); + } + + return buft; GGML_UNUSED(n_gpu_layers); } From 5bf3953d7e9831ea22b0bc017ce97409b801ccf1 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 24 Dec 2023 14:34:22 +0100 Subject: [PATCH 142/811] cuda : improve cuda pool efficiency using virtual memory (#4606) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * cuda : improve cuda pool efficiency using virtual memory * fix mixtral * fix 
cmake build * check for vmm support, disable for hip ggml-ci * fix hip build * clarify granularity * move all caps to g_device_caps * refactor error checking * add cuda_pool_alloc, refactor most pool allocations ggml-ci * fix hip build * CUBLAS_TF32_TENSOR_OP_MATH is not a macro * more hip crap * llama : fix msvc warnings * ggml : fix msvc warnings * minor * minor * cuda : fallback to CPU on host buffer alloc fail * Update ggml-cuda.cu Co-authored-by: Johannes Gäßler * Update ggml-cuda.cu Co-authored-by: Johannes Gäßler * ensure allocations are always aligned * act_size -> actual_size --------- Co-authored-by: Johannes Gäßler --- CMakeLists.txt | 2 + Makefile | 6 +- ggml-backend.c | 16 +- ggml-cuda.cu | 499 +++++++++++++++++++++++++++---------------- ggml.c | 2 +- ggml.h | 2 + llama.cpp | 6 +- tests/test-grad0.cpp | 3 - 8 files changed, 328 insertions(+), 208 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6fc6508c5..545aab267 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -302,6 +302,8 @@ if (LLAMA_CUBLAS) set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart CUDA::cublas CUDA::cublasLt) endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cuda_driver) + if (NOT DEFINED CMAKE_CUDA_ARCHITECTURES) # 52 == lowest CUDA 12 standard # 60 == f16 CUDA intrinsics diff --git a/Makefile b/Makefile index cb5a4e948..28c6d79bc 100644 --- a/Makefile +++ b/Makefile @@ -367,17 +367,15 @@ endif # LLAMA_BLIS ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include - MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib + MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o MK_NVCCFLAGS = -use_fast_math ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif # JETSON_EOL_MODULE_DETECT - ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo -endif - +endif # LLAMA_DEBUG ifdef LLAMA_CUDA_NVCC NVCC = $(LLAMA_CUDA_NVCC) else diff --git a/ggml-backend.c b/ggml-backend.c index 0c8c9ec43..526ce732b 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -297,7 +297,7 @@ static void ggml_backend_registry_init(void) { void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { GGML_ASSERT(ggml_backend_registry_count < GGML_MAX_BACKENDS_REG); - int id = ggml_backend_registry_count; + size_t id = ggml_backend_registry_count; ggml_backend_registry[id] = (struct ggml_backend_reg) { /* .name = */ {0}, @@ -330,6 +330,8 @@ size_t ggml_backend_reg_find_by_name(const char * name) { return i; } } + + // not found return SIZE_MAX; } @@ -340,15 +342,15 @@ ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str) const char * params = strchr(backend_str, ':'); char backend_name[128]; if (params == NULL) { - strcpy(backend_name, backend_str); + snprintf(backend_name, sizeof(backend_name), "%s", backend_str); params = ""; } else { - strncpy(backend_name, backend_str, params - backend_str); - backend_name[params - backend_str] = '\0'; + snprintf(backend_name, sizeof(backend_name), "%.*s", (int)(params - backend_str), 
backend_str); params++; } size_t backend_i = ggml_backend_reg_find_by_name(backend_name); + if (backend_i == SIZE_MAX) { fprintf(stderr, "%s: backend %s not found\n", __func__, backend_name); return NULL; @@ -396,18 +398,12 @@ static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { } static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy((char *)tensor->data + offset, data, size); GGML_UNUSED(buffer); } static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy(data, (const char *)tensor->data + offset, size); GGML_UNUSED(buffer); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f9830328b..ac3b3c14d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -86,17 +86,28 @@ #define cudaStream_t hipStream_t #define cudaSuccess hipSuccess #define __trap abort +#define CUBLAS_STATUS_SUCCESS HIPBLAS_STATUS_SUCCESS +#define CUBLAS_STATUS_NOT_INITIALIZED HIPBLAS_STATUS_NOT_INITIALIZED +#define CUBLAS_STATUS_ALLOC_FAILED HIPBLAS_STATUS_ALLOC_FAILED +#define CUBLAS_STATUS_INVALID_VALUE HIPBLAS_STATUS_INVALID_VALUE +#define CUBLAS_STATUS_ARCH_MISMATCH HIPBLAS_STATUS_ARCH_MISMATCH +#define CUBLAS_STATUS_MAPPING_ERROR HIPBLAS_STATUS_MAPPING_ERROR +#define CUBLAS_STATUS_EXECUTION_FAILED HIPBLAS_STATUS_EXECUTION_FAILED +#define CUBLAS_STATUS_INTERNAL_ERROR HIPBLAS_STATUS_INTERNAL_ERROR +#define CUBLAS_STATUS_NOT_SUPPORTED HIPBLAS_STATUS_NOT_SUPPORTED #else #include +#include #include #include -// CUDA 10.2 does not have these macro definitions. 
-#ifndef CUBLAS_TF32_TENSOR_OP_MATH + +#if CUDART_VERSION < 11020 #define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH #define CUBLAS_COMPUTE_16F CUDA_R_16F #define CUBLAS_COMPUTE_32F CUDA_R_32F #define cublasComputeType_t cudaDataType_t -#endif +#endif // CUDART_VERSION < 11020 + #endif // defined(GGML_USE_HIPBLAS) #include "ggml-cuda.h" @@ -200,45 +211,45 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); -#define CUDA_CHECK(err) \ - do { \ - cudaError_t err_ = (err); \ - if (err_ != cudaSuccess) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\nCUDA error %d at %s:%d: %s\n", err_, __FILE__, __LINE__, \ - cudaGetErrorString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"CUDA error"); \ - } \ - } while (0) - #if CUDART_VERSION >= 12000 -#define CUBLAS_CHECK(err) \ - do { \ - cublasStatus_t err_ = (err); \ - if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\ncuBLAS error %d at %s:%d: %s\n", \ - err_, __FILE__, __LINE__, cublasGetStatusString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"cuBLAS error"); \ - } \ - } while (0) + static const char * cublas_get_error_str(const cublasStatus_t err) { + return cublasGetStatusString(err); + } #else -#define CUBLAS_CHECK(err) \ - do { \ - cublasStatus_t err_ = (err); \ - if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\ncuBLAS error %d at %s:%d\n", err_, __FILE__, __LINE__); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"cuBLAS error"); \ - } \ - } while (0) -#endif // CUDART_VERSION >= 11 + static const char * cublas_get_error_str(const cublasStatus_t err) { + switch (err) { + case CUBLAS_STATUS_SUCCESS: return "CUBLAS_STATUS_SUCCESS"; + case CUBLAS_STATUS_NOT_INITIALIZED: return "CUBLAS_STATUS_NOT_INITIALIZED"; + case CUBLAS_STATUS_ALLOC_FAILED: return "CUBLAS_STATUS_ALLOC_FAILED"; + case CUBLAS_STATUS_INVALID_VALUE: return "CUBLAS_STATUS_INVALID_VALUE"; + case CUBLAS_STATUS_ARCH_MISMATCH: return "CUBLAS_STATUS_ARCH_MISMATCH"; + case CUBLAS_STATUS_MAPPING_ERROR: return "CUBLAS_STATUS_MAPPING_ERROR"; + case CUBLAS_STATUS_EXECUTION_FAILED: return "CUBLAS_STATUS_EXECUTION_FAILED"; + case CUBLAS_STATUS_INTERNAL_ERROR: return "CUBLAS_STATUS_INTERNAL_ERROR"; + case CUBLAS_STATUS_NOT_SUPPORTED: return "CUBLAS_STATUS_NOT_SUPPORTED"; + default: return "unknown error"; + } + } +#endif // CUDART_VERSION >= 12000 + +[[noreturn]] +static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { + fprintf(stderr, "CUDA error: %s: %s\n", stmt, msg); + fprintf(stderr, " in function %s at %s:%d\n", func, file, line); + GGML_ASSERT(!"CUDA error"); +} + +#define CUDA_CHECK(err) do { auto err_ = (err); if (err_ != cudaSuccess) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cudaGetErrorString(err_)); } while (0) +#define CUBLAS_CHECK(err) do { auto err_ = (err); if (err_ != CUBLAS_STATUS_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cublas_get_error_str(err_)); } while (0) + +#if !defined(GGML_USE_HIPBLAS) +static const char * cu_get_error_str(CUresult err) { + const char * err_str; + cuGetErrorString(err, &err_str); + return err_str; +} +#define CU_CHECK(err) do { auto err_ = (err); if (err_ != CUDA_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cu_get_error_str(err_)); } while (0) +#endif #if 
CUDART_VERSION >= 11100 #define GGML_CUDA_ASSUME(x) __builtin_assume(x) @@ -516,9 +527,17 @@ inline cudaError_t ggml_cuda_set_device(const int device) { static int g_device_count = -1; static int g_main_device = 0; -static int g_compute_capabilities[GGML_CUDA_MAX_DEVICES]; static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; +struct cuda_device_capabilities { + int cc; // compute capability + bool vmm; // virtual memory support + size_t vmm_granularity; // granularity of virtual memory +}; + +static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; + + static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default static size_t g_scratch_offset = 0; @@ -5875,7 +5894,7 @@ static void ggml_mul_mat_q4_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -5920,7 +5939,7 @@ static void ggml_mul_mat_q4_1_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -5965,7 +5984,7 @@ static void ggml_mul_mat_q5_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6010,7 +6029,7 @@ static void ggml_mul_mat_q5_1_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6055,7 +6074,7 @@ static void ggml_mul_mat_q8_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6100,7 +6119,7 @@ static void ggml_mul_mat_q2_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6147,7 +6166,7 @@ static void ggml_mul_mat_q3_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6193,7 +6212,7 @@ static void ggml_mul_mat_q4_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6238,7 +6257,7 @@ static void ggml_mul_mat_q5_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6283,7 +6302,7 @@ static void ggml_mul_mat_q6_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= 
CC_RDNA2) { @@ -6543,21 +6562,24 @@ struct scoped_spin_lock { scoped_spin_lock& operator=(const scoped_spin_lock&) = delete; }; +static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; + +// #define DEBUG_CUDA_MALLOC struct cuda_buffer { void * ptr = nullptr; size_t size = 0; }; static cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; -static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; +static size_t g_cuda_pool_size[GGML_CUDA_MAX_DEVICES] = {0}; -static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC int nnz = 0; - size_t max_size = 0, tot_size = 0; + size_t max_size = 0; #endif size_t best_diff = 1ull << 36; int ibest = -1; @@ -6566,7 +6588,6 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { if (b.ptr != nullptr) { #ifdef DEBUG_CUDA_MALLOC ++nnz; - tot_size += b.size; if (b.size > max_size) max_size = b.size; #endif if (b.size >= size) { @@ -6593,19 +6614,20 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { b.size = 0; return ptr; } -#ifdef DEBUG_CUDA_MALLOC - fprintf(stderr, "%s: %d buffers, max_size = %u MB, tot_size = %u MB, requested %u MB\n", __func__, nnz, - (uint32_t)(max_size/1024/1024), (uint32_t)(tot_size/1024/1024), (uint32_t)(size/1024/1024)); -#endif void * ptr; size_t look_ahead_size = (size_t) (1.05 * size); look_ahead_size = 256 * ((look_ahead_size + 255)/256); CUDA_CHECK(cudaMalloc((void **) &ptr, look_ahead_size)); *actual_size = look_ahead_size; + g_cuda_pool_size[id] += look_ahead_size; +#ifdef DEBUG_CUDA_MALLOC + fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, + (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); +#endif return ptr; } -static void ggml_cuda_pool_free(void * ptr, size_t size) { +static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; CUDA_CHECK(cudaGetDevice(&id)); @@ -6620,8 +6642,152 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { } fprintf(stderr, "WARNING: cuda buffer pool full, increase MAX_CUDA_BUFFERS\n"); CUDA_CHECK(cudaFree(ptr)); + g_cuda_pool_size[id] -= size; } +#if !defined(GGML_USE_HIPBLAS) +// pool with virtual memory +static std::vector g_cuda_pool_handles[GGML_CUDA_MAX_DEVICES]; +static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; +static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; +static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB + +static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { + scoped_spin_lock lock(g_cuda_pool_lock); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + + // round up the allocation size to the alignment to ensure that all allocations are aligned for all data types + const size_t alignment = 128; + size = alignment * ((size + alignment - 1) / alignment); + + size_t avail = g_cuda_pool_size[id] - g_cuda_pool_used[id]; + + if (size > avail) { + // round up to the next multiple of the granularity + size_t reserve_size = size - avail; + const size_t granularity = g_device_caps[id].vmm_granularity; + reserve_size = granularity * ((reserve_size + granularity - 1) / granularity); + + GGML_ASSERT(g_cuda_pool_size[id] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); + + // allocate more physical 
memory + CUmemAllocationProp prop = {}; + prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; + prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + prop.location.id = id; + CUmemGenericAllocationHandle handle; + CU_CHECK(cuMemCreate(&handle, reserve_size, &prop, 0)); + + // reserve virtual address space (if not already reserved) + if (g_cuda_pool_addr[id] == 0) { + CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[id], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); + } + + // map at the end of the pool + CU_CHECK(cuMemMap(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, 0, handle, 0)); + + // set access + CUmemAccessDesc access = {}; + access.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + access.location.id = id; + access.flags = CU_MEM_ACCESS_FLAGS_PROT_READWRITE; + CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, &access, 1)); + + // add to the pool + g_cuda_pool_handles[id].push_back(handle); + g_cuda_pool_size[id] += reserve_size; + + //printf("cuda pool[%d]: size increased to %llu MB (reserved %llu MB)\n", + // id, (unsigned long long) (g_cuda_pool_size[id]/1024/1024), + // (unsigned long long) (reserve_size/1024/1024)); + } + + GGML_ASSERT(g_cuda_pool_addr[id] != 0); + + void * ptr = (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id]); + *actual_size = size; + g_cuda_pool_used[id] += size; + +#ifdef DEBUG_CUDA_MALLOC + printf("cuda pool[%d]: allocated %llu bytes at %llx [%s]\n", id, (unsigned long long) size, ptr); +#endif + + return ptr; +} + +static void ggml_cuda_pool_free_vmm(void * ptr, size_t size) { + scoped_spin_lock lock(g_cuda_pool_lock); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + +#ifdef DEBUG_CUDA_MALLOC + printf("cuda pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); +#endif + + g_cuda_pool_used[id] -= size; + + // all deallocations must be in reverse order of the allocations + GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id])); +} + +static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { + int id; + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].vmm) { + return ggml_cuda_pool_malloc_vmm(size, actual_size); + } else { + return ggml_cuda_pool_malloc_leg(size, actual_size); + } +} + +static void ggml_cuda_pool_free(void * ptr, size_t size) { + int id; + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].vmm) { + ggml_cuda_pool_free_vmm(ptr, size); + } else { + ggml_cuda_pool_free_leg(ptr, size); + } +} +#else +#define ggml_cuda_pool_malloc ggml_cuda_pool_malloc_leg +#define ggml_cuda_pool_free ggml_cuda_pool_free_leg +#endif // !defined(GGML_USE_HIPBLAS) + +template +struct cuda_pool_alloc { + T * ptr = nullptr; + size_t actual_size = 0; + + // size is in number of elements + T * alloc(size_t size) { + GGML_ASSERT(ptr == nullptr); + ptr = (T *) ggml_cuda_pool_malloc(size * sizeof(T), &this->actual_size); + return ptr; + } + + cuda_pool_alloc(size_t size) { + alloc(size); + } + + ~cuda_pool_alloc() { + if (ptr != nullptr) { + ggml_cuda_pool_free(ptr, actual_size); + } + } + + T * get() { + return ptr; + } + + cuda_pool_alloc() = default; + cuda_pool_alloc(const cuda_pool_alloc &) = delete; + cuda_pool_alloc(cuda_pool_alloc &&) = delete; + cuda_pool_alloc& operator=(const cuda_pool_alloc &) = delete; + cuda_pool_alloc& operator=(cuda_pool_alloc &&) = delete; +}; + static bool g_cublas_loaded = false; bool ggml_cublas_loaded(void) { @@ -6660,16 +6826,33 @@ void ggml_init_cublas() { #endif fprintf(stderr, "%s: found %d " GGML_CUDA_NAME " devices:\n", __func__, 
g_device_count); for (int id = 0; id < g_device_count; ++id) { + int device_vmm = 0; + +#if !defined(GGML_USE_HIPBLAS) + CUdevice device; + CU_CHECK(cuDeviceGet(&device, id)); + CU_CHECK(cuDeviceGetAttribute(&device_vmm, CU_DEVICE_ATTRIBUTE_VIRTUAL_MEMORY_MANAGEMENT_SUPPORTED, device)); + + if (device_vmm) { + CUmemAllocationProp alloc_prop = {}; + alloc_prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; + alloc_prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + alloc_prop.location.id = id; + CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_MINIMUM)); + } +#endif // !defined(GGML_USE_HIPBLAS) + g_device_caps[id].vmm = !!device_vmm; + cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, id)); - fprintf(stderr, " Device %d: %s, compute capability %d.%d\n", id, prop.name, prop.major, prop.minor); + fprintf(stderr, " Device %d: %s, compute capability %d.%d, VMM: %s\n", id, prop.name, prop.major, prop.minor, device_vmm ? "yes" : "no"); g_tensor_split[id] = total_vram; total_vram += prop.totalGlobalMem; #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - g_compute_capabilities[id] = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; + g_device_caps[id].cc = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; #else - g_compute_capabilities[id] = 100*prop.major + 10*prop.minor; + g_device_caps[id].cc = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } for (int id = 0; id < g_device_count; ++id) { @@ -7178,11 +7361,11 @@ static int64_t get_row_rounding(ggml_type type) { int64_t max_compute_capability = INT_MIN; for (int64_t id = 0; id < g_device_count; ++id) { if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - if (min_compute_capability > g_compute_capabilities[id]) { - min_compute_capability = g_compute_capabilities[id]; + if (min_compute_capability > g_device_caps[id].cc) { + min_compute_capability = g_device_caps[id].cc; } - if (max_compute_capability < g_compute_capabilities[id]) { - max_compute_capability = g_compute_capabilities[id]; + if (max_compute_capability < g_device_caps[id].cc) { + max_compute_capability = g_device_caps[id].cc; } } } @@ -7297,8 +7480,8 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics #ifdef GGML_CUDA_F16 - size_t ash; - dfloat * src1_dfloat = nullptr; // dfloat == half + cuda_pool_alloc src1_dfloat_a; + half * src1_dfloat = nullptr; // dfloat == half bool src1_convert_f16 = src0->type == GGML_TYPE_Q4_0 || src0->type == GGML_TYPE_Q4_1 || @@ -7306,7 +7489,7 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( src0->type == GGML_TYPE_Q8_0 || src0->type == GGML_TYPE_F16; if (src1_convert_f16) { - src1_dfloat = (half *) ggml_cuda_pool_malloc(ne00*sizeof(half), &ash); + src1_dfloat = src1_dfloat_a.alloc(ne00); ggml_cpy_f32_f16_cuda((const char *) src1_ddf_i, (char *) src1_dfloat, ne00, ne00, 1, sizeof(float), 0, 0, ne00, 1, sizeof(half), 0, 0, stream); @@ -7354,12 +7537,6 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( break; } -#ifdef GGML_CUDA_F16 - if (src1_convert_f16) { - ggml_cuda_pool_free(src1_dfloat, ash); - } -#endif // GGML_CUDA_F16 - (void) src1; (void) dst; (void) src1_ddq_i; @@ -7390,33 +7567,30 @@ inline void ggml_cuda_op_mul_mat_cublas( // ldc == nrows of the matrix that cuBLAS writes into int ldc = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? 
ne0 : row_diff; - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 - half * src0_as_f16 = nullptr; - size_t src0_as = 0; + cuda_pool_alloc src0_as_f16; if (src0->type != GGML_TYPE_F16) { const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src0->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = row_diff*ne00; - src0_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src0_as); - to_fp16_cuda(src0_dd_i, src0_as_f16, ne, stream); + src0_as_f16.alloc(ne); + to_fp16_cuda(src0_dd_i, src0_as_f16.get(), ne, stream); } - const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16; + const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16.get(); - half * src1_as_f16 = nullptr; - size_t src1_as = 0; + cuda_pool_alloc src1_as_f16; if (src1->type != GGML_TYPE_F16) { const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = src1_ncols*ne10; - src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src1_as); - to_fp16_cuda(src1_ddf_i, src1_as_f16, ne, stream); + src1_as_f16.alloc(ne); + to_fp16_cuda(src1_ddf_i, src1_as_f16.get(), ne, stream); } - const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddf_i : src1_as_f16; - size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(row_diff*src1_ncols * sizeof(half), &dst_as); + const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddf_i : src1_as_f16.get(); + cuda_pool_alloc dst_f16(row_diff*src1_ncols); const half alpha_f16 = 1.0f; const half beta_f16 = 0.0f; @@ -7425,36 +7599,25 @@ inline void ggml_cuda_op_mul_mat_cublas( CUBLAS_CHECK( cublasGemmEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, row_diff, src1_ncols, ne10, - &alpha_f16, src0_ptr, CUDA_R_16F, ne00, - src1_ptr, CUDA_R_16F, ne10, - &beta_f16, dst_f16, CUDA_R_16F, ldc, + &alpha_f16, src0_ptr, CUDA_R_16F, ne00, + src1_ptr, CUDA_R_16F, ne10, + &beta_f16, dst_f16.get(), CUDA_R_16F, ldc, CUBLAS_COMPUTE_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_dd_i, row_diff*src1_ncols, stream); - - ggml_cuda_pool_free(dst_f16, dst_as); - - if (src0_as != 0) { - ggml_cuda_pool_free(src0_as_f16, src0_as); - } - - if (src1_as != 0) { - ggml_cuda_pool_free(src1_as_f16, src1_as); - } + to_fp32_cuda(dst_f16.get(), dst_dd_i, row_diff*src1_ncols, stream); } else { - float * src0_ddq_as_f32 = nullptr; - size_t src0_as = 0; + cuda_pool_alloc src0_ddq_as_f32; if (src0->type != GGML_TYPE_F32) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type); GGML_ASSERT(to_fp32_cuda != nullptr); - src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc(row_diff*ne00 * sizeof(float), &src0_as); // NOLINT - to_fp32_cuda(src0_dd_i, src0_ddq_as_f32, row_diff*ne00, stream); + src0_ddq_as_f32.alloc(row_diff*ne00); + to_fp32_cuda(src0_dd_i, src0_ddq_as_f32.get(), row_diff*ne00, stream); } - const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32; + const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? 
(const float *) src0_dd_i : src0_ddq_as_f32.get(); const float alpha = 1.0f; const float beta = 0.0f; @@ -7466,10 +7629,6 @@ inline void ggml_cuda_op_mul_mat_cublas( &alpha, src0_ddf_i, ne00, src1_ddf_i, ne10, &beta, dst_dd_i, ldc)); - - if (src0_as != 0) { - ggml_cuda_pool_free(src0_ddq_as_f32, src0_as); - } } (void) dst; @@ -7761,18 +7920,17 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s float * src1_ddf = nullptr; float * dst_ddf = nullptr; - // as = actual size - size_t src0_asf = 0; - size_t src1_asf = 0; - size_t dst_asf = 0; + cuda_pool_alloc src0_f; + cuda_pool_alloc src1_f; + cuda_pool_alloc dst_f; ggml_cuda_set_device(g_main_device); - const cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; + cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; if (src0_on_device) { src0_ddf = (float *) src0_extra->data_device[g_main_device]; } else { - src0_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_asf); + src0_ddf = src0_f.alloc(ggml_nelements(src0)); CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_ddf, src0, 0, 0, 0, nrows0, main_stream)); } @@ -7780,14 +7938,14 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s if (src1_on_device) { src1_ddf = (float *) src1_extra->data_device[g_main_device]; } else { - src1_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf); + src1_ddf = src1_f.alloc(ggml_nelements(src1)); CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src1_ddf, src1, 0, 0, 0, nrows1, main_stream)); } } if (dst_on_device) { dst_ddf = (float *) dst_extra->data_device[g_main_device]; } else { - dst_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(dst), &dst_asf); + dst_ddf = dst_f.alloc(ggml_nelements(dst)); } // do the computation @@ -7799,16 +7957,6 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(cudaMemcpyAsync(dst->data, dst_ddf, ggml_nbytes(dst), cudaMemcpyDeviceToHost, main_stream)); } - if (src0_asf > 0) { - ggml_cuda_pool_free(src0_ddf, src0_asf); - } - if (src1_asf > 0) { - ggml_cuda_pool_free(src1_ddf, src1_asf); - } - if (dst_asf > 0) { - ggml_cuda_pool_free(dst_ddf, dst_asf); - } - if (dst->backend == GGML_BACKEND_CPU) { CUDA_CHECK(cudaDeviceSynchronize()); } @@ -8122,17 +8270,17 @@ static void ggml_cuda_op_mul_mat( CUDA_CHECK(ggml_cuda_set_device(id)); // free buffers again when done - if (src0_as[id] > 0) { - ggml_cuda_pool_free(src0_dd[id], src0_as[id]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); + if (dst_as[id] > 0) { + ggml_cuda_pool_free(dst_dd[id], dst_as[id]); } if (src1_asq[id] > 0) { ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); } - if (dst_as[id] > 0) { - ggml_cuda_pool_free(dst_dd[id], dst_as[id]); + if (src1_asf[id] > 0) { + ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); + } + if (src0_as[id] > 0) { + ggml_cuda_pool_free(src0_dd[id], src0_as[id]); } } @@ -8385,14 +8533,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); - size_t src1_as = 0; - half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as); - to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); + cuda_pool_alloc src1_as_f16(ne1); + to_fp16_cuda(src1_ddf, src1_as_f16.get(), ne1, main_stream); - size_t dst_as = 0; - - half * dst_f16 = nullptr; - char * dst_t = nullptr; + cuda_pool_alloc dst_f16; + char * dst_t; cublasComputeType_t cu_compute_type = 
CUBLAS_COMPUTE_16F; cudaDataType_t cu_data_type = CUDA_R_16F; @@ -8411,8 +8556,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const void * beta = &beta_f16; if (dst->op_params[0] == GGML_PREC_DEFAULT) { - dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); - dst_t = (char *) dst_f16; + dst_t = (char *) dst_f16.alloc(ne); nbd2 /= sizeof(float) / sizeof(half); nbd3 /= sizeof(float) / sizeof(half); @@ -8459,9 +8603,9 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA - (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB - beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC + alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA + (const char *) src1_as_f16.get(), CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB + beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC ne12*ne13, cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); @@ -8469,19 +8613,13 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const // use cublasGemmBatchedEx const int ne23 = ne12*ne13; - const void ** ptrs_src = nullptr; - void ** ptrs_dst = nullptr; - - size_t ptrs_src_s = 0; - size_t ptrs_dst_s = 0; - - ptrs_src = (const void **) ggml_cuda_pool_malloc(2*ne23*sizeof(void *), &ptrs_src_s); - ptrs_dst = ( void **) ggml_cuda_pool_malloc(1*ne23*sizeof(void *), &ptrs_dst_s); + cuda_pool_alloc ptrs_src(2*ne23); + cuda_pool_alloc< void *> ptrs_dst(1*ne23); dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( - src0_as_f16, src1_as_f16, dst_t, - ptrs_src, ptrs_dst, + src0_as_f16, src1_as_f16.get(), dst_t, + ptrs_src.get(), ptrs_dst.get(), ne12, ne13, ne23, nb02, nb03, @@ -8493,30 +8631,19 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - beta, ( void **) (ptrs_dst + 0*ne23), cu_data_type, ne01, + alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + beta, ( void **) (ptrs_dst.get() + 0*ne23), cu_data_type, ne01, ne23, cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); - - if (ptrs_src_s != 0) { - ggml_cuda_pool_free(ptrs_src, ptrs_src_s); - } - if (ptrs_dst_s != 0) { - ggml_cuda_pool_free(ptrs_dst, ptrs_dst_s); - } } #endif if (dst->op_params[0] == GGML_PREC_DEFAULT) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); - - ggml_cuda_pool_free(dst_f16, dst_as); + to_fp32_cuda(dst_f16.get(), dst_ddf, ne, main_stream); } - - ggml_cuda_pool_free(src1_as_f16, src1_as); } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -8529,8 +8656,8 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; 
id < g_device_count; ++id) { - if (min_compute_capability > g_compute_capabilities[id] && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - min_compute_capability = g_compute_capabilities[id]; + if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { + min_compute_capability = g_device_caps[id].cc; } } @@ -8843,12 +8970,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); } } else { - size_t as_src1, as_dst; - char * src1_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(src1), &as_src1); - char * dst_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(dst), &as_dst); + cuda_pool_alloc src1_contiguous(sizeof(float)*ggml_nelements(src1)); + cuda_pool_alloc dst_contiguous(sizeof(float)*ggml_nelements(dst)); - src1_row_extra.data_device[g_main_device] = src1_contiguous; - dst_row_extra.data_device[g_main_device] = dst_contiguous; + src1_row_extra.data_device[g_main_device] = src1_contiguous.get(); + dst_row_extra.data_device[g_main_device] = dst_contiguous.get(); const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; @@ -8868,7 +8994,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); - CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, + CUDA_CHECK(cudaMemcpyAsync(src1_contiguous.get() + num_src1_rows*nb11, src1_original + i01*nb11, nb11, src1_kind, stream)); num_src1_rows++; } @@ -8900,14 +9026,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); - CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, + CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous.get() + num_src1_rows*nb1, nb1, dst_kind, stream)); num_src1_rows++; } } - - ggml_cuda_pool_free(src1_contiguous, as_src1); - ggml_cuda_pool_free(dst_contiguous, as_dst); } if (dst->backend == GGML_BACKEND_CPU) { @@ -9678,8 +9801,10 @@ static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buff static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { void * ptr = ggml_cuda_host_malloc(size); + if (ptr == nullptr) { - return nullptr; + // fallback to cpu buffer + return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); } // FIXME: this is a hack to avoid having to implement a new buffer type diff --git a/ggml.c b/ggml.c index 3656422d7..73600ab05 100644 --- a/ggml.c +++ b/ggml.c @@ -19351,7 +19351,7 @@ void gguf_set_kv(struct gguf_context * ctx, struct gguf_context * src) { data[j] = ((struct gguf_str *)src->kv[i].value.arr.data)[j].data; } gguf_set_arr_str(ctx, src->kv[i].key.data, data, src->kv[i].value.arr.n); - free(data); + free((void *)data); } else if (src->kv[i].value.arr.type == GGUF_TYPE_ARRAY) { GGML_ASSERT(false && "nested arrays not supported"); } else { diff --git a/ggml.h b/ggml.h index 338f355a4..67d6bc4f1 100644 --- a/ggml.h +++ b/ggml.h @@ -255,6 +255,8 @@ #define GGML_UNREACHABLE() GGML_ASSERT(!"statement should not be reached") #elif defined(__GNUC__) #define GGML_UNREACHABLE() __builtin_unreachable() +#elif defined(_MSC_VER) +#define GGML_UNREACHABLE() __assume(0) #else #define 
GGML_UNREACHABLE() ((void) 0) #endif diff --git a/llama.cpp b/llama.cpp index 5699a0fcf..a24621539 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1281,7 +1281,7 @@ struct llama_hparams { if (this->rope_finetuned != other.rope_finetuned) return true; if (this->n_yarn_orig_ctx != other.n_yarn_orig_ctx) return true; - const float EPSILON = 1e-9; + const float EPSILON = 1e-9f; if (!is_float_close(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true; if (!is_float_close(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return true; @@ -10300,7 +10300,7 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch std::string result = model->vocab.id_to_token[token].text; llama_unescape_whitespace(result); if (length < (int) result.length()) { - return -result.length(); + return -(int) result.length(); } memcpy(buf, result.c_str(), result.length()); return result.length(); @@ -10330,7 +10330,7 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch std::string result = model->vocab.id_to_token[token].text; result = llama_decode_text(result); if (length < (int) result.length()) { - return -result.length(); + return -(int) result.length(); } memcpy(buf, result.c_str(), result.length()); return result.length(); diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 14914def5..8ff76c891 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -883,9 +883,6 @@ int main(int argc, const char ** argv) { srand(seed); const int nargs = 1; - int64_t ne2[4]; - ne2[0] = 1; - for (int ndims = 1; ndims <= 2; ++ndims) { x[0] = get_random_tensor_f32(ctx0, ndims, ne, -1.0f, 1.0f); From 753be377b69bda2d65a7e089f2b7f0c53ef3495e Mon Sep 17 00:00:00 2001 From: Shintarou Okada Date: Sun, 24 Dec 2023 22:35:49 +0900 Subject: [PATCH 143/811] llama : add PLaMo model (#3557) * add plamo mock * add tensor loading * plamo convert * update norm * able to compile * fix norm_rms_eps hparam * runnable * use inp_pos * seems ok * update kqv code * remove develop code * update README * shuffle attn_q.weight and attn_output.weight for broadcasting * remove plamo_llm_build_kqv and use llm_build_kqv * fix style * update * llama : remove obsolete KQ_scale * plamo : fix tensor names for correct GPU offload --------- Co-authored-by: Georgi Gerganov --- README.md | 1 + convert-hf-to-gguf.py | 86 +++++++++++++++- gguf-py/gguf/constants.py | 17 ++++ gguf-py/gguf/tensor_mapping.py | 37 ++++--- llama.cpp | 181 +++++++++++++++++++++++++++++++++ 5 files changed, 307 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 649c3b333..09338d226 100644 --- a/README.md +++ b/README.md @@ -102,6 +102,7 @@ as the main playground for developing new features for the [ggml](https://github - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) +- [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) **Multimodal models:** diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index e71a96c48..303d08170 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -184,6 +184,8 @@ class Model: return MixtralModel if model_architecture == "PhiForCausalLM": return Phi2Model + if model_architecture == "PlamoForCausalLM": + return PlamoModel return Model def _is_model_safetensors(self) -> bool: @@ -225,6 +227,8 @@ class Model: return gguf.MODEL_ARCH.LLAMA if arch == "PhiForCausalLM": 
return gguf.MODEL_ARCH.PHI2 + if arch == "PlamoForCausalLM": + return gguf.MODEL_ARCH.PLAMO raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1002,11 +1006,91 @@ class Phi2Model(Model): self.gguf_writer.add_add_bos_token(False) +class PlamoModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + def set_gguf_parameters(self): + hparams = self.hparams + block_count = hparams["num_hidden_layers"] + + self.gguf_writer.add_name("PLaMo") + self.gguf_writer.add_context_length(4096) # not in config.json + self.gguf_writer.add_embedding_length(hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(5) # hparams["num_key_value_heads"]) is wrong + self.gguf_writer.add_layer_norm_rms_eps(hparams["rms_norm_eps"]) + + def shuffle_attn_q_weight(self, data_torch): + assert data_torch.size() == (5120, 5120) + data_torch = data_torch.reshape(8, 5, 128, 5120) + data_torch = torch.permute(data_torch, (1, 0, 2, 3)) + data_torch = torch.reshape(data_torch, (5120, 5120)) + return data_torch + + def shuffle_attn_output_weight(self, data_torch): + assert data_torch.size() == (5120, 5120) + data_torch = data_torch.reshape(5120, 8, 5, 128) + data_torch = torch.permute(data_torch, (0, 2, 1, 3)) + data_torch = torch.reshape(data_torch, (5120, 5120)) + return data_torch + + def write_tensors(self): + block_count = self.hparams.get("num_layers", self.hparams.get("num_hidden_layers")) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + for name, data_torch in self.get_tensors(): + if "self_attn.rotary_emb.inv_freq" in name: + continue + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + # shuffle for broadcasting of gqa in ggml_mul_mat + if new_name.endswith("attn_q.weight"): + data_torch = self.shuffle_attn_q_weight(data_torch) + elif new_name.endswith("attn_output.weight"): + data_torch = self.shuffle_attn_output_weight(data_torch) + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert a huggingface model to a GGML compatible file") + parser = argparse.ArgumentParser( + description="Convert a huggingface model to a GGML compatible file") parser.add_argument( "--vocab-only", action="store_true", help="extract only the vocab", diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 390dca049..4cd87cdda 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -96,6 +96,7 @@ class MODEL_ARCH(IntEnum): STABLELM = auto() QWEN = auto() PHI2 = auto() + PLAMO = auto() class MODEL_TENSOR(IntEnum): @@ -142,6 +143,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", MODEL_ARCH.PHI2: "phi2", + MODEL_ARCH.PLAMO: "plamo", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -349,6 +351,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.PLAMO: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], MODEL_ARCH.GPT2: [ # TODO ], diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 6fcbdbc1c..446c6b688 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -79,6 +79,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi "transformer.h.{bid}.ln", # phi2 + "model.layers.layers.{bid}.norm", # plamo ), # Attention norm 2 @@ -99,26 +100,29 @@ class TensorNameMap: # Attention query MODEL_TENSOR.ATTN_Q: ( - "model.layers.{bid}.self_attn.q_proj", # llama-hf - "layers.{bid}.attention.wq", # llama-pth - "encoder.layer.{bid}.attention.self.query", # bert - "transformer.h.{bid}.attn.q_proj", # gpt-j + "model.layers.{bid}.self_attn.q_proj", # llama-hf + "layers.{bid}.attention.wq", # llama-pth + "encoder.layer.{bid}.attention.self.query", # bert + "transformer.h.{bid}.attn.q_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.q_proj", # plamo ), # Attention key MODEL_TENSOR.ATTN_K: ( - "model.layers.{bid}.self_attn.k_proj", # llama-hf - "layers.{bid}.attention.wk", # llama-pth - "encoder.layer.{bid}.attention.self.key", # bert - "transformer.h.{bid}.attn.k_proj", # gpt-j + "model.layers.{bid}.self_attn.k_proj", # llama-hf + "layers.{bid}.attention.wk", # llama-pth + "encoder.layer.{bid}.attention.self.key", # bert + "transformer.h.{bid}.attn.k_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.k_proj", # plamo ), # Attention value MODEL_TENSOR.ATTN_V: ( - "model.layers.{bid}.self_attn.v_proj", # llama-hf - "layers.{bid}.attention.wv", # llama-pth - "encoder.layer.{bid}.attention.self.value", # bert - "transformer.h.{bid}.attn.v_proj", # gpt-j + 
"model.layers.{bid}.self_attn.v_proj", # llama-hf + "layers.{bid}.attention.wv", # llama-pth + "encoder.layer.{bid}.attention.self.value", # bert + "transformer.h.{bid}.attn.v_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.v_proj", # plamo ), # Attention output @@ -134,12 +138,14 @@ class TensorNameMap: "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon "transformer.h.{bid}.mixer.out_proj", # phi2 + "model.layers.layers.{bid}.self_attn.o_proj", # plamo ), # Rotary embeddings MODEL_TENSOR.ATTN_ROT_EMBD: ( - "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf - "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth + "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf + "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth + "model.layers.layers.{bid}.self_attn.rotary_emb.inv_freq", # plamo ), # Feed-forward norm @@ -174,6 +180,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen "transformer.h.{bid}.mlp.fc1", # phi2 + "model.layers.layers.{bid}.mlp.up_proj", # plamo ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -186,6 +193,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.gate_proj", # llama-hf refact "layers.{bid}.feed_forward.w1", # llama-pth "transformer.h.{bid}.mlp.w2", # qwen + "model.layers.layers.{bid}.mlp.gate_proj", # plamo ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -206,6 +214,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon "transformer.h.{bid}.mlp.fc2", # phi2 + "model.layers.layers.{bid}.mlp.down_proj", # plamo ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index a24621539..0b99f1e03 100644 --- a/llama.cpp +++ b/llama.cpp @@ -198,6 +198,7 @@ enum llm_arch { LLM_ARCH_STABLELM, LLM_ARCH_QWEN, LLM_ARCH_PHI2, + LLM_ARCH_PLAMO, LLM_ARCH_UNKNOWN, }; @@ -216,6 +217,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, { LLM_ARCH_PHI2, "phi2" }, + { LLM_ARCH_PLAMO, "plamo" }, }; enum llm_kv { @@ -567,6 +569,24 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_PLAMO, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, @@ -2749,6 +2769,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_PLAMO: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + + switch (hparams.n_layer) { + case 40: model.type = e_model::MODEL_13B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3630,6 +3659,51 @@ static bool llm_load_tensors( layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); } } break; + case LLM_ARCH_PLAMO: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, 
GGML_BACKEND_CPU); + + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + + layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); + layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -5555,6 +5629,109 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_plamo() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + struct ggml_tensor * attention_norm = cur; + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, 
attn_factor, beta_fast, beta_slow); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, model, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + struct ggml_tensor * sa_out = cur; + + cur = attention_norm; + + // feed-forward network + { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, sa_out); + cb(cur, "l_out", il); + + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; // @@ -6065,6 +6242,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_phi2(); } break; + case LLM_ARCH_PLAMO: + { + result = llm.build_plamo(); + } break; default: GGML_ASSERT(false); } From b9f47952ffae4e0d3420905526003c23333f6c98 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 24 Dec 2023 21:01:12 +0100 Subject: [PATCH 144/811] simplify bug issue template (#4623) --- .github/ISSUE_TEMPLATE/bug.md | 177 +--------------------------------- 1 file changed, 1 insertion(+), 176 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index c003fe7c1..ce69e6395 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -6,179 +6,4 @@ assignees: '' --- -# Prerequisites - -Please answer the following questions for yourself before submitting an issue. - -- [ ] I am running the latest code. Development is very rapid so there are no tagged versions as of now. -- [ ] I carefully followed the [README.md](https://github.com/ggerganov/llama.cpp/blob/master/README.md). -- [ ] I [searched using keywords relevant to my issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/filtering-and-searching-issues-and-pull-requests) to make sure that I am creating a new issue that is not already open (or closed). -- [ ] I reviewed the [Discussions](https://github.com/ggerganov/llama.cpp/discussions), and have a new bug or useful enhancement to share. - -# Expected Behavior - -Please provide a detailed written description of what you were trying to do, and what you expected `llama.cpp` to do. - -# Current Behavior - -Please provide a detailed written description of what `llama.cpp` did, instead. - -# Environment and Context - -Please provide detailed information about your computer setup. This is important in case the issue is not reproducible except for under certain specific conditions. - -* Physical (or virtual) hardware you are using, e.g. for Linux: - -`$ lscpu` - -* Operating System, e.g. for Linux: - -`$ uname -a` - -* SDK version, e.g. 
for Linux: - -``` -$ python3 --version -$ make --version -$ g++ --version -``` - -# Failure Information (for bugs) - -Please help provide information about the failure / bug. - -# Steps to Reproduce - -Please provide detailed steps for reproducing the issue. We are not sitting in front of your screen, so the more detail the better. - -1. step 1 -2. step 2 -3. step 3 -4. etc. - -# Failure Logs - -Please include any relevant log snippets or files. If it works under one configuration but not under another, please provide logs for both configurations and their corresponding outputs so it is easy to see where behavior changes. - -Also, please try to **avoid using screenshots** if at all possible. Instead, copy/paste the console output and use [Github's markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) to cleanly format your logs for easy readability. - -Example environment info: -``` -llama.cpp$ git log | head -1 -commit 2af23d30434a677c6416812eea52ccc0af65119c - -llama.cpp$ lscpu | egrep "AMD|Flags" -Vendor ID: AuthenticAMD -Model name: AMD Ryzen Threadripper 1950X 16-Core Processor -Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid amd_dcm aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb hw_pstate ssbd ibpb vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt sha_ni xsaveopt xsavec xgetbv1 xsaves clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif overflow_recov succor smca sme sev -Virtualization: AMD-V - -llama.cpp$ python3 --version -Python 3.10.9 - -llama.cpp$ pip list | egrep "torch|numpy|sentencepiece" -numpy 1.24.2 -numpydoc 1.5.0 -sentencepiece 0.1.97 -torch 1.13.1 -torchvision 0.14.1 - -llama.cpp$ make --version | head -1 -GNU Make 4.3 - -$ md5sum ./models/65B/ggml-model-q4_0.bin -dbdd682cce80e2d6e93cefc7449df487 ./models/65B/ggml-model-q4_0.bin -``` - -Example run with the Linux command [perf](https://www.brendangregg.com/perf.html) -``` -llama.cpp$ perf stat ./main -m ./models/65B/ggml-model-q4_0.bin -t 16 -n 1024 -p "Please close your issue when it has been answered." -main: seed = 1679149377 -llama_model_load: loading model from './models/65B/ggml-model-q4_0.bin' - please wait ... -llama_model_load: n_vocab = 32000 -llama_model_load: n_ctx = 512 -llama_model_load: n_embd = 8192 -llama_model_load: n_mult = 256 -llama_model_load: n_head = 64 -llama_model_load: n_layer = 80 -llama_model_load: n_rot = 128 -llama_model_load: f16 = 2 -llama_model_load: n_ff = 22016 -llama_model_load: n_parts = 8 -llama_model_load: ggml ctx size = 41477.73 MB -llama_model_load: memory_size = 2560.00 MB, n_mem = 40960 -llama_model_load: loading model part 1/8 from './models/65B/ggml-model-q4_0.bin' -llama_model_load: .......................................................................................... 
done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 2/8 from './models/65B/ggml-model-q4_0.bin.1' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 3/8 from './models/65B/ggml-model-q4_0.bin.2' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 4/8 from './models/65B/ggml-model-q4_0.bin.3' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 5/8 from './models/65B/ggml-model-q4_0.bin.4' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 6/8 from './models/65B/ggml-model-q4_0.bin.5' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 7/8 from './models/65B/ggml-model-q4_0.bin.6' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 8/8 from './models/65B/ggml-model-q4_0.bin.7' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 - -system_info: n_threads = 16 / 32 | AVX = 1 | AVX2 = 1 | AVX512 = 0 | FMA = 1 | NEON = 0 | ARM_FMA = 0 | F16C = 1 | FP16_VA = 0 | WASM_SIMD = 0 | BLAS = 0 | SSE3 = 1 | VSX = 0 | - -main: prompt: 'Please close your issue when it has been answered.' -main: number of tokens in prompt = 11 - 1 -> '' - 12148 -> 'Please' - 3802 -> ' close' - 596 -> ' your' - 2228 -> ' issue' - 746 -> ' when' - 372 -> ' it' - 756 -> ' has' - 1063 -> ' been' - 7699 -> ' answered' - 29889 -> '.' - -sampling parameters: temp = 0.800000, top_k = 40, top_p = 0.950000, repeat_last_n = 64, repeat_penalty = 1.300000 - - -Please close your issue when it has been answered. -@duncan-donut: I'm trying to figure out what kind of "support" you need for this script and why, exactly? Is there a question about how the code works that hasn't already been addressed in one or more comments below this ticket, or are we talking something else entirely like some sorta bugfixing job because your server setup is different from mine?? -I can understand if your site needs to be running smoothly and you need help with a fix of sorts but there should really be nothing wrong here that the code itself could not handle. And given that I'm getting reports about how it works perfectly well on some other servers, what exactly are we talking? A detailed report will do wonders in helping us get this resolved for ya quickly so please take your time and describe the issue(s) you see as clearly & concisely as possible!! -@duncan-donut: I'm not sure if you have access to cPanel but you could try these instructions. It is worth a shot! 
Let me know how it goes (or what error message, exactly!) when/if ya give that code a go? [end of text] - - -main: mem per token = 71159620 bytes -main: load time = 19309.95 ms -main: sample time = 168.62 ms -main: predict time = 223895.61 ms / 888.47 ms per token -main: total time = 246406.42 ms - - Performance counter stats for './main -m ./models/65B/ggml-model-q4_0.bin -t 16 -n 1024 -p Please close your issue when it has been answered.': - - 3636882.89 msec task-clock # 14.677 CPUs utilized - 13509 context-switches # 3.714 /sec - 2436 cpu-migrations # 0.670 /sec - 10476679 page-faults # 2.881 K/sec - 13133115082869 cycles # 3.611 GHz (16.77%) - 29314462753 stalled-cycles-frontend # 0.22% frontend cycles idle (16.76%) - 10294402631459 stalled-cycles-backend # 78.39% backend cycles idle (16.74%) - 23479217109614 instructions # 1.79 insn per cycle - # 0.44 stalled cycles per insn (16.76%) - 2353072268027 branches # 647.002 M/sec (16.77%) - 1998682780 branch-misses # 0.08% of all branches (16.76%) - - 247.802177522 seconds time elapsed - - 3618.573072000 seconds user - 18.491698000 seconds sys -``` +Please include information about your system, the steps to reproduce the bug, and the version of llama.cpp that you are using. If possible, please provide a minimal code example that reproduces the bug. From a206137f927daef1752753cf5e281220b449a468 Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Mon, 25 Dec 2023 16:09:53 +0000 Subject: [PATCH 145/811] Adding Emeltal reference to UI list (#4629) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 09338d226..3b202a336 100644 --- a/README.md +++ b/README.md @@ -133,6 +133,7 @@ as the main playground for developing new features for the [ggml](https://github - [withcatai/catai](https://github.com/withcatai/catai) - [semperai/amica](https://github.com/semperai/amica) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) +- [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) --- From 77465dad48d7c945c367ab46b6f2ea98ae9b7b15 Mon Sep 17 00:00:00 2001 From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com> Date: Tue, 26 Dec 2023 18:38:36 +0800 Subject: [PATCH 146/811] Fix new CUDA10 compilation errors (#4635) --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index ac3b3c14d..f32e83ab6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -102,6 +102,7 @@ #include #if CUDART_VERSION < 11020 +#define CU_DEVICE_ATTRIBUTE_VIRTUAL_MEMORY_MANAGEMENT_SUPPORTED CU_DEVICE_ATTRIBUTE_VIRTUAL_ADDRESS_MANAGEMENT_SUPPORTED #define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH #define CUBLAS_COMPUTE_16F CUDA_R_16F #define CUBLAS_COMPUTE_32F CUDA_R_32F From de8e496437c59e7d1cc84109e3e49a3478aee25a Mon Sep 17 00:00:00 2001 From: WillCorticesAI <150854901+WillCorticesAI@users.noreply.github.com> Date: Tue, 26 Dec 2023 05:42:08 -0500 Subject: [PATCH 147/811] Update comment for AdamW implementation reference. 
(#4604) Co-authored-by: Will Findley --- ggml.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml.c b/ggml.c index 73600ab05..d24560480 100644 --- a/ggml.c +++ b/ggml.c @@ -17456,9 +17456,9 @@ static void ggml_opt_acc_grad(int np, struct ggml_tensor * const ps[], float * g } // -// ADAM +// Using AdamW - ref: https://arxiv.org/pdf/1711.05101v3.pdf // -// ref: https://arxiv.org/pdf/1412.6980.pdf +// (Original Adam - ref: https://arxiv.org/pdf/1412.6980.pdf) // static enum ggml_opt_result ggml_opt_adam( From dc68f0054cd279cddddb0cae0c9ef4f9cbaa512a Mon Sep 17 00:00:00 2001 From: slaren Date: Tue, 26 Dec 2023 21:23:59 +0100 Subject: [PATCH 148/811] cuda : fix vmm pool with multi GPU (#4620) * cuda : fix vmm pool with multi GPU * hip * use recommended granularity instead of minimum * better error checking * fix mixtral * use cudaMemcpy3DPeerAsync * use cuda_pool_alloc in ggml_cuda_op_mul_mat * consolidate error checking in ggml_cuda_set_device * remove unnecessary inlines ggml-ci * style fixes * only use vmm for the main device * fix scratch buffer size, re-enable vmm pool for all devices * remove unnecessary check id != g_main_device --- ggml-cuda.cu | 483 +++++++++++++++++++++++++-------------------------- ggml.c | 3 - llama.cpp | 3 +- 3 files changed, 243 insertions(+), 246 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f32e83ab6..abad9cc39 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -68,8 +68,9 @@ #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) #endif #define cudaMemcpy hipMemcpy -#define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyAsync hipMemcpyAsync +#define cudaMemcpyPeerAsync hipMemcpyPeerAsync +#define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyDeviceToDevice hipMemcpyDeviceToDevice #define cudaMemcpyDeviceToHost hipMemcpyDeviceToHost #define cudaMemcpyHostToDevice hipMemcpyHostToDevice @@ -163,7 +164,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { const int8x4_t vb = reinterpret_cast(b); #if __has_builtin(__builtin_elementwise_sub_sat) const int8x4_t c = __builtin_elementwise_sub_sat(va, vb); - return reinterpret_cast(c); + return reinterpret_cast(c); #else int8x4_t c; int16_t tmp; @@ -174,7 +175,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { if(tmp < std::numeric_limits::min()) tmp = std::numeric_limits::min(); c[i] = tmp; } - return reinterpret_cast(c); + return reinterpret_cast(c); #endif // __has_builtin(__builtin_elementwise_sub_sat) } @@ -212,6 +213,28 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); +[[noreturn]] +static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { + int id = -1; // in case cudaGetDevice fails + cudaGetDevice(&id); + + fprintf(stderr, "CUDA error: %s\n", msg); + fprintf(stderr, " current device: %d, in function %s at %s:%d\n", id, func, file, line); + fprintf(stderr, " %s\n", stmt); + // abort with GGML_ASSERT to get a stack trace + GGML_ASSERT(!"CUDA error"); +} + +#define CUDA_CHECK_GEN(err, success, error_fn) \ + do { \ + auto err_ = (err); \ + if (err_ != (success)) { \ + ggml_cuda_error(#err, __func__, __FILE__, __LINE__, error_fn(err_)); \ + } \ + } while (0) + +#define CUDA_CHECK(err) CUDA_CHECK_GEN(err, cudaSuccess, cudaGetErrorString) + #if CUDART_VERSION >= 12000 static const char * cublas_get_error_str(const 
cublasStatus_t err) { return cublasGetStatusString(err); @@ -233,15 +256,7 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); } #endif // CUDART_VERSION >= 12000 -[[noreturn]] -static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { - fprintf(stderr, "CUDA error: %s: %s\n", stmt, msg); - fprintf(stderr, " in function %s at %s:%d\n", func, file, line); - GGML_ASSERT(!"CUDA error"); -} - -#define CUDA_CHECK(err) do { auto err_ = (err); if (err_ != cudaSuccess) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cudaGetErrorString(err_)); } while (0) -#define CUBLAS_CHECK(err) do { auto err_ = (err); if (err_ != CUBLAS_STATUS_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cublas_get_error_str(err_)); } while (0) +#define CUBLAS_CHECK(err) CUDA_CHECK_GEN(err, CUBLAS_STATUS_SUCCESS, cublas_get_error_str) #if !defined(GGML_USE_HIPBLAS) static const char * cu_get_error_str(CUresult err) { @@ -249,7 +264,7 @@ static const char * cu_get_error_str(CUresult err) { cuGetErrorString(err, &err_str); return err_str; } -#define CU_CHECK(err) do { auto err_ = (err); if (err_ != CUDA_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cu_get_error_str(err_)); } while (0) +#define CU_CHECK(err) CUDA_CHECK_GEN(err, CUDA_SUCCESS, cu_get_error_str) #endif #if CUDART_VERSION >= 11100 @@ -306,10 +321,10 @@ typedef void (*ggml_cuda_func_t)(const ggml_tensor * src0, const ggml_tensor * s typedef void (*ggml_cuda_op_mul_mat_t)( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream); + const int64_t src1_padded_row_size, cudaStream_t stream); typedef void (*ggml_cuda_op_flatten_t)( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream); + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream); // QK = number of values after dequantization // QR = QK / number of values before dequantization @@ -515,15 +530,15 @@ struct ggml_tensor_extra_gpu { // this is faster on Windows // probably because the Windows CUDA libraries forget to make this check before invoking the drivers -inline cudaError_t ggml_cuda_set_device(const int device) { +static void ggml_cuda_set_device(const int device) { int current_device; CUDA_CHECK(cudaGetDevice(¤t_device)); if (device == current_device) { - return cudaSuccess; + return; } - return cudaSetDevice(device); + CUDA_CHECK(cudaSetDevice(device)); } static int g_device_count = -1; @@ -538,7 +553,6 @@ struct cuda_device_capabilities { static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; - static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default static size_t g_scratch_offset = 0; @@ -580,6 +594,7 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; + GGML_UNUSED(a); } static __device__ __forceinline__ float op_add(const float a, const float b) { @@ -701,7 +716,7 @@ static __global__ void silu_f32(const float * x, float * dst, const int k) { dst[i] = x[i] / (1.0f + expf(-x[i])); } -static __global__ void 
gelu_quick_f32(const float *x, float *dst, int k) { +static __global__ void gelu_quick_f32(const float * x, float * dst, int k) { const float GELU_QUICK_COEF = -1.702f; const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { @@ -710,7 +725,7 @@ static __global__ void gelu_quick_f32(const float *x, float *dst, int k) { dst[i] = x[i] * (1.0f / (1.0f + expf(GELU_QUICK_COEF * x[i]))); } -static __global__ void tanh_f32(const float *x, float *dst, int k) { +static __global__ void tanh_f32(const float * x, float * dst, int k) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { return; @@ -727,7 +742,7 @@ static __global__ void relu_f32(const float * x, float * dst, const int k) { dst[i] = fmaxf(x[i], 0); } -static __global__ void leaky_relu_f32(const float *x, float *dst, const int k, const float negative_slope) { +static __global__ void leaky_relu_f32(const float * x, float * dst, const int k, const float negative_slope) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { return; @@ -780,7 +795,7 @@ static __global__ void norm_f32(const float * x, float * dst, const int ncols, c } } -static __global__ void concat_f32(const float *x,const float *y, float *dst, const int ne0, const int ne02) { +static __global__ void concat_f32(const float * x,const float * y, float * dst, const int ne0, const int ne02) { int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { return; @@ -805,7 +820,7 @@ static __global__ void concat_f32(const float *x,const float *y, float *dst, c } } -static __global__ void upscale_f32(const float *x, float *dst, const int ne00, const int nb02, const int scale_factor) { +static __global__ void upscale_f32(const float * x, float * dst, const int ne00, const int nb02, const int scale_factor) { int ne0 = ne00 * scale_factor; int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { @@ -825,7 +840,7 @@ static __global__ void upscale_f32(const float *x, float *dst, const int ne00, dst[offset_dst] = x[offset_src]; } -static __global__ void pad_f32(const float *x, float *dst, const int ne0, const int ne00, const int ne01, const int ne02) { +static __global__ void pad_f32(const float * x, float * dst, const int ne0, const int ne00, const int ne01, const int ne02) { int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { return; @@ -4727,7 +4742,6 @@ static __global__ void mul_mat_p021_f16_f32( const int row_y = col_x; - // y is not transposed but permuted const int iy = channel*nrows_y + row_y; @@ -5402,7 +5416,7 @@ struct bin_bcast_cuda { cne[3] = 1; }; - auto collapse_nb = [](size_t cnb[], int64_t cne[]) { + auto collapse_nb = [](size_t cnb[], const int64_t cne[]) { cnb[1] *= cne[1]; cnb[2] *= cne[2]; cnb[3] *= cne[3]; @@ -6566,18 +6580,16 @@ struct scoped_spin_lock { static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; // #define DEBUG_CUDA_MALLOC -struct cuda_buffer { +struct ggml_cuda_buffer { void * ptr = nullptr; size_t size = 0; }; -static cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; +static ggml_cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; static size_t g_cuda_pool_size[GGML_CUDA_MAX_DEVICES] = {0}; -static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_leg(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC int nnz = 0; size_t max_size = 0; @@ -6585,7 +6597,7 @@ static void 
* ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { size_t best_diff = 1ull << 36; int ibest = -1; for (int i = 0; i < MAX_CUDA_BUFFERS; ++i) { - cuda_buffer& b = g_cuda_buffer_pool[id][i]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][i]; if (b.ptr != nullptr) { #ifdef DEBUG_CUDA_MALLOC ++nnz; @@ -6608,7 +6620,7 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { } } if (ibest >= 0) { - cuda_buffer& b = g_cuda_buffer_pool[id][ibest]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][ibest]; void * ptr = b.ptr; *actual_size = b.size; b.ptr = nullptr; @@ -6618,9 +6630,10 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { void * ptr; size_t look_ahead_size = (size_t) (1.05 * size); look_ahead_size = 256 * ((look_ahead_size + 255)/256); + ggml_cuda_set_device(device); CUDA_CHECK(cudaMalloc((void **) &ptr, look_ahead_size)); *actual_size = look_ahead_size; - g_cuda_pool_size[id] += look_ahead_size; + g_cuda_pool_size[device] += look_ahead_size; #ifdef DEBUG_CUDA_MALLOC fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); @@ -6628,13 +6641,11 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { return ptr; } -static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { +static void ggml_cuda_pool_free_leg(int device, void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); for (int i = 0; i < MAX_CUDA_BUFFERS; ++i) { - cuda_buffer& b = g_cuda_buffer_pool[id][i]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][i]; if (b.ptr == nullptr) { b.ptr = ptr; b.size = size; @@ -6642,73 +6653,73 @@ static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { } } fprintf(stderr, "WARNING: cuda buffer pool full, increase MAX_CUDA_BUFFERS\n"); + ggml_cuda_set_device(device); CUDA_CHECK(cudaFree(ptr)); - g_cuda_pool_size[id] -= size; + g_cuda_pool_size[device] -= size; } #if !defined(GGML_USE_HIPBLAS) // pool with virtual memory -static std::vector g_cuda_pool_handles[GGML_CUDA_MAX_DEVICES]; static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB -static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_vmm(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); // round up the allocation size to the alignment to ensure that all allocations are aligned for all data types const size_t alignment = 128; size = alignment * ((size + alignment - 1) / alignment); - size_t avail = g_cuda_pool_size[id] - g_cuda_pool_used[id]; + size_t avail = g_cuda_pool_size[device] - g_cuda_pool_used[device]; if (size > avail) { // round up to the next multiple of the granularity size_t reserve_size = size - avail; - const size_t granularity = g_device_caps[id].vmm_granularity; + const size_t granularity = g_device_caps[device].vmm_granularity; reserve_size = granularity * ((reserve_size + granularity - 1) / granularity); - GGML_ASSERT(g_cuda_pool_size[id] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); + GGML_ASSERT(g_cuda_pool_size[device] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); // allocate more physical memory CUmemAllocationProp prop = {}; 
prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; - prop.location.id = id; + prop.location.id = device; CUmemGenericAllocationHandle handle; CU_CHECK(cuMemCreate(&handle, reserve_size, &prop, 0)); // reserve virtual address space (if not already reserved) - if (g_cuda_pool_addr[id] == 0) { - CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[id], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); + if (g_cuda_pool_addr[device] == 0) { + CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[device], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); } // map at the end of the pool - CU_CHECK(cuMemMap(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, 0, handle, 0)); + CU_CHECK(cuMemMap(g_cuda_pool_addr[device] + g_cuda_pool_size[device], reserve_size, 0, handle, 0)); + + // the memory allocation handle is no longer needed after mapping + CU_CHECK(cuMemRelease(handle)); // set access CUmemAccessDesc access = {}; access.location.type = CU_MEM_LOCATION_TYPE_DEVICE; - access.location.id = id; + access.location.id = device; access.flags = CU_MEM_ACCESS_FLAGS_PROT_READWRITE; - CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, &access, 1)); + CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[device] + g_cuda_pool_size[device], reserve_size, &access, 1)); // add to the pool - g_cuda_pool_handles[id].push_back(handle); - g_cuda_pool_size[id] += reserve_size; + g_cuda_pool_size[device] += reserve_size; //printf("cuda pool[%d]: size increased to %llu MB (reserved %llu MB)\n", // id, (unsigned long long) (g_cuda_pool_size[id]/1024/1024), // (unsigned long long) (reserve_size/1024/1024)); } - GGML_ASSERT(g_cuda_pool_addr[id] != 0); + GGML_ASSERT(g_cuda_pool_addr[device] != 0); - void * ptr = (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id]); + void * ptr = (void *) (g_cuda_pool_addr[device] + g_cuda_pool_used[device]); *actual_size = size; - g_cuda_pool_used[id] += size; + g_cuda_pool_used[device] += size; #ifdef DEBUG_CUDA_MALLOC printf("cuda pool[%d]: allocated %llu bytes at %llx [%s]\n", id, (unsigned long long) size, ptr); @@ -6717,38 +6728,32 @@ static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { return ptr; } -static void ggml_cuda_pool_free_vmm(void * ptr, size_t size) { +static void ggml_cuda_pool_free_vmm(int device, void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC printf("cuda pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); #endif - g_cuda_pool_used[id] -= size; + g_cuda_pool_used[device] -= size; // all deallocations must be in reverse order of the allocations - GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id])); + GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[device] + g_cuda_pool_used[device])); } -static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { - int id; - CUDA_CHECK(cudaGetDevice(&id)); - if (g_device_caps[id].vmm) { - return ggml_cuda_pool_malloc_vmm(size, actual_size); +static void * ggml_cuda_pool_malloc(int device, size_t size, size_t * actual_size) { + if (g_device_caps[device].vmm) { + return ggml_cuda_pool_malloc_vmm(device, size, actual_size); } else { - return ggml_cuda_pool_malloc_leg(size, actual_size); + return ggml_cuda_pool_malloc_leg(device, size, actual_size); } } -static void ggml_cuda_pool_free(void * ptr, size_t size) { - int id; - CUDA_CHECK(cudaGetDevice(&id)); - if (g_device_caps[id].vmm) { - ggml_cuda_pool_free_vmm(ptr, size); +static void 
ggml_cuda_pool_free(int device, void * ptr, size_t size) { + if (g_device_caps[device].vmm) { + ggml_cuda_pool_free_vmm(device, ptr, size); } else { - ggml_cuda_pool_free_leg(ptr, size); + ggml_cuda_pool_free_leg(device, ptr, size); } } #else @@ -6758,13 +6763,15 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { template struct cuda_pool_alloc { + int device = -1; T * ptr = nullptr; size_t actual_size = 0; // size is in number of elements T * alloc(size_t size) { GGML_ASSERT(ptr == nullptr); - ptr = (T *) ggml_cuda_pool_malloc(size * sizeof(T), &this->actual_size); + CUDA_CHECK(cudaGetDevice(&device)); + ptr = (T *) ggml_cuda_pool_malloc(device, size * sizeof(T), &this->actual_size); return ptr; } @@ -6774,7 +6781,7 @@ struct cuda_pool_alloc { ~cuda_pool_alloc() { if (ptr != nullptr) { - ggml_cuda_pool_free(ptr, actual_size); + ggml_cuda_pool_free(device, ptr, actual_size); } } @@ -6839,7 +6846,7 @@ void ggml_init_cublas() { alloc_prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; alloc_prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; alloc_prop.location.id = id; - CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_MINIMUM)); + CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_RECOMMENDED)); } #endif // !defined(GGML_USE_HIPBLAS) g_device_caps[id].vmm = !!device_vmm; @@ -6861,7 +6868,7 @@ void ggml_init_cublas() { } for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); // create cuda streams for (int is = 0; is < MAX_STREAMS; ++is) { @@ -6976,7 +6983,7 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( static void ggml_cuda_op_get_rows( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & stream) { + const float * src0_d, const float * src1_d, float * dst_d, cudaStream_t stream) { GGML_ASSERT(src1->type == GGML_TYPE_I32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7018,9 +7025,9 @@ static void ggml_cuda_op_get_rows( } template -inline void ggml_cuda_op_bin_bcast( +static void ggml_cuda_op_bin_bcast( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7039,7 +7046,7 @@ inline void ggml_cuda_op_bin_bcast( static void ggml_cuda_op_repeat( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & main_stream) { + const float * src0_d, const float * src1_d, float * dst_d, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(dst, src0, dst, nullptr, src0_d, dst_d, main_stream); @@ -7047,16 +7054,16 @@ static void ggml_cuda_op_repeat( (void) src1_d; } -inline void ggml_cuda_op_add( +static void ggml_cuda_op_add( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_acc( +static void ggml_cuda_op_acc( const ggml_tensor * src0, const ggml_tensor * src1, 
ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7073,23 +7080,23 @@ inline void ggml_cuda_op_acc( (void) dst; } -inline void ggml_cuda_op_mul( +static void ggml_cuda_op_mul( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_div( +static void ggml_cuda_op_div( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_gelu( +static void ggml_cuda_op_gelu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7101,9 +7108,9 @@ inline void ggml_cuda_op_gelu( (void) src1_dd; } -inline void ggml_cuda_op_silu( +static void ggml_cuda_op_silu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7115,9 +7122,9 @@ inline void ggml_cuda_op_silu( (void) src1_dd; } -inline void ggml_cuda_op_gelu_quick( +static void ggml_cuda_op_gelu_quick( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7129,9 +7136,9 @@ inline void ggml_cuda_op_gelu_quick( (void) src1_dd; } -inline void ggml_cuda_op_tanh( +static void ggml_cuda_op_tanh( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7143,9 +7150,9 @@ inline void ggml_cuda_op_tanh( (void) src1_dd; } -inline void ggml_cuda_op_relu( +static void ggml_cuda_op_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7157,9 +7164,9 
@@ inline void ggml_cuda_op_relu( (void) src1_dd; } -inline void ggml_cuda_op_leaky_relu( +static void ggml_cuda_op_leaky_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7174,9 +7181,9 @@ inline void ggml_cuda_op_leaky_relu( (void) src1_dd; } -inline void ggml_cuda_op_sqr( +static void ggml_cuda_op_sqr( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7188,9 +7195,9 @@ inline void ggml_cuda_op_sqr( (void) src1_dd; } -inline void ggml_cuda_op_norm( +static void ggml_cuda_op_norm( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7208,10 +7215,9 @@ inline void ggml_cuda_op_norm( (void) src1_dd; } - -inline void ggml_cuda_op_group_norm( +static void ggml_cuda_op_group_norm( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7225,9 +7231,9 @@ inline void ggml_cuda_op_group_norm( (void) src1_dd; } -inline void ggml_cuda_op_concat( +static void ggml_cuda_op_concat( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7241,9 +7247,9 @@ inline void ggml_cuda_op_concat( (void) dst; } -inline void ggml_cuda_op_upscale( +static void ggml_cuda_op_upscale( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7258,9 +7264,9 @@ inline void ggml_cuda_op_upscale( (void) src1_dd; } -inline void ggml_cuda_op_pad( +static void ggml_cuda_op_pad( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7275,9 +7281,9 @@ inline void ggml_cuda_op_pad( (void) src1_dd; } -inline void ggml_cuda_op_rms_norm( +static void ggml_cuda_op_rms_norm( const 
ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7295,10 +7301,10 @@ inline void ggml_cuda_op_rms_norm( (void) src1_dd; } -inline void ggml_cuda_op_mul_mat_q( +static void ggml_cuda_op_mul_mat_q( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { const int64_t ne00 = src0->ne[0]; @@ -7360,7 +7366,7 @@ inline void ggml_cuda_op_mul_mat_q( static int64_t get_row_rounding(ggml_type type) { int64_t min_compute_capability = INT_MAX; int64_t max_compute_capability = INT_MIN; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { if (min_compute_capability > g_device_caps[id].cc) { min_compute_capability = g_device_caps[id].cc; @@ -7418,10 +7424,10 @@ static int64_t get_row_rounding(ggml_type type) { #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } -inline void ggml_cuda_op_mul_mat_vec_q( +static void ggml_cuda_op_mul_mat_vec_q( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { GGML_ASSERT(ggml_nrows(src1) == 1); @@ -7471,10 +7477,10 @@ inline void ggml_cuda_op_mul_mat_vec_q( (void) src1_padded_row_size; } -inline void ggml_cuda_op_dequantize_mul_mat_vec( +static void ggml_cuda_op_dequantize_mul_mat_vec( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; @@ -7545,10 +7551,10 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( (void) src1_padded_row_size; } -inline void ggml_cuda_op_mul_mat_cublas( +static void ggml_cuda_op_mul_mat_cublas( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { GGML_ASSERT(src0_dd_i != nullptr); GGML_ASSERT(src1_ddf_i != nullptr); @@ -7637,9 +7643,9 @@ inline void ggml_cuda_op_mul_mat_cublas( (void) src1_padded_row_size; } -inline void ggml_cuda_op_rope( +static void ggml_cuda_op_rope( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * 
dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16); GGML_ASSERT( dst->type == GGML_TYPE_F32 || dst->type == GGML_TYPE_F16); @@ -7717,9 +7723,9 @@ inline void ggml_cuda_op_rope( (void) src1_dd; } -inline void ggml_cuda_op_alibi( +static void ggml_cuda_op_alibi( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7748,9 +7754,9 @@ inline void ggml_cuda_op_alibi( (void) src1_dd; } -inline void ggml_cuda_op_im2col( +static void ggml_cuda_op_im2col( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7783,10 +7789,9 @@ inline void ggml_cuda_op_im2col( (void) src0_dd; } - -inline void ggml_cuda_op_sum_rows( +static void ggml_cuda_op_sum_rows( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7801,9 +7806,9 @@ inline void ggml_cuda_op_sum_rows( (void) src1_dd; } -inline void ggml_cuda_op_argsort( +static void ggml_cuda_op_argsort( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_I32); @@ -7820,9 +7825,9 @@ inline void ggml_cuda_op_argsort( (void) src1_dd; } -inline void ggml_cuda_op_diag_mask_inf( +static void ggml_cuda_op_diag_mask_inf( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7840,9 +7845,9 @@ inline void ggml_cuda_op_diag_mask_inf( (void) src1_dd; } -inline void ggml_cuda_op_soft_max( +static void ggml_cuda_op_soft_max( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7861,9 +7866,9 @@ inline void ggml_cuda_op_soft_max( (void) dst; } -inline void ggml_cuda_op_scale( +static void ggml_cuda_op_scale( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + 
const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7879,9 +7884,9 @@ inline void ggml_cuda_op_scale( (void) src1_dd; } -inline void ggml_cuda_op_clamp( +static void ggml_cuda_op_clamp( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7974,12 +7979,12 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { #ifdef NDEBUG for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); CUDA_CHECK(cudaDeviceSynchronize()); } for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); for (int id_other = 0; id_other < g_device_count; ++id_other) { if (id == id_other) { @@ -8013,7 +8018,6 @@ static void ggml_cuda_op_mul_mat( const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; const int64_t ne03 = src0->ne[3]; - const int64_t nrows0 = ggml_nrows(src0); const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; @@ -8056,27 +8060,29 @@ static void ggml_cuda_op_mul_mat( GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); - // dd = data device - char * src0_dd[GGML_CUDA_MAX_DEVICES] = {nullptr}; - float * src1_ddf[GGML_CUDA_MAX_DEVICES] = {nullptr}; // float - char * src1_ddq[GGML_CUDA_MAX_DEVICES] = {nullptr}; // q8_1 - float * dst_dd[GGML_CUDA_MAX_DEVICES] = {nullptr}; + struct dev_data { + cuda_pool_alloc src0_dd_alloc; + cuda_pool_alloc src1_ddf_alloc; + cuda_pool_alloc src1_ddq_alloc; + cuda_pool_alloc dst_dd_alloc; - // as = actual size - size_t src0_as[GGML_CUDA_MAX_DEVICES] = {0}; - size_t src1_asf[GGML_CUDA_MAX_DEVICES] = {0}; - size_t src1_asq[GGML_CUDA_MAX_DEVICES] = {0}; - size_t dst_as[GGML_CUDA_MAX_DEVICES] = {0}; + char * src0_dd = nullptr; + float * src1_ddf = nullptr; // float + char * src1_ddq = nullptr; // q8_1 + float * dst_dd = nullptr; - int64_t row_low[GGML_CUDA_MAX_DEVICES]; - int64_t row_high[GGML_CUDA_MAX_DEVICES]; + int64_t row_low; + int64_t row_high; + }; + + dev_data dev[GGML_CUDA_MAX_DEVICES]; int used_devices = 0; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { // by default, use all rows - row_low[id] = 0; - row_high[id] = ne01; + dev[id].row_low = 0; + dev[id].row_high = ne01; // for multi GPU, get the row boundaries from tensor split // and round to mul_mat_q tile sizes @@ -8084,23 +8090,23 @@ static void ggml_cuda_op_mul_mat( const int64_t rounding = get_row_rounding(src0->type); if (id != 0) { - row_low[id] = ne01*g_tensor_split[id]; - if (row_low[id] < ne01) { - row_low[id] -= row_low[id] % rounding; + dev[id].row_low = ne01*g_tensor_split[id]; + if (dev[id].row_low < ne01) { + dev[id].row_low -= dev[id].row_low % rounding; } } if (id != g_device_count - 1) { - row_high[id] = ne01*g_tensor_split[id + 1]; - if (row_high[id] < ne01) { - row_high[id] -= row_high[id] % rounding; + dev[id].row_high = ne01*g_tensor_split[id + 1]; + if (dev[id].row_high < ne01) { + dev[id].row_high -= dev[id].row_high % rounding; } } } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { + 
for (int id = 0; id < g_device_count; ++id) { + if ((!split && id != g_main_device) || dev[id].row_low == dev[id].row_high) { continue; } @@ -8110,42 +8116,41 @@ static void ggml_cuda_op_mul_mat( const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; ggml_cuda_set_device(id); - const cudaStream_t stream = g_cudaStreams[id][0]; + cudaStream_t stream = g_cudaStreams[id][0]; if (src0_on_device && src0_is_contiguous) { - src0_dd[id] = (char *) src0_extra->data_device[id]; + dev[id].src0_dd = (char *) src0_extra->data_device[id]; } else { - // const size_t size_src0_ddq = split ? (row_high[id]-row_low[id])*ne00 * src0_ts/src0_bs : ggml_nbytes(src0); - src0_dd[id] = (char *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_as[id]); + dev[id].src0_dd = dev[id].src0_dd_alloc.alloc(ggml_nbytes(src0)); } if (src1_on_device && src1_is_contiguous) { - src1_ddf[id] = (float *) src1_extra->data_device[id]; + dev[id].src1_ddf = (float *) src1_extra->data_device[id]; } else { - src1_ddf[id] = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf[id]); + dev[id].src1_ddf = dev[id].src1_ddf_alloc.alloc(ggml_nelements(src1)); } if (convert_src1_to_q8_1) { - src1_ddq[id] = (char *) ggml_cuda_pool_malloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs, &src1_asq[id]); + dev[id].src1_ddq = dev[id].src1_ddq_alloc.alloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs); if (src1_on_device && src1_is_contiguous) { - quantize_row_q8_1_cuda(src1_ddf[id], src1_ddq[id], ne10, nrows1, src1_padded_col_size, stream); + quantize_row_q8_1_cuda(dev[id].src1_ddf, dev[id].src1_ddq, ne10, nrows1, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); } } if (dst_on_device) { - dst_dd[id] = (float *) dst_extra->data_device[id]; + dev[id].dst_dd = (float *) dst_extra->data_device[id]; } else { - const size_t size_dst_ddf = split ? (row_high[id]-row_low[id])*ne1*sizeof(float) : ggml_nbytes(dst); - dst_dd[id] = (float *) ggml_cuda_pool_malloc(size_dst_ddf, &dst_as[id]); + const size_t size_dst_ddf = split ? (dev[id].row_high - dev[id].row_low)*ne1 : ggml_nelements(dst); + dev[id].dst_dd = dev[id].dst_dd_alloc.alloc(size_dst_ddf); } } // if multiple devices are used they need to wait for the main device // here an event is recorded that signals that the main device has finished calculating the input data if (split && used_devices > 1) { - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaEventRecord(src0_extra->events[g_main_device][0], g_cudaStreams[g_main_device][0])); } @@ -8154,17 +8159,17 @@ static void ggml_cuda_op_mul_mat( const int64_t is = split ? (src1_col_0/src1_col_stride) % MAX_STREAMS : 0; const int64_t src1_ncols = src1_col_0 + src1_col_stride > ne11 ? 
ne11 - src1_col_0 : src1_col_stride; - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { + for (int id = 0; id < g_device_count; ++id) { + if ((!split && id != g_main_device) || dev[id].row_low == dev[id].row_high) { continue; } const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; - const int64_t row_diff = row_high[id] - row_low[id]; + const int64_t row_diff = dev[id].row_high - dev[id].row_low; ggml_cuda_set_device(id); - const cudaStream_t stream = g_cudaStreams[id][is]; + cudaStream_t stream = g_cudaStreams[id][is]; // wait for main GPU data if necessary if (split && (id != g_main_device || is != 0)) { @@ -8178,34 +8183,34 @@ static void ggml_cuda_op_mul_mat( const size_t src1_ddq_i_offset = (i0*ne11 + src1_col_0) * src1_padded_col_size*q8_1_ts/q8_1_bs; // for split tensors the data begins at i0 == i0_offset_low - char * src0_dd_i = src0_dd[id] + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; - float * src1_ddf_i = src1_ddf[id] + (i0*ne11 + src1_col_0) * ne10; - char * src1_ddq_i = src1_ddq[id] + src1_ddq_i_offset; - float * dst_dd_i = dst_dd[id] + (i0*ne1 + src1_col_0) * (dst_on_device ? ne0 : row_diff); + char * src0_dd_i = dev[id].src0_dd + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; + float * src1_ddf_i = dev[id].src1_ddf + (i0*ne11 + src1_col_0) * ne10; + char * src1_ddq_i = dev[id].src1_ddq + src1_ddq_i_offset; + float * dst_dd_i = dev[id].dst_dd + (i0*ne1 + src1_col_0) * (dst_on_device ? ne0 : row_diff); // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed if (dst->backend == GGML_BACKEND_GPU && id == g_main_device) { - dst_dd_i += row_low[id]; // offset is 0 if no tensor split + dst_dd_i += dev[id].row_low; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { if (id != g_main_device) { if (convert_src1_to_q8_1) { - char * src1_ddq_i_source = src1_ddq[g_main_device] + src1_ddq_i_offset; - CUDA_CHECK(cudaMemcpyAsync(src1_ddq_i, src1_ddq_i_source, src1_ncols*src1_padded_col_size*q8_1_ts/q8_1_bs, - cudaMemcpyDeviceToDevice, stream)); + char * src1_ddq_i_source = dev[g_main_device].src1_ddq + src1_ddq_i_offset; + CUDA_CHECK(cudaMemcpyPeerAsync(src1_ddq_i, id, src1_ddq_i_source, g_main_device, + src1_ncols*src1_padded_col_size*q8_1_ts/q8_1_bs, stream)); } else { float * src1_ddf_i_source = (float *) src1_extra->data_device[g_main_device]; src1_ddf_i_source += (i0*ne11 + src1_col_0) * ne10; - CUDA_CHECK(cudaMemcpyAsync(src1_ddf_i, src1_ddf_i_source, src1_ncols*ne10*sizeof(float), - cudaMemcpyDeviceToDevice, stream)); + CUDA_CHECK(cudaMemcpyPeerAsync(src1_ddf_i, id, src1_ddf_i_source, g_main_device, + src1_ncols*ne10*sizeof(float), stream)); } } } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { CUDA_CHECK(ggml_cuda_cpy_tensor_2d( - src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); + src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } @@ -8216,12 +8221,12 @@ static void ggml_cuda_op_mul_mat( } if (src1_col_0 == 0 && (!src0_on_device || !src0_is_contiguous) && i02 % i02_divisor == 0) { - CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, row_low[id], row_high[id], stream)); + 
CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, dev[id].row_low, dev[id].row_high, stream)); } // do the computation op(src0, src1, dst, src0_dd_i, src1_ddf_i, src1_ddq_i, dst_dd_i, - row_low[id], row_high[id], src1_ncols, src1_padded_col_size, stream); + dev[id].row_low, dev[id].row_high, src1_ncols, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); // copy dst to host or other device if necessary @@ -8245,9 +8250,25 @@ static void ggml_cuda_op_mul_mat( // If dst is a vector with ne0 == 1 then you don't have to do this but it still produces correct results. float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); - dhf_dst_i += src1_col_0*ne0 + row_low[id]; - CUDA_CHECK(cudaMemcpy2DAsync(dhf_dst_i, ne0*sizeof(float), dst_dd_i, row_diff*sizeof(float), - row_diff*sizeof(float), src1_ncols, kind, stream)); + dhf_dst_i += src1_col_0*ne0 + dev[id].row_low; +#if !defined(GGML_USE_HIPBLAS) + if (kind == cudaMemcpyDeviceToDevice) { + // cudaMemcpy2DAsync may fail with copies between vmm pools of different devices + cudaMemcpy3DPeerParms p = {}; + p.dstDevice = g_main_device; + p.dstPtr = make_cudaPitchedPtr(dhf_dst_i, ne0*sizeof(float), row_diff, src1_ncols); + p.srcDevice = id; + p.srcPtr = make_cudaPitchedPtr(dst_dd_i, row_diff*sizeof(float), row_diff, src1_ncols); + p.extent = make_cudaExtent(row_diff*sizeof(float), src1_ncols, 1); + CUDA_CHECK(cudaMemcpy3DPeerAsync(&p, stream)); + } else +#endif + { + CUDA_CHECK(cudaMemcpy2DAsync(dhf_dst_i, ne0*sizeof(float), + dst_dd_i, row_diff*sizeof(float), + row_diff*sizeof(float), src1_ncols, + kind, stream)); + } } else { float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); @@ -8264,35 +8285,14 @@ static void ggml_cuda_op_mul_mat( } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { - continue; - } - CUDA_CHECK(ggml_cuda_set_device(id)); - - // free buffers again when done - if (dst_as[id] > 0) { - ggml_cuda_pool_free(dst_dd[id], dst_as[id]); - } - if (src1_asq[id] > 0) { - ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); - } - if (src0_as[id] > 0) { - ggml_cuda_pool_free(src0_dd[id], src0_as[id]); - } - } - // main device waits for all other devices to be finished if (split && g_device_count > 1) { int64_t is_max = (ne11 + MUL_MAT_SRC1_COL_STRIDE - 1) / MUL_MAT_SRC1_COL_STRIDE; is_max = is_max <= MAX_STREAMS ? 
is_max : MAX_STREAMS; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); - for (int64_t id = 0; id < g_device_count; ++id) { - if (row_low[id] == row_high[id]) { + ggml_cuda_set_device(g_main_device); + for (int id = 0; id < g_device_count; ++id) { + if (dev[id].row_low == dev[id].row_high) { continue; } for (int64_t is = 0; is < is_max; ++is) { @@ -8302,7 +8302,7 @@ static void ggml_cuda_op_mul_mat( } if (dst->backend == GGML_BACKEND_CPU) { - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaDeviceSynchronize()); } } @@ -8412,7 +8412,7 @@ static void ggml_cuda_mul_mat_vec_p021(const ggml_tensor * src0, const ggml_tens const int64_t ne12 = src1->ne[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -8444,7 +8444,7 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor const int64_t ne12 = src1->ne[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -8515,7 +8515,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], main_stream)); @@ -8656,7 +8656,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? 
g_tensor_split[id + 1] : 1.0f)) { min_compute_capability = g_device_caps[id].cc; } @@ -8799,7 +8799,7 @@ static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], main_stream)); @@ -8917,7 +8917,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s std::vector ids_host(ggml_nbytes(ids)); - const cudaStream_t stream = g_cudaStreams[g_main_device][0]; + cudaStream_t stream = g_cudaStreams[g_main_device][0]; if (ids->backend == GGML_BACKEND_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; @@ -9073,7 +9073,7 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg const int64_t nb11 = src1->nb[1]; const int64_t nb12 = src1->nb[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; const ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -9163,7 +9163,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { ggml_tensor_extra_gpu * extra = new struct ggml_tensor_extra_gpu; memset(extra, 0, sizeof(*extra)); - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (backend == GGML_BACKEND_GPU && id != g_main_device) { continue; } @@ -9234,15 +9234,14 @@ void ggml_cuda_free_data(struct ggml_tensor * tensor) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { + ggml_cuda_set_device(id); if (extra->data_device[id] != nullptr) { - CUDA_CHECK(ggml_cuda_set_device(id)); CUDA_CHECK(cudaFree(extra->data_device[id])); } for (int64_t is = 0; is < MAX_STREAMS; ++is) { if (extra->events[id][is] != nullptr) { - CUDA_CHECK(ggml_cuda_set_device(id)); CUDA_CHECK(cudaEventDestroy(extra->events[id][is])); } } @@ -9296,7 +9295,7 @@ static void ggml_cuda_assign_buffers_impl(struct ggml_tensor * tensor, bool scra force_inplace; const size_t size = ggml_nbytes(tensor); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; @@ -9373,7 +9372,7 @@ void ggml_cuda_copy_to_device(struct ggml_tensor * tensor) { GGML_ASSERT(ggml_is_contiguous(tensor)); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaMemcpy(extra->data_device[g_main_device], tensor->data, ggml_nbytes(tensor), cudaMemcpyHostToDevice)); } diff --git a/ggml.c b/ggml.c index d24560480..ed56e60a8 100644 --- a/ggml.c +++ b/ggml.c @@ -4041,7 +4041,6 @@ static struct ggml_tensor * ggml_group_norm_impl( result->op = GGML_OP_GROUP_NORM; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; // TODO: maybe store epsilon here? 
return result; } @@ -5541,7 +5540,6 @@ static struct ggml_tensor * ggml_upscale_impl( result->op_params[0] = scale_factor; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; return result; } @@ -5846,7 +5844,6 @@ struct ggml_tensor * ggml_get_rel_pos( result->op = GGML_OP_GET_REL_POS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; return result; } diff --git a/llama.cpp b/llama.cpp index 0b99f1e03..4aa59c4c0 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9519,7 +9519,8 @@ struct llama_context * llama_new_context_with_model( ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); #if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (model->n_gpu_layers > 0) { - ggml_cuda_set_scratch_size(alloc_size); + // the CPU buffer adds this padding in case the malloc buffer is not aligned, so we need to do the same for the GPU buffer, since we use the same offsets + ggml_cuda_set_scratch_size(alloc_size + 64); LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); // calculate total VRAM usage From f56d6077d0c37e6606ac0a4fa3169de70593acfe Mon Sep 17 00:00:00 2001 From: wonjun Jang Date: Wed, 27 Dec 2023 17:37:25 +0900 Subject: [PATCH 149/811] Add byte token type when tokenizer.model is not exists (#4641) * Add byte token type to hf format * remove unused variable --- convert.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/convert.py b/convert.py index 7a3cd615e..1f0c4f2f4 100755 --- a/convert.py +++ b/convert.py @@ -357,6 +357,7 @@ class VocabLoader: for tok in self.tokenizer.all_special_tokens } self.special_ids: set[int] = set(self.tokenizer.all_special_ids) + self.reverse_vocab = {id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items()} self.vocab_size_base: int = self.tokenizer.vocab_size self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_dict) self.fname_tokenizer: Path = fname_tokenizer @@ -370,15 +371,13 @@ class VocabLoader: self.spm = None def hf_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - tokenizer = self.tokenizer - reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.get_vocab().items()} added_tokens_ids = set(self.added_tokens_dict.values()) for i in range(self.vocab_size_base): if i in added_tokens_ids: continue - text = reverse_vocab[i].encode("utf-8") + text = self.reverse_vocab[i].encode("utf-8") yield text, self.get_token_score(i), self.get_token_type(i) def get_token_type(self, token_id: int) -> gguf.TokenType: @@ -394,10 +393,13 @@ class VocabLoader: if self.spm.is_byte(token_id): toktype = gguf.TokenType.BYTE else: + token = self.reverse_vocab[token_id] if token_id == self.unk_token_id: toktype = gguf.TokenType.UNKNOWN - if token_id in self.special_ids: + elif token_id in self.special_ids: toktype = gguf.TokenType.CONTROL + elif len(token) == 6 and token.startswith("<0x") and token.endswith(">"): + toktype = gguf.TokenType.BYTE return toktype From 951010fa53a0ffe81b7d2e87c4349e0d3cb3d19d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 27 Dec 2023 11:02:13 +0200 Subject: [PATCH 150/811] ggml : fix dot product for ARM (#4630) ggml-ci --- ggml-quants.c | 363 +++----------------------------------------------- 1 file changed, 22 insertions(+), 341 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index a15a24048..05ef8f9b7 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -407,6 +407,18 @@ inline 
static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #define ggml_vld1q_s8_x4 vld1q_s8_x4 #endif + +#if !defined(__ARM_FEATURE_DOTPROD) + +inline static int32x4_t vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { + const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); + const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); + + return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); +} + +#endif + #endif #if defined(__ARM_NEON) || defined(__wasm_simd128__) @@ -2468,32 +2480,12 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) // dot product into int32x4_t const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0ls), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0ls), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hs), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hs), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1ls), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1ls), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hs), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hs), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -2776,32 +2768,12 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) // dot product into int32x4_t const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0l), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0l), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0h), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0h), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1l), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1l), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1h), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = 
vmull_s8(vget_high_s8(v0_1h), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs; @@ -2963,32 +2935,12 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3275,32 +3227,12 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 
(v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; @@ -3550,7 +3482,6 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t y1_0 = vld1q_s8(y1->qs); const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); @@ -3558,26 +3489,6 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); - -#else - const int16x8_t p0_0 = vmull_s8(vget_low_s8 (x0_0), vget_low_s8 (y0_0)); - const int16x8_t p0_1 = vmull_s8(vget_high_s8(x0_0), vget_high_s8(y0_0)); - const int16x8_t p0_2 = vmull_s8(vget_low_s8 (x0_1), vget_low_s8 (y0_1)); - const int16x8_t p0_3 = vmull_s8(vget_high_s8(x0_1), vget_high_s8(y0_1)); - - const int16x8_t p1_0 = vmull_s8(vget_low_s8 (x1_0), vget_low_s8 (y1_0)); - const int16x8_t p1_1 = vmull_s8(vget_high_s8(x1_0), vget_high_s8(y1_0)); - const int16x8_t p1_2 = vmull_s8(vget_low_s8 (x1_1), vget_low_s8 (y1_1)); - const int16x8_t p1_3 = vmull_s8(vget_high_s8(x1_1), vget_high_s8(y1_1)); - - const int32x4_t p0 = vaddq_s32(vpaddlq_s16(p0_0), vpaddlq_s16(p0_1)); - const int32x4_t p1 = vaddq_s32(vpaddlq_s16(p0_2), vpaddlq_s16(p0_3)); - const int32x4_t p2 = vaddq_s32(vpaddlq_s16(p1_0), vpaddlq_s16(p1_1)); - const int32x4_t p3 = vaddq_s32(vpaddlq_s16(p1_2), vpaddlq_s16(p1_3)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(p2, p3)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3650,12 +3561,10 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m3 = vdupq_n_u8(0x3); const uint8x16_t m4 = vdupq_n_u8(0xF); -#if defined(__ARM_FEATURE_DOTPROD) - const int32x4_t vzero = vdupq_n_s32(0); -#endif + + const int32x4_t vzero = vdupq_n_s32(0); ggml_int8x16x2_t q2bytes; uint8_t aux[16]; @@ -3663,7 +3572,6 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri float sum = 0; for (int i = 0; i < nb; ++i) { - const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); @@ -3689,20 +3597,9 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri // We use this macro instead of a function call because for some reason // the code runs 2-3% slower, even if the function is declared inline -#if defined(__ARM_FEATURE_DOTPROD) #define MULTIPLY_ACCUM_WITH_SCALE(index)\ isum += 
vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; -#else -#define MULTIPLY_ACCUM_WITH_SCALE(index)\ - {\ - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[0]), vget_low_s8 (q8bytes.val[0])),\ - vmull_s8(vget_high_s8(q2bytes.val[0]), vget_high_s8(q8bytes.val[0])));\ - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[1]), vget_low_s8 (q8bytes.val[1])),\ - vmull_s8(vget_high_s8(q2bytes.val[1]), vget_high_s8(q8bytes.val[1])));\ - isum += vaddvq_s16(p1) * aux[is+(index)] + vaddvq_s16(p2) * aux[is+1+(index)];\ - } -#endif #define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\ q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\ @@ -3710,26 +3607,23 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[1], (shift)), m3));\ MULTIPLY_ACCUM_WITH_SCALE((index)); - for (int j = 0; j < QK_K/128; ++j) { - const ggml_uint8x16x2_t q2bits = ggml_vld1q_u8_x2(q2); q2 += 32; ggml_int8x16x2_t q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[0], m3)); q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[1], m3)); + MULTIPLY_ACCUM_WITH_SCALE(0); SHIFT_MULTIPLY_ACCUM_WITH_SCALE(2, 2); - SHIFT_MULTIPLY_ACCUM_WITH_SCALE(4, 4); - SHIFT_MULTIPLY_ACCUM_WITH_SCALE(6, 6); is += 8; } - sum += d * isum; + sum += d * isum; } *s = sum; @@ -4043,11 +3937,9 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m3 = vdupq_n_u8(0x3); -#if defined(__ARM_FEATURE_DOTPROD) - const int32x4_t vzero = vdupq_n_s32(0); -#endif + + const int32x4_t vzero = vdupq_n_s32(0); ggml_int8x16x4_t q2bytes; @@ -4081,28 +3973,12 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[2] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 4), m3)); q2bytes.val[3] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 6), m3)); -#if defined(__ARM_FEATURE_DOTPROD) isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; -#else - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q2bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q2bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum1 += vaddvq_s16(p1) * scales[0]; - isum2 += vaddvq_s16(p2) * scales[1]; - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q2bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p4 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q2bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum1 += vaddvq_s16(p3) * scales[2]; - isum2 += vaddvq_s16(p4) * scales[3]; -#endif sum += d * (isum1 + isum2); - } *s = sum; @@ -4328,9 +4204,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; const uint8x16_t m3b = 
vdupq_n_u8(0x3); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t vzero = vdupq_n_s32(0); -#endif const uint8x16_t m0 = vdupq_n_u8(1); const uint8x16_t m1 = vshlq_n_u8(m0, 1); @@ -4382,22 +4256,11 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 2), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 2), m3b)), vreinterpretq_s8_u8(q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; -#else - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes_1.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes_1.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes_1.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes_1.val[1]))); - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes_1.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes_1.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes_1.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes_1.val[3]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1] + vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif + scale += 4; q3h.val[0] = vbicq_u8(m2, qhbits.val[0]); @@ -4410,22 +4273,11 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 6), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 6), m3b)), vreinterpretq_s8_u8(q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; -#else - p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes_2.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes_2.val[0]))); - p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes_2.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes_2.val[1]))); - p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes_2.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes_2.val[2]))); - p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes_2.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes_2.val[3]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1] + vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif + scale += 4; if (j == 0) { @@ -4864,10 +4716,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - -#ifdef __ARM_FEATURE_DOTPROD - const int32x4_t vzero = vdupq_n_s32(0); 
-#endif + const int32x4_t vzero = vdupq_n_s32(0); const uint8x16_t m3b = vdupq_n_u8(0x3); const uint8x16_t mh = vdupq_n_u8(4); @@ -4908,22 +4757,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(vshrq_n_u8(q3bits, 4), m3b), q3h.val[2])); q3bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q3bits, 6), q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; -#else - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p0) * scales[0] + vaddvq_s16(p1) * scales[2] + vaddvq_s16(p2) * scales[1] + vaddvq_s16(p3) * scales[3]; -#endif sum += d * isum; @@ -5228,11 +5065,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x2_t q4bytes; ggml_int8x16x2_t q8bytes; @@ -5269,10 +5103,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri int32_t sumi2 = 0; for (int j = 0; j < QK_K/64; ++j) { - const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; -#ifdef __ARM_FEATURE_DOTPROD q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); @@ -5287,26 +5119,6 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi2 += vaddvq_s32(p2) * scales[2*j+1]; -#else - q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; - q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); - q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi1 += vaddvq_s16(vaddq_s16(p0, p1)) * scales[2*j+0]; - - q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; - q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); - q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - 
vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi2 += vaddvq_s16(vaddq_s16(p2, p3)) * scales[2*j+1]; - -#endif } sumf += d * (sumi1 + sumi2); @@ -5603,12 +5415,9 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t mzero = vdupq_n_s32(0); -#endif float sumf = 0; @@ -5636,7 +5445,6 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); -#ifdef __ARM_FEATURE_DOTPROD q8bytes = ggml_vld1q_s8_x4(q8); q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); @@ -5650,27 +5458,7 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); const int32_t sumi2 = vaddvq_s32(p2) * scales[1]; -#else - q8bytes = ggml_vld1q_s8_x4(q8); - q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); - q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - int32_t sumi1 = vaddvq_s16(vaddq_s16(p0, p1)) * scales[0]; - - q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); - q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[3]))); - int32_t sumi2 = vaddvq_s16(vaddq_s16(p2, p3)) * scales[1]; - -#endif sumf += d * (sumi1 + sumi2); - } *s = sumf - sum_mins; @@ -5875,15 +5663,11 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; - #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); const uint8x16_t mone = vdupq_n_u8(1); const uint8x16_t mtwo = vdupq_n_u8(2); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x4_t q5bytes; @@ -5938,28 +5722,11 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[0], 4), q5h.val[2])); q5bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[1], 4), q5h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; -#else - - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - 
vmull_s8(vget_high_s8(q5bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q5bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi += vaddvq_s16(vaddq_s16(p0, p1)) * *scales++; - - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q5bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q5bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - sumi += vaddvq_s16(vaddq_s16(p2, p3)) * *scales++; -#endif } sumf += d * sumi - dmin * sumi_mins; - } *s = sumf; @@ -6311,12 +6078,9 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); const uint8x16_t mh = vdupq_n_u8(16); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x4_t q5bytes; ggml_uint8x16x4_t q5h; @@ -6348,32 +6112,12 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[0], 4)), vreinterpretq_s8_u8(q5h.val[2])); q5bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[1], 4)), vreinterpretq_s8_u8(q5h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - int32_t sumi1 = sc[0] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); int32_t sumi2 = sc[1] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); int32_t sumi3 = sc[2] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); int32_t sumi4 = sc[3] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); sumf += d * (sumi1 + sumi2 + sumi3 + sumi4); - -#else - - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q5bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q5bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - int32_t sumi = sc[0] * vaddvq_s16(p0) + sc[1] * vaddvq_s16(p1); - - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q5bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q5bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - sumi += sc[2] * vaddvq_s16(p2) + sc[3] * vaddvq_s16(p3); - - sumf += d*sumi; -#endif - } *s = sumf; @@ -6600,13 +6344,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - float sum = 0; const uint8x16_t m4b = vdupq_n_u8(0xF); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t vzero = vdupq_n_s32(0); -#endif //const int8x16_t m32s = vdupq_n_s8(32); const uint8x16_t mone = vdupq_n_u8(3); @@ -6658,31 +6399,13 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + 
vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + scale += 4; -#else - - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - scale += 2; - - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[0] + vaddvq_s16(p3) * scale[1]; - scale += 2; -#endif - q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; shifted = vshrq_n_u8(qhbits.val[0], 4); @@ -6703,34 +6426,11 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; - - //for (int l = 0; l < 4; ++l) { - // const int32x4_t p = vdotq_s32(vzero, q6bytes.val[l], q8bytes.val[l]); - // isum += vaddvq_s32(p) * *scale++; - //} -#else - p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - scale += 2; - - p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[0] + vaddvq_s16(p3) * scale[1]; - scale += 2; -#endif - } //sum += isum * d_all * y[i].d; sum += d_all * y[i].d * (isum - 32 * isum_mins); @@ -7076,14 +6776,11 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - float sum = 0; const uint8x16_t m4b = vdupq_n_u8(0xF); const int8x16_t m32s = vdupq_n_s8(32); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t vzero = vdupq_n_s32(0); -#endif const uint8x16_t mone = vdupq_n_u8(3); @@ -7119,26 +6816,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[2])), m32s); q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[3])), m32s); 
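                    // the vdotq_s32() calls below each accumulate four groups of 4 int8
                    // products into int32 lanes; vaddvq_s32() reduces those lanes and the
                    // result is weighted by the per-sub-block scale[] before adding to isum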
-#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; -#else - - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif sum += isum * d_all * y[i].d; From b47879b0dda43f2d26415e88b6840295817e552a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 27 Dec 2023 11:15:31 +0200 Subject: [PATCH 151/811] scripts : add sync-ggml-am.sh --- scripts/sync-ggml-am.sh | 131 ++++++++++++++++++++++++++++++++++++++++ scripts/sync-ggml.last | 1 + 2 files changed, 132 insertions(+) create mode 100755 scripts/sync-ggml-am.sh create mode 100644 scripts/sync-ggml.last diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh new file mode 100755 index 000000000..83abe3681 --- /dev/null +++ b/scripts/sync-ggml-am.sh @@ -0,0 +1,131 @@ +#!/bin/bash +# +# Synchronize ggml changes to llama.cpp +# +# Usage: +# +# $ cd /path/to/llama.cpp +# $ ./scripts/sync-ggml-am.sh +# + +set -e + +sd=$(dirname $0) +cd $sd/../ + +SRC_LLAMA=$(pwd) +SRC_GGML=$(cd ../ggml; pwd) + +if [ ! -d $SRC_GGML ]; then + echo "ggml not found at $SRC_GGML" + exit 1 +fi + +lc=$(cat $SRC_LLAMA/scripts/sync-ggml.last) +echo "Syncing ggml changes since commit $lc" + +cd $SRC_GGML + +git log --oneline $lc..HEAD + +git format-patch $lc --stdout -- \ + include/ggml/ggml*.h \ + src/ggml*.h \ + src/ggml*.c \ + src/ggml*.cpp \ + src/ggml*.m \ + src/ggml*.metal \ + src/ggml*.cu \ + tests/test-opt.cpp \ + tests/test-grad0.cpp \ + tests/test-quantize-fns.cpp \ + tests/test-quantize-perf.cpp \ + tests/test-backend-ops.cpp \ + > $SRC_LLAMA/ggml-src.patch + +# delete files if empty +if [ ! 
-s $SRC_LLAMA/ggml-src.patch ]; then + rm -v $SRC_LLAMA/ggml-src.patch +fi + +cd $SRC_LLAMA + +if [ -f $SRC_LLAMA/ggml-src.patch ]; then + # replace PR numbers + # + # Subject: some text (#1234) + # Subject: some text (ggml/1234) + cat ggml-src.patch | sed -e 's/^Subject: \(.*\) (#\([0-9]*\))/Subject: \1 (ggml\/\2)/' > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + cat ggml-src.patch | sed -e 's/^\(.*\) (#\([0-9]*\))$/\1 (ggml\/\2)/' > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + # replace filenames: + # + # src/ggml.c -> ggml.c + # src/ggml-alloc.c -> ggml-alloc.c + # src/ggml-backend-impl.h -> ggml-backend-impl.h + # src/ggml-backend.c -> ggml-backend.c + # src/ggml-cuda.cu -> ggml-cuda.cu + # src/ggml-cuda.h -> ggml-cuda.h + # src/ggml-impl.h -> ggml-impl.h + # src/ggml-metal.h -> ggml-metal.h + # src/ggml-metal.m -> ggml-metal.m + # src/ggml-metal.metal -> ggml-metal.metal + # src/ggml-mpi.h -> ggml-mpi.h + # src/ggml-mpi.c -> ggml-mpi.c + # src/ggml-opencl.cpp -> ggml-opencl.cpp + # src/ggml-opencl.h -> ggml-opencl.h + # src/ggml-quants.c -> ggml-quants.c + # src/ggml-quants.h -> ggml-quants.h + # include/ggml/ggml.h -> ggml.h + # include/ggml/ggml-alloc.h -> ggml-alloc.h + # include/ggml/ggml-backend.h -> ggml-backend.h + # + # tests/test-opt.cpp -> tests/test-opt.cpp + # tests/test-grad0.cpp -> tests/test-grad0.cpp + # tests/test-quantize-fns.cpp -> tests/test-quantize-fns.cpp + # tests/test-quantize-perf.cpp -> tests/test-quantize-perf.cpp + # tests/test-backend-ops.cpp -> tests/test-backend-ops.cpp + + cat ggml-src.patch | sed \ + -e 's/src\/ggml\.c/ggml.c/g' \ + -e 's/src\/ggml-alloc\.c/ggml-alloc.c/g' \ + -e 's/src\/ggml-backend-impl\.h/ggml-backend-impl.h/g' \ + -e 's/src\/ggml-backend\.c/ggml-backend.c/g' \ + -e 's/src\/ggml-cuda\.cu/ggml-cuda.cu/g' \ + -e 's/src\/ggml-cuda\.h/ggml-cuda.h/g' \ + -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ + -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ + -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ + -e 's/src\/ggml-metal\.metal/ggml-metal.metal/g' \ + -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ + -e 's/src\/ggml-mpi\.c/ggml-mpi.c/g' \ + -e 's/src\/ggml-opencl\.cpp/ggml-opencl.cpp/g' \ + -e 's/src\/ggml-opencl\.h/ggml-opencl.h/g' \ + -e 's/src\/ggml-quants\.c/ggml-quants.c/g' \ + -e 's/src\/ggml-quants\.h/ggml-quants.h/g' \ + -e 's/include\/ggml\/ggml\.h/ggml.h/g' \ + -e 's/include\/ggml\/ggml-alloc\.h/ggml-alloc.h/g' \ + -e 's/include\/ggml\/ggml-backend\.h/ggml-backend.h/g' \ + -e 's/tests\/test-opt\.cpp/tests\/test-opt.cpp/g' \ + -e 's/tests\/test-grad0\.cpp/tests\/test-grad0.cpp/g' \ + -e 's/tests\/test-quantize-fns\.cpp/tests\/test-quantize-fns.cpp/g' \ + -e 's/tests\/test-quantize-perf\.cpp/tests\/test-quantize-perf.cpp/g' \ + -e 's/tests\/test-backend-ops\.cpp/tests\/test-backend-ops.cpp/g' \ + > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + git am ggml-src.patch + + rm -v $SRC_LLAMA/ggml-src.patch +fi + +# update last commit +cd $SRC_GGML +git log -1 --format=%H > $SRC_LLAMA/scripts/sync-ggml.last + +echo "Done" + +exit 0 diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last new file mode 100644 index 000000000..1ec144116 --- /dev/null +++ b/scripts/sync-ggml.last @@ -0,0 +1 @@ +76e7f47b69e8334384dc718480c496dafbd47999 From 879b690a9e1eb1ab0a29b58236fc76978fb4d902 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 27 Dec 2023 15:16:55 +0100 Subject: [PATCH 152/811] finetune : fix output formatting in print_params (#4653) This commit fixes the output formatting in the print_params 
function which currently looks like this: ```console print_params: n_vocab: 32000 print_params: n_ctx: 128 print_params: n_embd: 4096 print_params: n_ff: 11008 print_params: n_head: 32 print_params: n_head_kv: 32 print_params: n_layer: 32 print_params: norm_rms_eps : 0.000010 print_params: rope_freq_base : 10000.000000 print_params: rope_freq_scale : 1.000000 ``` With this comit the output will look like this: ```console print_params: n_vocab : 32000 print_params: n_ctx : 128 print_params: n_embd : 4096 print_params: n_ff : 11008 print_params: n_head : 32 print_params: n_head_kv : 32 print_params: n_layer : 32 print_params: norm_rms_eps : 0.000010 print_params: rope_freq_base : 10000.000000 print_params: rope_freq_scale : 1.000000 ``` Signed-off-by: Daniel Bevenius --- examples/finetune/finetune.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 7b1333a9d..e0520f64c 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -196,13 +196,13 @@ static const char * LLM_TENSOR_FFN_DOWN = "blk.%d.ffn_down"; static const char * LLM_TENSOR_FFN_UP = "blk.%d.ffn_up"; static void print_params(struct my_llama_hparams * params) { - printf("%s: n_vocab: %u\n", __func__, params->n_vocab); - printf("%s: n_ctx: %u\n", __func__, params->n_ctx); - printf("%s: n_embd: %u\n", __func__, params->n_embd); - printf("%s: n_ff: %u\n", __func__, params->n_ff); - printf("%s: n_head: %u\n", __func__, params->n_head); - printf("%s: n_head_kv: %u\n", __func__, params->n_head_kv); - printf("%s: n_layer: %u\n", __func__, params->n_layer); + printf("%s: n_vocab : %u\n", __func__, params->n_vocab); + printf("%s: n_ctx : %u\n", __func__, params->n_ctx); + printf("%s: n_embd : %u\n", __func__, params->n_embd); + printf("%s: n_ff : %u\n", __func__, params->n_ff); + printf("%s: n_head : %u\n", __func__, params->n_head); + printf("%s: n_head_kv : %u\n", __func__, params->n_head_kv); + printf("%s: n_layer : %u\n", __func__, params->n_layer); printf("%s: norm_rms_eps : %f\n", __func__, params->f_norm_rms_eps); printf("%s: rope_freq_base : %f\n", __func__, params->rope_freq_base); printf("%s: rope_freq_scale : %f\n", __func__, params->rope_freq_scale); From f6793491b5af6da75edad34d6f503ef86d31b09f Mon Sep 17 00:00:00 2001 From: "Nam D. 
Tran" <42194884+namtranase@users.noreply.github.com> Date: Wed, 27 Dec 2023 22:39:45 +0700 Subject: [PATCH 153/811] llama : add AWQ for llama, llama2, mpt, and mistral models (#4593) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * update: awq support llama-7b model * update: change order * update: benchmark results for llama2-7b * update: mistral 7b v1 benchmark * update: support 4 models * fix: Readme * update: ready for PR * update: readme * fix: readme * update: change order import * black * format code * update: work for bot mpt and awqmpt * update: readme * Rename to llm_build_ffn_mpt_awq * Formatted other files * Fixed params count * fix: remove code * update: more detail for mpt * fix: readme * fix: readme * update: change folder architecture * fix: common.cpp * fix: readme * fix: remove ggml_repeat * update: cicd * update: cicd * uppdate: remove use_awq arg * update: readme * llama : adapt plamo to new ffn ggml-ci --------- Co-authored-by: Trần Đức Nam Co-authored-by: Le Hoang Anh Co-authored-by: Georgi Gerganov --- awq-py/README.md | 116 +++++++++++++++ awq-py/awq/apply_awq.py | 254 +++++++++++++++++++++++++++++++++ awq-py/requirements.txt | 2 + convert-hf-to-gguf.py | 27 +++- convert.py | 14 ++ gguf-py/gguf/constants.py | 3 + gguf-py/gguf/tensor_mapping.py | 5 + llama.cpp | 27 +++- 8 files changed, 443 insertions(+), 5 deletions(-) create mode 100644 awq-py/README.md create mode 100644 awq-py/awq/apply_awq.py create mode 100644 awq-py/requirements.txt diff --git a/awq-py/README.md b/awq-py/README.md new file mode 100644 index 000000000..59354f4e3 --- /dev/null +++ b/awq-py/README.md @@ -0,0 +1,116 @@ +# AWQ: Activation-aware Weight Quantization for LLM - version apply to llamacpp +[[Paper](https://arxiv.org/abs/2306.00978)][[Original Repo](https://github.com/mit-han-lab/llm-awq)][[Easy-to-use Repo](https://github.com/casper-hansen/AutoAWQ)] + +**Supported models:** + +- [X] LLaMA +- [x] LLaMA 2 +- [X] MPT +- [X] Mistral AI v0.1 +- [ ] Bloom +- [ ] Mixtral MoE + +**TODO:** +- [x] Update version work with both MPT and MPT-AWQ model +- [ ] Add OPT model +- [ ] Add Bloom model +- [ ] Add Mixtral MoE +- [ ] Support w3, w2 + + +## Contents + +- [Install](##Install) +- [Convert](##Convert) +- [Quantize](##Quantize) +- [Test](##Test) +- [Benchmark](##Benchmark) +- [Results](##Results) + +## Install +Install requirements +```bash +pip install -r requirements.txt +``` +Get the pre-computed AWQ search results for multiple model families, including LLaMA, LLaMA2, MPT, OPT +```bash +git clone https://huggingface.co/datasets/mit-han-lab/awq-model-zoo awq_cache +``` + +## Convert +Example for llama model +```bash +# For llama7b and llama2 models +python convert.py models/llama-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/llama_7b_fp16.gguf +# For mistral and mpt models +python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf +``` + +## Quantize +```bash +# We only benchmark and confirm the results on q4_0, q4_1, and q2_k types. +./quantize models/llama_7b_fp16.gguf models/llama_7b_q4_0.gguf q4_0 +``` + +## Test +```bash +# For all models. +./build/bin/main -m models/llama_7b_q4_0.gguf -n 128 --prompt "Once upon a time" +``` + +## Benchmark +The perplexity measurements in table above are done against the `wikitext2` test dataset (https://paperswithcode.com/dataset/wikitext-2), with context length of 512. +```bash +# For llama and llama2, and mistral models. 
+./perplexity -m models/llama_7b_q4_0.gguf -f datasets/wikitext-2-raw/wiki.test.raw +``` + +## Results +Results are run on OpenBLAS (CPU) and CuBLAS (GPU) for fair comparison +We use three types of llamacpp quantization methods to work with our version, including q4_0, q4_1, and q2_k + +### Llama 7B (Build with OpenBLAS) + +| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | +|-----------:|--------------|-------:|-------:|-------:|-------:| +|Llama 7B | perplexity | 5.9066 | 6.1214 | 6.0643 | 6.5808 | +|Llama 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G | +|Llama 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | +|AWQ-LLama 7B| perplexity | 5.9175 | 6.0252 | 5.9987 | 6.3692 | +|AWQ-LLama 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G | +|AWQ-LLama 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | + + +### Llama2 7B (Build with CuBLAS) + +| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | +|------------:|--------------|-------:|-------:|-------:|-------:| +|Llama2 7B | perplexity | 5.8664 | 6.0260 | 6.0656 | 6.4496 | +|Llama2 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G | +|Llama2 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | +|AWQ-LLama2 7B| perplexity | 5.8801 | 6.0054 | 5.9849 | 6.3650 | +|AWQ-LLama2 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G | +|AWQ-LLama2 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | + + +### Mistral 7B v0.1 (Build with CuBLAS) + +| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | +|-------------:|--------------|-------:|-------:|-------:|-------:| +|Mistral 7B | perplexity | 5.6931 | 5.8202 | 5.8268 | 6.1645 | +|Mistral 7B | file size | 14.5G | 4.1G | 4.5G | 3.1G | +|Mistral 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | +|AWQ-Mistral 7B| perplexity | 5.6934 | 5.8020 | 5.7691 | 6.0426 | +|AWQ-Mistral 7B| file size | 14.5G | 4.1G | 4.5G | 3.1G | +|AWQ-Mistral 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | + +### MPT 7B (Build with OpenBLAS) + +| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | +|---------:|--------------|-------:|-------:|-------:|--------:| +|MPT 7B | perplexity | 8.4369 | 8.7956 | 8.6265 | 11.4913 | +|MPT 7B | file size | 13.7G | 3.9G | 4.3G | 2.8G | +|MPT 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | +|AWQ-MPT 7B| perplexity | 8.4944 | 8.7053 | 8.6750 | 10.2873| +|AWQ-MPT 7B| file size | 13.7G | 3.9G | 4.3G | 2.8G | +|AWQ-MPT 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | diff --git a/awq-py/awq/apply_awq.py b/awq-py/awq/apply_awq.py new file mode 100644 index 000000000..11132c5d2 --- /dev/null +++ b/awq-py/awq/apply_awq.py @@ -0,0 +1,254 @@ +""" +Implements the AWQ for llama.cpp use cases. +Original paper: https://arxiv.org/abs/2306.00978 + +This code is based on versions of the AWQ implementation found in the following repositories: +* https://github.com/mit-han-lab/llm-awq +* https://github.com/casper-hansen/AutoAWQ +""" + +import os +import torch +import torch.nn as nn + +from transformers import AutoModelForCausalLM, AutoConfig +from transformers.models.bloom.modeling_bloom import BloomGelu +from transformers.models.llama.modeling_llama import LlamaRMSNorm +from transformers.activations import GELUActivation + + +class ScaledActivation(nn.Module): + """ + ScaledActivation module wraps an existing activation function and applies a + scale factor to its output. + + Args: + module (nn.Module): The activation function to be scaled. + scales (torch.Tensor): A tensor of size (num_features,) containing the initial + scale factors for each feature. + + Returns: + torch.Tensor: The scaled output of the activation function. 
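+
+    Example (illustrative tensor sizes only):
+        >>> act = ScaledActivation(nn.GELU(), torch.ones(4096))
+        >>> y = act(torch.randn(1, 1, 4096))   # activation output divided element-wise by the scales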
+ """ + + def __init__(self, module, scales): + super().__init__() + self.act = module + self.scales = nn.Parameter(scales.data) + + def forward(self, x): + return self.act(x) / self.scales.view(1, 1, -1).to(x.device) + + +def set_op_by_name(layer, name, new_module): + """ + Set the new module for given module's name. + + Args: + layer (nn.Module): The layer in which to replace the submodule. + name (str): The path to the submodule to be replaced, using dot notation + to access nested modules. + new_module (nn.Module): The new module to replace the existing one. + """ + levels = name.split(".") + if len(levels) > 1: + mod_ = layer + for l_idx in range(len(levels) - 1): + if levels[l_idx].isdigit(): + mod_ = mod_[int(levels[l_idx])] + else: + mod_ = getattr(mod_, levels[l_idx]) + setattr(mod_, levels[-1], new_module) + else: + setattr(layer, name, new_module) + + +def get_op_by_name(module, op_name): + """ + Retrieves a submodule within a given layer based on its name. + + Args: + module (nn.Module): The layer containing the submodule to find. + op_name (str): The name of the submodule. + + Returns: + nn.Module: The requested submodule found within the given layer. + + Raises: + ValueError: If the specified submodule cannot be found within the layer. + """ + for name, m in module.named_modules(): + if name == op_name: + return m + raise ValueError(f"Cannot find op {op_name} in module {module}") + + +@torch.no_grad() +def scale_ln_fcs(ln, fcs, scales): + """ + Scales the weights of a LayerNorm and a list of fully-connected layers proportionally. + + Args: + ln (nn.LayerNorm): The LayerNorm module to be scaled. + fcs (List[nn.Linear]): A list of fully-connected layers to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + """ + + if not isinstance(fcs, list): + fcs = [fcs] + + scales = scales.to(ln.weight.device) + + ln.weight.div_(scales) + if hasattr(ln, "bias") and ln.bias is not None: + ln.bias.div_(scales) + + for fc in fcs: + fc.weight.mul_(scales.view(1, -1)) + + for p in ln.parameters(): + assert torch.isnan(p).sum() == 0 + for fc in fcs: + for p in fc.parameters(): + assert torch.isnan(p).sum() == 0 + + +@torch.no_grad() +def scale_fc_fc(fc1, fc2, scales): + """ + Scales the weights of two fully-connected layers in a specific pattern. + + Args: + fc1 (nn.Linear): The first fully-connected layer to be scaled. + fc2 (nn.Linear): The second fully-connected layer to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + """ + assert isinstance(fc1, nn.Linear) + assert isinstance(fc2, nn.Linear) + + scales = scales.to(fc1.weight.device) + + fc1.weight[-scales.size(0):].div_(scales.view(-1, 1)) + if fc1.bias is not None: + fc1.bias.div_(scales.view(-1)) + + fc2.weight.mul_(scales.view(1, -1)) + + for p in fc1.parameters(): + assert torch.isnan(p).sum() == 0 + for p in fc2.parameters(): + assert torch.isnan(p).sum() == 0 + + +@torch.no_grad() +def scale_gelu_fc(gelu, fc, scales): + """ + Scales the weight of a GELU activation and a fully-connected layer proportionally. + + Args: + gelu (Union[nn.GELU, BloomGelu, GELUActivation]): The GELU activation module to be scaled. + fc (nn.Linear): The fully-connected layer to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + + Raises: + TypeError: If the `gelu` module is not of type `nn.GELU`, `BloomGelu`, or `GELUActivation`. + TypeError: If the `fc` module is not of type `nn.Linear`. 
+ """ + assert isinstance(gelu, (nn.GELU, BloomGelu, GELUActivation)) + assert isinstance(fc, nn.Linear) + + fc.weight.mul_(scales.view(1, -1).to(fc.weight.device)) + + for p in fc.parameters(): + assert torch.isnan(p).sum() == 0 + + +def apply_scale(module, scales_list, input_feat_dict=None): + """ + Applies different scaling strategies to layers based on their type and hierarchy within a given module. + + Args: + module (nn.Module): The module containing the layers to be scaled. + scales_list (List[Tuple[str, List[str], torch.Tensor]]): A list of tuples containing: + * prev_op_name (str): The name of the preceding operation or module, + relative to which the layers to be scaled are located. + * layer_names (List[str]): A list of names of the layers to be scaled, relative to the preceding operation. + * scales (torch.Tensor): A 1D tensor of size (num_features,) containing the scaling factors for each feature. + input_feat_dict (Optional[Dict[str, torch.Tensor]]): A dictionary mapping layer names to their corresponding + input features (optional). + """ + for prev_op_name, layer_names, scales in scales_list: + prev_op = get_op_by_name(module, prev_op_name) + layers = [get_op_by_name(module, name) for name in layer_names] + + prev_op.cuda() + for layer in layers: + layer.cuda() + scales.cuda() + + if isinstance(prev_op, nn.Linear): + assert len(layers) == 1 + scale_fc_fc(prev_op, layers[0], scales) + elif isinstance(prev_op, (nn.LayerNorm, LlamaRMSNorm)) or "rmsnorm" in str(prev_op.__class__).lower(): + scale_ln_fcs(prev_op, layers, scales) + elif isinstance(prev_op, (nn.GELU, BloomGelu, GELUActivation)): + new_module = ScaledActivation(prev_op, scales) + set_op_by_name(module, prev_op_name, new_module) + scale_gelu_fc(prev_op, layers[0], scales) + else: + raise NotImplementedError(f"prev_op {type(prev_op)} not supported yet!") + + # apply the scaling to input feat if given; prepare it for clipping + if input_feat_dict is not None: + for layer_name in layer_names: + inp = input_feat_dict[layer_name] + inp.div_(scales.view(1, -1).to(inp.device)) + + prev_op.cpu() + for layer in layers: + layer.cpu() + scales.cpu() + + +@torch.no_grad() +def apply_clip(module, clip_list): + """ + Applies element-wise clipping to the weight of a specific layer within a given module. + + Args: + module (nn.Module): The module containing the layer to be clipped. + clip_list (List[Tuple[str, torch.Tensor]]): A list of tuples containing: + * name (str): The name of the layer to be clipped, relative to the root of the module. + * max_val (torch.Tensor): A 1D or 2D tensor defining the upper bound for each element of the layer's weight. + """ + for name, max_val in clip_list: + layer = get_op_by_name(module, name) + layer.cuda() + max_val = max_val.to(layer.weight.device) + org_shape = layer.weight.shape + layer.weight.data = layer.weight.data.reshape(*max_val.shape[:2], -1) + layer.weight.data = torch.clamp(layer.weight.data, -max_val, max_val) + layer.weight.data = layer.weight.data.reshape(org_shape) + layer.cpu() + + +def add_scale_weights(model_path, scale_path, tmp_path): + """ + Adds pre-computed Activation Weight Quantization (AWQ) results to a model, + including scaling factors and clipping bounds. + + Args: + model_path (str): Path to the pre-trained model to be equipped with AWQ. + scale_path (str): Path to the AWQ scale factors (.pt file). + tmp_path (str): Path to the temporary directory where the equipped model will be saved. 
+ """ + config = AutoConfig.from_pretrained(model_path, trust_remote_code=True) + model = AutoModelForCausalLM.from_pretrained( + model_path, config=config, trust_remote_code=True + ) + model.eval() + awq_results = torch.load(str(scale_path), map_location="cpu") + apply_scale(model, awq_results["scale"]) + apply_clip(model, awq_results["clip"]) + model.save_pretrained(str(tmp_path)) + os.system(f"cp {str(model_path)}/tokenizer* {str(tmp_path)}") diff --git a/awq-py/requirements.txt b/awq-py/requirements.txt new file mode 100644 index 000000000..5fe604329 --- /dev/null +++ b/awq-py/requirements.txt @@ -0,0 +1,2 @@ +torch>=2.0.0 +transformers>=4.32.0 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 303d08170..7dbc28147 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -46,7 +46,7 @@ class Model: self.part_names = self._get_part_names() self.hparams = Model.load_hparams(self.dir_model) self.model_arch = self._get_model_architecture() - self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess) + self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess, use_temp_file=False) def set_vocab(self): self._set_vocab_gpt2() @@ -59,7 +59,7 @@ class Model: from safetensors import safe_open ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) else: - ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", mmap=True, weights_only=True)) + ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", weights_only=True)) with ctx as model_part: for name in model_part.keys(): @@ -464,7 +464,11 @@ class MPTModel(Model): data = data_torch.squeeze().numpy() # map tensor names - new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if "scales" in name: + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias", ".scales")) + new_name = new_name.replace("scales", "act.scales") + else: + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) if new_name is None: print(f"Can not map tensor {name!r}") sys.exit() @@ -1095,6 +1099,9 @@ def parse_args() -> argparse.Namespace: "--vocab-only", action="store_true", help="extract only the vocab", ) + parser.add_argument( + "--awq-path", type=Path, default=None, + help="Path to scale awq cache file") parser.add_argument( "--outfile", type=Path, help="path to write to; default: based on input", @@ -1115,6 +1122,20 @@ def parse_args() -> argparse.Namespace: args = parse_args() dir_model = args.model + +if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + dir_model = tmp_model_path + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + if not dir_model.is_dir(): print(f'Error: {args.model} is not a directory', file=sys.stderr) sys.exit(1) diff --git a/convert.py b/convert.py index 1f0c4f2f4..c3f3fc0a1 100755 --- a/convert.py +++ b/convert.py @@ -1187,6 +1187,7 @@ def main(args_in: list[str] | None = None) -> None: # We currently only support Q8_0 output on little endian systems. 
output_choices.append("q8_0") parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") + parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") @@ -1200,6 +1201,19 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--padvocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") args = parser.parse_args(args_in) + if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + args.model = tmp_model_path + if args.dump_single: model_plus = lazy_load_file(args.model) do_dump_model(model_plus) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 4cd87cdda..c9be21119 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -120,6 +120,7 @@ class MODEL_TENSOR(IntEnum): FFN_GATE = auto() FFN_DOWN = auto() FFN_UP = auto() + FFN_ACT = auto() FFN_GATE_EXP = auto() FFN_DOWN_EXP = auto() FFN_UP_EXP = auto() @@ -169,6 +170,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down", MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up", + MODEL_TENSOR.FFN_ACT: "blk.{bid}.ffn", MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate.{xid}", MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", @@ -269,6 +271,7 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_ACT, ], MODEL_ARCH.GPTJ: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 446c6b688..0b8f70417 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -188,6 +188,11 @@ class TensorNameMap: "model.layers.{bid}.block_sparse_moe.experts.{xid}.w3", # mixtral ), + # AWQ-activation gate + MODEL_TENSOR.FFN_ACT: ( + "transformer.blocks.{bid}.ffn.act", # mpt + ), + # Feed-forward gate MODEL_TENSOR.FFN_GATE: ( "model.layers.{bid}.mlp.gate_proj", # llama-hf refact diff --git a/llama.cpp b/llama.cpp index 4aa59c4c0..bf1b01a90 100644 --- a/llama.cpp +++ b/llama.cpp @@ -354,6 +354,7 @@ enum llm_tensor { LLM_TENSOR_FFN_GATE, LLM_TENSOR_FFN_DOWN, LLM_TENSOR_FFN_UP, + LLM_TENSOR_FFN_ACT, LLM_TENSOR_FFN_DOWN_EXP, LLM_TENSOR_FFN_GATE_EXP, LLM_TENSOR_FFN_UP_EXP, @@ -473,6 +474,7 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" }, }, }, { @@ -1285,6 +1287,7 @@ struct llama_hparams { float f_clamp_kqv; float f_max_alibi_bias; + bool operator!=(const llama_hparams & other) const { if (this->vocab_only 
!= other.vocab_only) return true; if (this->n_vocab != other.n_vocab) return true; @@ -1388,6 +1391,7 @@ struct llama_layer { // ff bias struct ggml_tensor * ffn_down_b; // b2 struct ggml_tensor * ffn_up_b; // b3 + struct ggml_tensor * ffn_act; }; struct llama_kv_cell { @@ -3471,7 +3475,6 @@ static bool llm_load_tensors( case LLM_ARCH_MPT: { model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - // output { ggml_backend_type backend_norm; @@ -3509,6 +3512,9 @@ static bool llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + + // AWQ ScaleActivation layer + layer.ffn_act = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, backend, false); } } break; case LLM_ARCH_STABLELM: @@ -4039,6 +4045,7 @@ static struct ggml_tensor * llm_build_ffn( struct ggml_tensor * gate_b, struct ggml_tensor * down, struct ggml_tensor * down_b, + struct ggml_tensor * act_scales, llm_ffn_op_type type_op, llm_ffn_gate_type type_gate, const llm_build_cb & cb, @@ -4083,6 +4090,10 @@ static struct ggml_tensor * llm_build_ffn( { cur = ggml_gelu(ctx, cur); cb(cur, "ffn_gelu", il); + if (act_scales != NULL) { + cur = ggml_div(ctx, cur, act_scales); + cb(cur, "ffn_act", il); + } } break; case LLM_FFN_RELU: { @@ -4401,6 +4412,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } else { @@ -4580,6 +4592,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -4694,6 +4707,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, NULL, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -4798,6 +4812,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5002,6 +5017,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_RELU_SQR, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5088,6 +5104,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5183,6 +5200,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5268,11 +5286,11 @@ struct llm_build_context { NULL, LLM_NORM, cb, il); cb(cur, "ffn_norm", il); - cur = llm_build_ffn(ctx0, cur, model.layers[il].ffn_up, NULL, NULL, NULL, model.layers[il].ffn_down, NULL, + model.layers[il].ffn_act, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5381,6 +5399,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5493,6 
+5512,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5600,6 +5620,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(ffn_output, "ffn_out", il); } @@ -5703,6 +5724,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5887,6 +5909,7 @@ static const std::unordered_map k_offload_map { "ffn_gate", OFFLOAD_FUNC }, { "ffn_gate_b", OFFLOAD_FUNC }, { "ffn_gate_par", OFFLOAD_FUNC }, + { "ffn_act", OFFLOAD_FUNC }, { "ffn_down", OFFLOAD_FUNC }, { "ffn_down_b", OFFLOAD_FUNC }, { "ffn_out", OFFLOAD_FUNC }, From ea5497df5d138c83b2b0ca70aefdc4b1175c1001 Mon Sep 17 00:00:00 2001 From: manikbhandari Date: Thu, 28 Dec 2023 09:03:57 -0500 Subject: [PATCH 154/811] gpt2 : Add gpt2 architecture integration (#4555) --- README.md | 1 + convert-hf-to-gguf.py | 66 +++++++++++ gguf-py/gguf/constants.py | 11 +- gguf-py/gguf/tensor_mapping.py | 10 +- llama.cpp | 206 +++++++++++++++++++++++++++++++-- models/ggml-vocab-gpt2.gguf | Bin 0 -> 1766799 bytes tests/CMakeLists.txt | 1 + 7 files changed, 281 insertions(+), 14 deletions(-) create mode 100644 models/ggml-vocab-gpt2.gguf diff --git a/README.md b/README.md index 3b202a336..48dcd6464 100644 --- a/README.md +++ b/README.md @@ -103,6 +103,7 @@ as the main playground for developing new features for the [ggml](https://github - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) +- [x] [GPT-2](https://huggingface.co/gpt2) **Multimodal models:** diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 7dbc28147..3557a825e 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -182,6 +182,8 @@ class Model: return QwenModel if model_architecture == "MixtralForCausalLM": return MixtralModel + if model_architecture == "GPT2LMHeadModel": + return GPT2Model if model_architecture == "PhiForCausalLM": return Phi2Model if model_architecture == "PlamoForCausalLM": @@ -225,6 +227,8 @@ class Model: return gguf.MODEL_ARCH.QWEN if arch == "MixtralForCausalLM": return gguf.MODEL_ARCH.LLAMA + if arch == "GPT2LMHeadModel": + return gguf.MODEL_ARCH.GPT2 if arch == "PhiForCausalLM": return gguf.MODEL_ARCH.PHI2 if arch == "PlamoForCausalLM": @@ -993,6 +997,68 @@ class QwenModel(Model): self.gguf_writer.add_tensor(new_name, data) +class GPT2Model(Model): + def set_gguf_parameters(self): + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_block_count(self.hparams["n_layer"]) + self.gguf_writer.add_context_length(self.hparams["n_ctx"]) + self.gguf_writer.add_embedding_length(self.hparams["n_embd"]) + self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"]) + self.gguf_writer.add_head_count(self.hparams["n_head"]) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"]) + self.gguf_writer.add_file_type(self.ftype) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + 
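+        # note: HF GPT-2 keeps its attention/MLP projections in Conv1D modules, whose
+        # weight matrices are transposed relative to nn.Linear, hence the transpose of
+        # the .c_attn/.c_proj/.c_fc weights in the loop below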
for name, data_torch in self.get_tensors(): + # we don't need these + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias")): + continue + + if name.endswith((".c_attn.weight", ".c_proj.weight", ".c_fc.weight", ".c_proj.weight")): + data_torch = data_torch.transpose(1, 0) + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + # note: GPT2 output is tied to (same as) wte in original model + if new_name == "token_embd.weight": + print(f"output.weight, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor("output.weight", data) + + class Phi2Model(Model): def set_gguf_parameters(self): block_count = self.hparams["n_layer"] diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index c9be21119..ae62cc575 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -370,7 +370,16 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_UP, ], MODEL_ARCH.GPT2: [ - # TODO + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.POS_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, ], MODEL_ARCH.PHI2: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 0b8f70417..80c1d5449 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -17,6 +17,7 @@ class TensorNameMap: "tok_embeddings", # llama-pth "embeddings.word_embeddings", # bert "language_model.embedding.word_embeddings", # persimmon + "wte", # gpt2 "transformer.embd.wte", # phi2 ), @@ -34,6 +35,7 @@ class TensorNameMap: MODEL_TENSOR.POS_EMBD: ( "transformer.wpe", # gpt2 "embeddings.position_embeddings", # bert + "wpe", # gpt2 ), # Output @@ -53,7 +55,7 @@ class TensorNameMap: "norm", # llama-pth "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt - "ln_f", # refact bloom qwen + "ln_f", # refact bloom qwen gpt2 "language_model.encoder.final_layernorm", # persimmon "lm_head.ln", # phi2 ), @@ -78,6 +80,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi + "h.{bid}.ln_1", # gpt2 "transformer.h.{bid}.ln", # phi2 "model.layers.layers.{bid}.norm", # plamo ), @@ -95,6 +98,7 @@ class TensorNameMap: 
"transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 ), @@ -137,6 +141,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo ), @@ -159,6 +164,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi + "h.{bid}.ln_2", # gpt2 ), MODEL_TENSOR.FFN_GATE_INP: ( @@ -179,6 +185,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen + "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo ), @@ -218,6 +225,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo ), diff --git a/llama.cpp b/llama.cpp index bf1b01a90..68c7cced6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -423,6 +423,15 @@ static std::map> LLM_TENSOR_NAMES = LLM_ARCH_GPT2, { { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_POS_EMBD, "position_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, }, }, { @@ -1256,6 +1265,10 @@ enum e_model { MODEL_40B, MODEL_65B, MODEL_70B, + MODEL_SMALL, + MODEL_MEDIUM, + MODEL_LARGE, + MODEL_XL, }; static const size_t kiB = 1024; @@ -2552,18 +2565,22 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { - case MODEL_1B: return "1B"; - case MODEL_3B: return "3B"; - case MODEL_7B: return "7B"; - case MODEL_8B: return "8B"; - case MODEL_13B: return "13B"; - case MODEL_15B: return "15B"; - case MODEL_30B: return "30B"; - case MODEL_34B: return "34B"; - case MODEL_40B: return "40B"; - case MODEL_65B: return "65B"; - case MODEL_70B: return "70B"; - default: return "?B"; + case MODEL_1B: return "1B"; + case MODEL_3B: return "3B"; + case MODEL_7B: return "7B"; + case MODEL_8B: return "8B"; + case MODEL_13B: return "13B"; + case MODEL_15B: return "15B"; + case MODEL_30B: return "30B"; + case MODEL_34B: return "34B"; + case MODEL_40B: return "40B"; + case MODEL_65B: return "65B"; + case MODEL_70B: return "70B"; + case MODEL_SMALL: return "0.1B"; + case MODEL_MEDIUM: return "0.4B"; + case MODEL_LARGE: return "0.8B"; + case MODEL_XL: return "1.5B"; + default: return "?B"; } } @@ -2782,6 +2799,17 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_GPT2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + switch (hparams.n_layer) { + 
case 12: model.type = e_model::MODEL_SMALL; break; + case 24: model.type = e_model::MODEL_MEDIUM; break; + case 36: model.type = e_model::MODEL_LARGE; break; + case 48: model.type = e_model::MODEL_XL; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3710,6 +3738,60 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); } } break; + case LLM_ARCH_GPT2: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -5754,6 +5836,102 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_gpt2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + struct ggml_tensor * cur; + struct ggml_tensor * pos; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct 
ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + pos = ggml_get_rows(ctx0, model.pos_embd, inp_pos); + cb(pos, "pos_embd", -1); + + inpL = ggml_add(ctx0, inpL, pos); + cb(inpL, "inpL", -1); + + for (int il = 0; il < n_layer; ++il) { + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, model, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + // add the input + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); + + // FF + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + } + + inpL = ggml_add(ctx0, cur, ffn_inp); + cb(inpL, "l_out", il); + } + + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; // @@ -6269,6 +6447,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_plamo(); } break; + case LLM_ARCH_GPT2: + { + result = llm.build_gpt2(); + } break; default: GGML_ASSERT(false); } diff --git a/models/ggml-vocab-gpt2.gguf b/models/ggml-vocab-gpt2.gguf new file mode 100644 index 0000000000000000000000000000000000000000..1fbc72c1e4d9e210e5c5689b31e1debfa33d4b6a GIT binary patch literal 1766799 zcmZs^`I97Ba^FYYwgw*99`uE-njL~9w2aEis*35X zjNClBx`rf70(4UMeM>2Tph$@#MM&;i)TMzEv>ZYeBw{QJ3 zISylfbTS(zZ+<)(HuH6SE57m1`TfZ-Ur*nye>|Jc&({}iI{fEf@JFVXC)3l@etzx` zvDkmnA3B>(Pk($iEU&udsXxaD|0RE}Td$|{bwA8Mx|nvSq zKb|gD{cM>3!#wy`e(%Fi{@Hxt24ZLZZ2IH%^ATNG4FaA(o{NcR#BYE**UVJAn zzMB_6mlr>u7vIZ^@8`uwdGUk1xRn>T^Wsik+|7%7d2v54KF*6z^5WCHc#s!A%!^;h zi$9tdznB-llo!987k?}-4)Wq*UL5Adqr7;W7f%7?H z#oN5N%8Pe-ah(_6Zra$o0|EaEiyRLqxuKq$@{cc_T#k%^R z>*_Dn)nBfw|D~?}*Sh*Eb@f;4>aW$+|6W&ry{`U`y81us>i?>%zfo6zv#$Q{y82sn z_5ak>|65moyRQCDUH#p<`v2duwYvJvy85lU`q%5~->9p9v#$QFy85^4 
z>ffoWKVMh>Ze9I*b@lJp)qhY||6yJIM|Jfd*VTVgSN~~U{bzOcpV!rYQCI(EUHw;e z^#C^keNo-}qPq7*b?=Mn-WS!qFRFWARQJB9 z?tM|+`=Yw{MRo80x0*rG-}|D!_eFp2i~imh{k<>xdtdbTzUc3L(ck-`zxPFd?~DH4 z7yZ32`g>pW_rB=webL|hqQ70y->&FySM;|l`r8%#?TY?(MSr`Zzg^MauIO)9^tUVe z+ZFxoivD&*f4ic;UD4mJ=x&FySM;|l`r8%#?TY?(MSr`Zzg^MauIO)9^tUVe+ZFxoivD&*f4ic; zUD4mJ=xvZG5*~>$1xocQMVYex6s$`r9!7hi)ViZ|l+NQX{SMt&j2>q-OFM22PFov!rqN&WFue>}@e)bqUDZ5R3IuxT1Si`i|v8o!(6{cfrI>p=rt z>d(WOu6tXHP2xh(>gK08c3-RQYK^m|nFnkGu=zc#fqZGW%&XZfZ(}dy$(6>x+D|uP;xg`Rs0cJ=C}RqKV!1p{8+FBMkPyD}m*T zkL34u+uM1$)>0>X(-$bRQEUNMMj7A9dCrUWdr|3b`zY`CwJUeqJNd`12y42m{qV+y z{~)Ke+kT!eO)mupEOkBcl`dcB<;8TVr>{C=9R=5+X1?1#$&YLbXx-ZI)fE9R`+Rh< z$d3(6L(#G0c~zTjv(kJP%RvacC_cQg1$ImO`e{L?6W4*eXpo_(vp+BFPDPUasus|l zA# zD}w3;^X0TQ1b(Z0@%?Y_emln-HZ|wn_KSQ1CQ&3c?P`1VZ}R~rSBM+LM+ed5c`>bp zEq_tV?^eYxciV{x?>9BIOYO8;23t|22x@XdyX~oM!1KW#l6XF^0Y9`=f0-X#x;nAy z^3S{N*Bb3)W7EDGj3XJo@PF#%S`Y3{k<}HfHMhg?M&n;y*hA=M1?B#MV5DjfGFFyGj+S2f@MvH-V`Q2wYy;)(v)e^Zd37^w|&ql;D7*Q|G2ag3!s zgIZDeo=mj%W77mCMS|19VW$;zC$n04zYuJ~r!J6RT^Q~4oxapx>Y?rgylaH3sbnDv zSuy*~i3szeD~7e(PMc4Bk7FcQ$*Avrpzl4M>w06?(oA|I>q#xGD}m50uQlbXkO_s0 zRSALRwbl`wG^u1#tZf)-&$G_FFKXCrf0X0)=OuW1Tl;WkR>`_(=dBxCzgA<2^$)9h zmMBasifejuLpZ^%@^=!6dj^spblyUkZZ|~-S$?#f+ktS3U-pS ztXmj$Pk4)ItW6VHxE<54YY*+V%W0ttxhajj(+rnVQM+aFv|(xUMg^)Hom7;;nAyrz zHj_$MW$*R#8e`VmtZqNZm18p&JL^}qx#p)Lh;Fm8bzk&l2%$tI8Sl1#QX|S@!)H)W z_c8=f4oXpAFW9Pwxv9UinZUhT6jmm+rl~af?sB1rJD1BRwZVE6k(}^mS@Pkm^o?OS z)gTu`q3%Mu%x?Rk7)=-ZCW)oH(z^t!^UuTEymCcjL2NpwGF_Hc*H29T6ML7O?ANxF zI#U47dLyRY_G6obE6aZfN981U+p|)?M>1u& zicF0CJE36i{zgq%zMJ0NqL(P`f)Mj5L=0%!nklPX+IqgUC9kICEFD$_qm6qQro|(7 z+h5cwUKdn)@t>uC2F@f?kFTr%OuKi2ZD#B{=^ zQZjnC{hnUET`CE>y}_o_8a5}0at*tjn%Z=r{nft{&VxDycQ_DSBu8i_NdADNE=#rN zj{d&doM?o$dH$v_elj(_&@IiD|EduAi$&Qz*01JvI+z5RpBK$9JKGfx5*<@HC>O=j zu*gnxGR^SlO5{VdOaIoDaZISc#6b6Ev`Mf`O83|-xs`xG(bRNdNfHv1B6-y8d>lki z3s+1{e$hxg6F^UTn+>M7PlnPQdh>LX_9?ixl-T^- zZhI)z0d+-xKeOmlU*>*4Eq1hLqDiWPdWdQ|go87FQpT@ot9#E=E+gx^&+9y{dkJJkPdwtveg9io7NiT63N&vcp0E(Y; zR)%(1`|(ecIA{2T(qy`@&9Rax`q4)pX+U5hvuyfCY<4*fhln18)0~=4vsznNb~Y_l zwKGpos6840BYk$N@nOIi9|Hb-QyVVww{sPEQHpnU;$3*YYoWXC)3e+eKlnkuxZ6H6 zF20-^ES4j#ytD`af+lCQaW@GtGE-)4?rDhH1P;bSZ8UV4#5AEl!QmYGGpWBS#Ba_8 zorAVFh1eB3r+!@!qZN^0F$R(4!U@S56L=(Y8_KYJYQR7N)lkiTav#uzg?-QEx9h%1 zFkyrUW^nd*=J^~-uMYLwCUssLd$)yOnpb~0{Ym+|rjj1Kogdt7UoFch>sM~wyLmbT zL&`&k1f^oL&tHrMGeL)dG0QH1Ilt! zukW@$)CwWD{6Vx}veq=uzQu%3C6IeWqeujnT6^;Log58*F1dzvTM~kuT8~dn*`R76 zp}4uJTtGJA&yHHlb}dlD`>4@W&U zCNa4XJ%W=2ByK1Z{H>}gZ}r^DdEw&E%U)~SWzO>MYy^WR&Vk8>A5riF&ifTM02i1y1(BN@SU;}K)%XQEHcOJ#y~U*{AylPrteZTEsOY#RYKQaeeV zNWkv4vuUx&pkMdwFE++Ieu7n5B-zc4ld`jlP24e7KX=OwZZk2>c^3H9Brxd(f<|L2 zTWPo}bsLYvNRI*m>!oQfp&ST}w?EZD@Y3N*Kk?P{CDtYvgG|F=Z3~BdJr9P$%~lGz zx9gz>k`}a(*3*n?ewy2wuwi21gCFJq1h8qH0DHfK>yuV%952T3S7vdv! 
zx-s@}NvL0|K`)d7x7%LXw2w)f_@_#gBJER&$Q5IT7tA6oQ;IoSk-krKh%Jy1cM3v9 zDx3pc*v0XPqBIG=}wdbd< z-6znUR!9!ldWrkxW@L$I123C)m;=w)%(?9I@;+m9l=wcE0}(&Nh%d~hXGX?&P%ycL zlhbZpyBRvYEpTD75@pwKuxuW6rc|qdY@F_;mZ{h&n=K zAS36v+x{8N@qAeSXu#fyb6FhqpOd;#|0%q&i`^ zhHjBx=)*0dXD6dnoP{cnZr@9sDW8pKdvPKXj-{}WmRt0{m<@IPi~Q5KNVzco##4!h z>8zkh$Pj4fWFy6+$l=HQOE8CdU`t#M5tSMJ;hd0U4nxG-C;r+@?vImZHp{|Wgj-72 ziB1+}ULaVtoM_yUUqJD8R(Xb7Mq3n12$en`eH)2Q3ELBM0EW@*+!vfWDU>eWFU?E| ze}MV(PCQ2i6*@nPA={M)iPjwXeRq(YE=yjbT_>bN@Z~5t;GC5%I@8c`SB2r7uR+Gf zd=^8gZtEgxp2koXn7Y|mQeL{hQ-l(N5rHbCbP+kuk3{q)v2y#a zpaLB6_v$wu6XNd1i{f_hg=BCqR!~>g5xL6od98nSB`yE~ zh4V}|Z{N*p2#U3O#TC)U;z1~l7ecoA z6E=urp?B3Rx+H{?Vne&_bA@k#`PrEefUDH@8b)qHa$}zEd1+{hRk;m~9%0!&_s)r z0eYfkb&ba;pZL(Q_|m8>Oh1$Nth1C2JO{QZ@ULiT;Z)Ls&=?Y?{ZFb+_4X(EM*=rO z2Zo$!%~*1k0Xtk2F-8bD2`aL|fMqV^;6>hOGPajKOt@0FX5$Rbx5a*9<15<_7$Wcs zV1zj|Qv#v|G`xs5!UT&}L^+*Dq*U*nBYt)F%eklYV2Dmp$`f{sDsSvdb3>!{=cK5q zH8kKO_&Dz@4JD)sXdS!l``>g>n6kP;^dw$7B@#@=c^g;#+!V-WJB>0bZ$EgT@gwW0 zFbCiNL<%M5Tyr_-VMwyG<)CdAiGEOtY8z$46SDltroctqsU%f5JJW~u@mQq*&PU#) zh)*a=*PN%wmK+sU<~i0+j6BJtGJ?3Ac1Fm!C~HH6_p;onh#A!#V~!WPNU;_mh9&V@ zO@UrqV$E>Yu#@%h3M0+3-d1c{3CfU~=0imN?#)wy3TxCDp2ATu4)oY}mqR4@=7dq( zT!m>axl+;DIW8HF44hqGdVT**vk8;pcH5uJ%h}Y*sPKDw5o4J?qzvWr07RRLXbQ=g z`b};tCgBHrTuDfguhbJ^4`M7|R8HkfV~hlD9v8k<&xEl^7!ibw8&B zt#>8Bp&4-igiHUwR?M`oaGmWNquZ89fcas1PP|8Ikd2M5l;!YU;aSL4)=1t!wxQ1 z0Hux+>nKYq5XaoKnU}Lg$h>fdHu3``B11*S% z%A*CtAzo0ddA(E3I{myc2+0Nyd0(RrbJi0YTBtHa{2)MmP6)l(z3$%$1E< zxc0<@uRPnY?H-pdN0D|Q=!cSbm5+`E;q z!m3^t-y|sE{G1r!VA^U^xJ0j))2Zpb#9fuRcju0FKwF#vIdWx-;VipK)!0N}P4RFR zO}O8~X;5^c0?$@$S%MzrB3ZfaInGPEl1rpJcP0B`T?~jQ8{WrUA;2B6(a^L|`%7~g z*>X=ybvk$^BB99BOT*v)=I{MxE&;8nunIFZNQOYSDPb8}z^>(em|%OZy1U+-U5rRF zT;jOJ@shq8mz5|xH8BjqpX6h$HK_*|Ob!MwyAbcyv)`@ZW%Q7{v{?fUL((bI1GOIiTI#i8t)z^z)1Sy|_y6l3&+ z-Z0JA>qPb3NeP(@G*Jt*pKeKGK62_ssx{na&^nVBqbAyz zOX(DOh%(Ch^Txt&)OCG3Hibt>Y>n{B$Y_Ddl}ZgYY|hUm3HG6KFXV}|k89yC9>-TH zgdJXl3oW+=3Zk(g@bUy;TxdLV63wdeg=NzpqG68mr~^+bI9C_l|U zF@5b4!^O65#4;miRNo;t$jLc;MZz*EAIc$F_6yrDlNlKd8fFpBWv%7cP=f_5@?*!U zVmFCgIz5hv9@+gn>JhW&OObFM9Ym)HLlod*EY3q@fG+MGQu)wyi^E%>)By%v{9p2tg>y$ zx)T6!!?YNQNVg?Ww`{r`m%yr5mXbkVPBEez2`nNgqUN)v8RaeHNsfhyrmzHiVknaD zDakU3u(CnZ+~?f5pqtB?i8)gy51F?@pKOzlzN?`r#Z|t|xU?wB94$DFu@cT(&HcFq zBXx450VUl~3i*CV^7800}YZRql1b^{t#?LP235;63oaub0-rT`w& zP^~;xR*{cNLi|4`ffvSenWp$)-L}P*=`5uqY?Mn3`c;Gj`CBWX*2;hu)=| zfM`p>M@TKhhoIS$>)u-`!P%%*G`vGAZ5`q(;lSm6Atqc>3VXUjx$%hh1tEllOn1I1 zwd!Ix)h-|%q9vV+)nRNbtx7vu9rfztGOVk`V8oHNC?K;bR@6h6LNXa1Q+<&g5Im-= zu{#R*j7t2X%&i;&cRtOH@Zf<4{t8mluk}(BAmC~cCOgu&1$K{K0@xDnUYJK$5alRw zw1}wv#0NX;N{rsUYe4KFds7OWfj=57`g9Wml}JN@GAr@(s9h55_EnMM7;rmILP^Sn z<>zHTScM*0Ug4#KWq}{a|b+>z9LM)6^TZouOVP%?`6sktM2R z@qpddiqQI%X^v2C$FS(c^z!ViZ6XjHgI zxT;WkR&ot5p!wFq5$M0kQylTZsFET=l%J;p4n!mhnX2uES8VSRklhsQM~=oFe8h#EuKd{MDQ8c!_20^I1qv22vqSucT9w@8OXv(9P#T;5ZafF)r z)Eq3QLwQajo{kkG++zi54%Ab+8iOE$xfMT*1P?cBKX(4#qHvELs94s+9O4-S?#4938)#@?|0TT*G$NJ)n$j{4#PC$vM zYwshJVzP0w?Z;t%SuZQE*7Q{A250fq*VrXv~=L_gkuCXsJ zt_^XN=NdfdG-5IVKC4Y1=+8B?lCq?U)aFB*=BmwKSj?y^iOJbUw6M>nH;oftTl17B zcvydQQq)fp7QEp7u}QSv6Chu^61Tvj&=9bdg#jGLB45a@OVg~pq(rEeIX@S5s8SJ` zk`LpE5)R`F((Y)um*YfPUS1;}tu64;)VLXu12xv{!v3Bg z7)=3 zx-g&vx|cvvoK+G;h?leARPy_GHn5s%N>=sDoDDG80PE8k0a!A-*9f~~4G zZ=&3LP0MOyvX-nNRpOH$_}c;=$$l1kI7BxS?&xs^i`KK3ly>B{I4EQ4TE;dBptd<) zG}Ou`j{Eb(mH}Hak0W`sVfX4|-P{w|aFZ9~3gtlXM^<*l79#^BNqmrBeYgE}4SYoF zoqWc78hRJcFFe-|4@B((`cWBNQR!8G7A`;Y^PwfSoSP&yg5aLf9~~G}5Z)>9P4uu# zZt^9y{bnXuX~tH}i@F%=hZWuvxmVCCLk39U#%-G;5@B7cI4|`abcqDjK%K}!QPWDq zgg>eT6Wh4_JoFX3mGtrSQC=N97e&$N2op)aBU7t`eq?57bjeP?1LYEAen@z7-|ZBN6i^-35OE_;s9EkM=Kfz 
zG?hc2eW*0Q@b+Qk%Q28=l7 zR(vhG`G7rX3(XZtgL^GCOr5Ev5rS)gRy^PnYU8B1DgE-+(_Xa@;~ z55D(-UIe<4I+Ik0uWG9LoTXaXLBA!aA`dcwaZ;QK&yGf!(9?0c(GymLF}9(H`R52&3S3Smy;sPu3eIc!XSdU*n15)@0C zNaeoRZU2~U#&4+s4~b3LcQ^AW#1yr1{FUh}Q2?!b_`u9d5&jEzpojl+)F8|IA593+ z351bCRBP>t5$4P(k;w%`@233om5N-WHas7pYl9^UHbVf9V7|mYOUkC`Q4JpY(^JXE zWv5mEU}0o?6PdMY>no~G5+%naPRKX=o&4Y^6nucTmp4c%%_&FLm*8*|Wk3nvz15uW z3=0dgMITQiIgE$e4m9{T0T1J;)`?W`7R&X{6#cwT9qOZyML;@Q*xcCQqNsjp6+UN6iNY8zZ%sawG)} zyIlgS&y5*%yzwu8L!zy)M-%u8hoyeug0gtQDSqU{TNE#vVvE|QEEyaPrStcryqC~$ zRFaG9O+g3P<0{fp>H|cs3bQx}i_c88i3u|njZ*NEQhI){nrriFdQzPI<&iLn?u_xM!LK4?O;%mo6=ThW&|(7Q+L#UYlqo!< zi+Ou{e7MTx3Lkf~*F|cPYzSub2U3k9Uqh>5BDPsW^-~M(KxFO`c91Y0iQ1upY8V1+E7Ef~^xU8(@m(K;R=($q6mFSo0t{D8|x(Jr2h%NIfR{|{GoReRb zFiSAseIRQ2%r?o5Syc<#`}XtM4&^wZGAMx)+OYuiuP_Z^yCnxlWK8t&Sql;5mZ6ec z)5cBrNlo#dM#;o;(>_RYtNI{pdP;KzLKGFx;3sLxLbRmmjxy=UNdl}nohkKoX7+Iq z4rPX#`_omZYMNNMV_Ful=9s*Y5I;g}1 z1a;X(b*-`}fp}(wx}x|?GvAv!ASUNEfmvzNFw)+-_C@kveIW#5(KVrU{8oA~BG$%J zZ{5!gfHG!rstK6~+Eq83)xLkW&oRh#+_lQw?sOle(^?QJ5eIi;KMeR0`wv$?E_uT( zVSNCaU)Ypd{pv!iiKs3iX#Jtp0H{(ayh)=7HGXk?SR%#KsdXUVOlR`WkVXKIUC_79 zZM1R71d}eK{Y>R{e^b1ktyjizziv4Xf_D^>YbKbzU1{a0UG*MHCQ5K=B2<~_qlexl zGrF4DTeM@J8hK%eS;&xzN>%?qJWL{^8BWZ+rQpb>N?L%P-%t)i^d<-+TJK#cY?z%wZWM~9(D*Jn7TSqaPQmJUM7SZ(Sn(V(3~HTz z#t^gZ%lyX+VLekm{#c8K8Cz_L>SSZ)AY#{iIvnCK zJ`HKX?Iilv%j()029@RuD>BX!Ph$hk{a#vG_tq+?_ellboF@9%7hLv%G{I#79}u2e zlub8b=Z0QPBsu*y_?Hb{q}jt^6Triw9OF}$@X`B7qd(^`Sy9&8snuXi--Sn1^FpTS ze3brjQ@#&M@5?9kRBibvw=!rwf=cAP;D{%E70<6+g@N^GY&fb_AR(dn(WEcK`Q9gb zZ#w^dT%9Z-!9i+^mV&2E9;R@Esig%H6kI8VttB!Q6FPDEg9KBcnIT}$`UJv$YBu5* zp{wSSBbBEV|S+g>Z0&au~W36UC9$%zM#a+a+&y<`@?q)6P=RQ45WiIV=L)QSEZrksPs zPWLLZ=MGm)L-|P1t4#|$aIf7XPq1b8Dhc?wh_&~pFcG1@+xqbh&U=Tc6 zBM{C+&gEU~XKSTS=tda4^*}EX2g0W;vOF2e#p<5!N6rGz$kWZ~e0e0A-#52$DM00! zr>G%9i<+|-Gx0E>^5vk94>h6Z3aH~G<6{>4ggXd^3#+4)k5mbA1 zdhI58OQ4?>2o6fS*tn)b-l`vS>}tK?5JIaJP2DwtEbC2BE&gUFZ6Vl|(oA~84o zE44o4Gm>NdJGp}!6`4>Q5RP(NAG{LGaEcY)Te}y%MLWzelM*#(X(owUZ*3ESOL7Yn zU6}346p4m!B||Y5^PzCY7+?G>oW{9CQsf1P@c-4&VZNHbjPedW7&6}O9eKuLtM~DZ zch+*QwFCB9X}MO?hYH@r3k4HYw=)!3!S4-iOY{qGpTVfnUv1fsEi@gX*f~Y{M{ZU8 z1KZFVq7bxmZ&En_6Oiq+$ecpdAh{Qtqd1HsMon2kC<+-1F(cl1OjjoKl*9(UksKbE zJE!ajYM%`eCQYajDXEMY`o0wtiVP^1tmNC^Ze2k4o*!I<9KF2F_uTY$uQgsYR-O0 z5|qpZe)yII3`~obw<>&}4Azq=LNs{`x#UT>R}7W6qTD3k!!eKdFl-OsZXR_ zbb9|(qH7xI3wB4j1gM?)?#C4ahW%M-M=SBqPp^nI3b1tItzD|VwYbMDXDZSdB+s#~ zJOo8wn~?)M=)^I86a@+T9!lR^ageA+=&S3;m&UCSZoIcPI!7yx9#(oe(tQ+ALFIa$ zaCl|}fkx`(gwIEv);_j|q7?APLN!%7sTX%5OFH4#qvweH$QfIf*>u-ZfL983e*B3B zrG<(qTKkf`Qft0O@zuVK)CwXx`U8!a<9G*A0o=*%!ZFd~^al)9{8y$}qJ>&?jnPV% zyyz`2eSa)QO)9Ks&7YdvN;$A2gyq%@-@P4d6;+lAx#-U=$k74b^WTW$kTaAY+Mo2D zE1iYIKYwua!_>+>sQ#emlZK5FLsP^MXdVX$8x0v)iqL_@2q@oTN^a#)tyHQF8{M_N zM>cwxj5@!#Thuyzwtpu!%cBu#_2FpO6FsCBGIQ0#r8KcVN zAWF6@hb#R$YProK7UD8##jKyJ%xN&QSmFfOxM zwbqha+$jl?#xR)v8=*TTtD+HPg@^!Op6SsW>N3scXMQGENYw%Y`wvzuqQ&R3S|}ev z6pv`&35+5?1?3wnB(PDOC(1^h2V79nmiRxI)m)=sFxQL)G;Wf<#=9h-MMX-}z>-qv zyrn~=i5we?#@H)`x|Q7tQ&>SXpwFo+yKizWc>b*cV{{Z7x!zuxizDa<_GcOJ1+cdRNGnYxjt)6|- z(@{;MMhgJXdxa$lWqXQDdPH%;JC?s%8lIW2k%A?Mm`<#xU9RW4kPPA}=+O+z+d+hb zBB6<9rsnI*!8SSbid>PJ3h5@|zfrVTo?BvcqjnA`Bt9F`+E<3KXwDOvzk=2Q>Csj~ ze8wvCtFT3!_5#-VEOPGh)thviTubCslp;^#_k0YJ4I@$mF6Dj&(FW%p#EWwp?9R8eDNO3cFtok(g=f*?=stcRj`iYO>O;5Y-717-Te~)eOJ=1I={^?-Gu2VO z7YU_BL>~p)=ciKx7gPp#${0HQT=a3Uuj`ikt8*MbABzigqop~Wzrv;M1^x{ps0k;=gvQFt0-}$a4a7qrN5oesYy8wRTbn%>{#l+pPWLy{yorsUL6N`9*9zmfiXA6Ga*?UDxH&mGc|K4;sSj^0ePt(BoAm6^e;AZ zwFUPgc9f%FRtw44;J=a#_u*3~jn&$0Y&^vHVx(gc-g+f?My-Pkp>S+lbM)b)^}5Ng z1A!~EBQf5~L~LHM)6&RLTK&L-DEn;^J&_QtIE6SQYVBpZVLCg1)}5F*2rY>>ZR@uK 
zb+sD0Vslf!L#36T7)`9KfD-RD6=dQuVd(|e$>ox$t`vuxP7gWhmMB_MGi>VWfO79+ z4S5akk0N@_2=4(d5Aoa9#wU~=dnDMX@v}o5U^%BK9W+BHiU4T=rVmDwj8MBp(k+eA z**VL^<)hExn4F!OO0%4jdScWR5xKNB2ovPx;}^#{spk35A7uN@%VGcYvwVEI9Q4T} zR;?T&8kM=*H@d!-sStEzdSWgQ!v zgj5;|))*kJHAN*sqCbuRP>+zO#qs?>&tLxK{2HCWY6qh-dK58Se048+u9XUP{}U}^ zpA+ji?V{o@*Y!t#Aav2$Ljs7sQp(T0yFT7I;Ycm!Gg3Q5L#P0F)0FrDmoHTBw$F3` zBErI0>B*FDiwcgX?=<24VK=5Of=C+j>gm^-^ls~2!8m*aykBkGVdar~PJ>h-I*k=a6qqg<;=oxmY0(?gq6t)Z`|e}%LJRU6ia1Rk z98p`Jd}EaIK_e_PT;d1>+mRDe@N!3VwWr99n3zS?QeV7;E!y&hjU^DQ2-S*mDH&&n z+B$GIdekn?km2>5E`m0}7rE6oy)Uff?(uVdZVq`TqgZYhwu8<)+g(lV(OVo^ZxVwP z0URg!VL0%)azi+wC86@Ll(h+mSZJ_;u9iaUNn{}8SmWeH_=!mQ_+qcDh%&E6lB8KA zE=kCSR>$GJK49U!Xs9r%a*D=M;_IRBCYb2=VPcz13hK_LOZS#T80-GLZK_73-(@%LKrB|#MF|CQ^@9SwboJ5+F;`PmDkX7p#pSR!fUs*oMd zOSG08r(eTXjCDrU$g=PYbAFTOwjf(>Dtq>)aZ7W`f{#(}vX$?HMUX4y9SN@IrgOJb zv)Qu;V=<@hky-7*5?d30k zUvXKM+?SpNTO4^O!!fMy&}Koop|6f~Bd}=NYgDeaU8W~EEBURNVKC3jZ|!pMk&uA* zRh{e)f0*#@CzkOY;Ikf-E7*du=Yapr&l6)iKggYfR{?KXf+h+`O1t{xfml4OCF%ym z$B<-e#Ve_RW5|TuN-rPf6qXU7(UUi4iMb+!c5hpK)v`nj zEc0T^(bmj(7_Nfa3eh&a)^ZbNFa)3+MNw4F2%2!y1NO&;6f&b;*-XiHES~X@s3I$8 z0IeC#k|qlQHOBTX600;XD4>W}Zv&3TZ)=iXUn9m~Ia~c{87L$DB zmWo<4Wmxt~^d&MZDhM6h1bM$O?o61Z<{mwPi6@oQm*fbeN2(R0Z$nNTyy!<9eq%@@ z&}OTk8XZ+5QN8SbVq}8oi`EpS6AdGaZ{JARs0K|7i#n%3_}$%Zk-cdKunQxMV1 zt>MYJ&G$35zQ7e-nXpT9Bae?hJXDAb%Pzv1;+VuQW|18xB0CN@y+&?1g5o~k|0Pig zP4UR^)NhnsmXmXUsw~kpigoe17bJyCp1(50e0#`}L_!W^XLpWS`>vwdmX; z$_8=*WYAbkTNq=ab9Bbxa1seMT0Y#yGxi@5&el!Vq)il=O6RtGPNAjDPmsfKi{=!< z{7dfxMO39P{^ve0LKsp(VRlCNuZXD#-1{JghCg;g3vU;A*2C?HjUpcy#mI@;&PdSA zZ?K_tzl<8e1Rnu6vGhnSNq}EbbduN6)x0qHBTW()P>w@$Mve0l6Dw~lhho+btdTa0 z*Eq+Udc3P;@4^L=L&_GPHcN)3KRG z?$j@ja||k-g@~gmUfwb>qRu}T)ecq^%yAnjHrz5!Gze#!fxvu`fbdX_1Svc+nv6r* zo#j1KUJ{PIM9@PcQp$!khv?XE0*9DhixvqYWX=XD)Sa2fpm6#x-=qfVkjCHtN=sPc&eC2{(2^z1#U$)azm>a2>ZqaNiqWK46*UKx<)w+TGBvC_G2*09-V#>P=XDbEZhQAmj({1AHJH3`}JdfQd+X!OnE60EHiTB&y! 
zLP7`ZHR=Dl{*2-zb_|sD_+?j_BX|Wydm(zeaiDFE_Vj7K6$J)msNc5+3>0LVqwe+8 zg0V0@M|3n86(}Z=xt;q5Y?Lv8HIhr-ZQsxYTJZdu=o8wufKU1eCYuK3z!#U*R14`I z`Tc%OOZeZ+FBrklAr~9HwkouZ^-su4Q+f^G)s&)b!^k9Bc)y--rWSPCxssYY(OuM3 zvC`K538W=V@Y-4$1Jm6nY3S1wY(?BboGih?3NV6^w?=$TQ7&?*%tb`fM5}dx;dE zQ?smJSI9Vx)t$mVG*<9ju&+5r>jd|hMb}_&Wle_^$|N;jOJpVXZR3yn2!Q3Y2+vXmqlHq6X0*uL^@Jti&@;ibt~t@0tp?rsC>@p z=_|eVBwS31%|&0f&u;sN^-#oPOrOOst$ERPwP8>IM-?)=sa14SyOM;|jTl4%r*j~fnN9)c{;IehAo+JdCZRDsAs`e=4xYks|r@;i2Ym z(zg>&^%5P!g}M_WR+LUR@nk?fx^M!C-9<+#0koxk zn*Y01`anBo(>-q|K*?(rXC%53LYa!i9^et-cQCIwd!zAQLk%&7xfL$1FVqqFX=Q;B z7PDD^6Dlm1y%zEsBGfO6(@kZiC51@7^<7f02TmTTv(+2)5laFH-d>bWu|2*EDM*Wn z(pxJlZf&}v(}U;^kDJ6d=lxp10s>5$h_lg)7#lyU@Dz>aHF=iIRy1(lbTW4f5NcIY z0~jX$ox6`z_q^=h+Nnn;oR_Obo)HrXqcACkja4cPng!7}v)ZU0C&x_dJtCl9O!M$K znL%$^iY@3a7(=YEl#Ft=gB0yQIp58tg&*MhXJb%a)xtKXWiC@vH`vrLrL-Z5KjhFS zu47-2K`0A5av(x`@kGtV#&#@iII!4+#!hK3Bz$@&?E~OeJOGQdFChfO-&iOGerl5@ zj5{*V!=>;05jNJCglL3N3}#}rI{oS2`|01+#vn>>xC|w{j3_*;LL=45-U>eQ?W;M* z`@a@rCT4%hJos=ElH@fC+oLmJg%epyZn*FTxJPo{5YspmdBjE^Y>?;y>Z)4w<&baq z_;bDa3PoJa0wZZ4G2XB34C|(k>a94B%@tszP=b#`_6)FWi}JOGm$c+o1fOj?UC|3a zGJbM90iv?$B9+!RYefGAzBshNIM)qN7pCL5B_eIwxojo0A(tRgu&tSqPbu@`XZa`i zA89c(@=z%=QVlaRk-e@8I}{AxHt&D)r@y1YjOA^Bm}ZQsfD_O5%gPPJ$gnL5O6dGs z8YU?~)$J&PzXMuVq9$@Qb;?9!`n4hwP3f%ngf$jOE_<%(%h&l_n1mJ!jN~iv!9#S- zq##bC(5i4u;svRV)DNbvpF+MyT#*o3`Gk=QEndUEZ~3_5H8_9VXU#EQ8Y${EykE?B z{#dPnEI7*$C1y!EOzAdpK#hQg&R%+5oZM=SY9j@b-gf%pJ|-yY#6{pf(-!&5Il$69 ze?1!7AxAY9hlXPA=U6w1cmil72W^%X?jy~KlLNf^4qRBuDa911DOwlqEt9QBgK0ch z{md}T31JXjT20Um@lkl4lHmP7Q$VQ&ayXYZUYJU_q^&;h@ParkyR){=(W@l4Pd?U4 zVNns7&;*)mE_#G$F%;{YDI2PN>Dc%0NOCV%m!YT=x;u90Y$ty!D-i_hqIVG@Dj z(W~;2lP6mtyElnY|5>GjS}7DoE?hzFOZHWFdJh$KeApG`+0qk;DoJk%uEt4N$mIwku6bgAjf zsI3Uap-CJq9GX9Zr`9ecBk-6k#aGr#`5Tt##E zR2ChX_sK>%%1ERiPU$Jyq=~$T`%W79U2s$GujSMd9d3%J^0{aQoJ-_@Au30V;)$AU ztO_DhYEct%s?o38n?N)N3?z|@TG~joti3949u5&>meDD|onQpQ2<9hEL|#s*`?z!n zK8gN`5R6`ZH673#1*WDIMB1CbLYj){9~=p&o2OICLQNmxjNR~*)@0E2eMdA;;>xO* zRvV6@TCFe}HT_r1UV>|Xrc{`E0^--3OLIp=qpwag0%~wr`yHv+Mue?DJ(Cg&aj@x> z-j%D>G!9f!EmbECa(qmbkoax}Sa47r2}uoq*vjQ1 zcTHHr`kxKgbh;I<*%t|yn!xFE0zu=;hQa5aUw`9a6?9+GJ5Rs~b0d7wKHf+x+>0zcSdYB) z;lhY+hoKN_Z8$FcV^8RYB&mCI8iTpYpDS1QWMg4`deyZm6ak#yxbTIX){@m-v21+O zA5CXDM09S<#o1cj13|$IEuiMo{AyYUB;~|-8qLErnP}>#UK7++tCqOeq1JsxJ@K4t z=oKCT+wL^yK(*4OoUc=2&JU^>~OALFcn}WJoz#X#$}k_tie4hp_HMJQ!JW4da>C zn2+HN+!ML5A>t6oqVP>r(@6@1QU}Ln_a@rE>6A7Qk{d;X4oF6t2g}YlLWIpMy&-Z6 zc7JpK-<&;xTOg~oeKw6^f&AXfLrsr;5(KGh#;`c zuqeE@2z@(V($(`wd9iZxU>f_v?j#_N6m~xlEi4lok)}M_P?uhy|8V4^BbB!`2uiYw zXkV`6(~tA7B&a+RM(6v`WIp!Ey#y;6C;gk^)1-er&MC#F#w5sBX4PRU8ck|scs+P1 zsrcywp{P5Jws5KMp}uNp@GV#2ivzubmlKU6KlgcyX~LWyErW@gC|;s&#(+DC=%*JVKll(Xn7WfT|Q$}x6h+pIt%40Y`Jzm(Ng&;q_*iQ-_ zaL+OYcH8g&Qo<`-V5W57h>zsQ5#)*gW}i|T6}d0ONpI*Ip` z=6qgwh38^Lr*kH!so&_z_TkITST&$s)mXMY_iU9`Rb)iJ?~OY8CRTg6F-2}1ePO!i zj}(!NL?$odVGq!=apZo)FhsQ%{n3P&#$W~>l=Z~&=8X~IbIwXjP}5()i`!6NNJmND zabz!jO*y7G30rJpyOn~Y{1Q7cAG=A<5mHz_%s!v&&TYWx!CY=S!aK#&pT5@n)VNqS zMp%qR#>5;Fs{JCQW75??s5IBJsb%lBzq&EdMN%tVy@0Z$pRNd_>2${Gs24$hvT3ze z0s7r40_&JbLT0)}9Fqk0gusbX@+T2+)pLlWir$N|u#n@}XZ9V*~%g&$S_Fy-?c*cSCaG&U zD+<7PbUPcBQQSZIW#}b|3;3ajn~&YG)^r=+f;d7^-xCOQLKQ@*(HjNVpOd+)`R%o; z^5$~X^6&oiZ_BKQHAI_#_^^9ie&ory;h(GZ9a9@wJAy_4xg&^}!r&`w88Wpk`c1X4 znCdjrI9Zv`CN!eWof$q6KPip*@k^~1-3a!fw;uNER9OyAaWG|Cl_tivU)2UAb?-%P zCxY*zdVHc3Gt06haOvG?fNFFjN)&Y7A1}gsPP1WlVw~q3I4dD6tDI*nwRMI?BQBQ0 zWC*&rFoS;mM5Hb<1&&&@svw+))PfV}TnlhY?! 
zGyxdeNp~xl!4^Euzf(AB4z}P_pjWA>aWc{^k5K??Qmv&V;Mt3asU-q$W>y7)W6$_n zkOAP(6b~_9OG(3slO9Z(f%=vle3k(C=T_^#8e-%fA{YEhWJ8M&THiOiNTA2z&3R2vRO5$; zF)?8@r`|$9QHSIl1R`0P=c%<0Lx77Q_NrEb2YD@!ATV1{TguZxo^>PaFtwF4=P3GJ z&R5QdNHd9+x+51DElmlRc|`Pcbo0XCT|v!~k`8`N8yu3ej+tLW;=; z7S24`RD5Q){jnx|gxF%jNdn|%ycTLe3{p-9l;g+uSXvw^ZjQ0$2vK6naIGG$Y$0NX z30BxX+&y80LDU&J1j+zmG29M1%IW?-gn?xJmcMIJT03HzkW+E2P+J+96AADRUDn2z+a%nF;6Gb5a@tI%R{ zf<2c|r|G@3iX%rnE}g2dcygG;z1FGRBM}-Mg5h%F1udFI&7dh$(bSG-5@(YE)~ID( zqDo^~IGRSaU+0t{mcPbz^>(62?t^GaltX+@j2Oj0fP-LA!vwg=uqc%+(z$8lmE+)M zE-mDf3d?drON7X31vBQHZ!9-`G33~N`pnCb$;s&((RWL?*9iCo@8j1;nl7fQrMCjT zq{^TFc|FwBXoDIGxGJFOlAtH?p;65)fX8vt>@qzNZ7H3>hY2wb2)4t^%PI+%yC=<~ z)xg{n9+=j}oyeRt8~aAxat#6UDRBRp52PCOG(KF74jV?#Cf!KTi(=$NICgd9`-l%7 z+;oD{8k@kk#&8iD(wPf!_*2dwi~W_zxjb`uJ4$8~AHVS%d39c<2Bti^AX;S+2>O`i zCIJN4hH<6OaF;EFoiDH$QO$GUH&ro3rsw5JQXpFG(ScqxRNa%cNk)PC+EDPb)K0=~Tlio0Dn*plABG6nYqwkH*NwG3V z3~-|XhrrS-w3wYH|9o1!^Mmi!EMMdULmY*hK%~MBbwo7Z5}}($kuP2sL|X-ezTBF> zMdElOcsUqFMcmdzTU6c{Es%_+fzVO9_xLZFq<_+)cHYz0(df z1$lxkfL23xwk&jRz|6xvYzVR@!ns^j{90Bi0XLO3;%~dBsV%1w71L*<$FuFf{TV?e z37;V;$_m|w%N%t3Zcw@RDSI7#2q$UjNH~F+sy-V>gPFI4DjF0>ZBVp=hzloSv}Pz; zL!_O`hZKhu>R$6(nAu1&38=^^a*CF1+;iWve*nW^`Xu~s^ zUOtSI1_y;%&U^2l)=w7x7=ifREPJpX{--t*kN$NTu8Ce_VI>&b`1KUFw> z`XpD0uJR_)^>I0p@b8Zz647g5f-fL92);#$#(hzK>?ZxK`R&_&NZLh9->hj)sMW;D z6QioL!ww5hI|gsTeOt^4R(jqvhW{O6(>UXhGd+P7y;T45;O=XjRd^6Zc9&YVZRf&o+}vAfH}B$)bsNSfv>jRCwnk z!i>Ty2*wk8mVlm%!K#L+q8vLtcn zn5Xg&g=*F|%gAL+v1oqr)t5OA9;1<8HNFyl;$I}7ndn4Y*+*aU zD>%G)eN2CGT#oxdDX2j^HTGq-Uft3{`mhEy2OMx%3%jtm3EFgm5VufyDPQ_|<{km*=WG_;M%l$|N|Fe+0C;q}%}ZP|QnUn`BWON>yxYEb zQq;f!RHo!T1*ff(q%9Yf9DwB?n)MSO&ed@Ilodg=GG`Y?v|y+wnNqHsM3h`eRHk52 z=bhd5XL6$F!~u=dJ@hUwcyWx6v`olv1z20oZjRbpi%m{c(znkr#EKTrV_^c?vr$jd z6PmB+-b3;LG)k-E%4t81A{af18fK_JIL`fXzR}K`zJuV{UpYi17*!yqbz;(Z9N19W zBqsxuM$vsU>MPd{{&KfMgaeK8aSCWm*lyWu*A@ZTJAgTv%=FpIAsR!A?$*p)J-vqc zb*bEVvDoQa<;`&dCcInGhZcR$QrWUc%meCkO~n;aQ1lj6XD~tX?f!hhe{}K(lQ~q+x}vumo!PO!B_N1 z58qhH?nYqBpFDh=Tq+7r+DQWf-7E46uIM>H{W?fnoegh3pF-myq63`#t5w0j@giqyg5z5T9gpe+N zfc;?#MRaXK{27WPfy_-+@>Et9(|9D$^7C@Fe^z;UM+>85>}f39oKXZd1#3FA*;YB* zc)zEJPQ@z4Jj4BLb0oJ!OpQV*ga1BsUYzRDk$`~M5?XE=JK3PmqT95<878?b3T#i` zWM6uRgQ#`YlbEmzR4Zbm?Sd|CGf7-7bC5T9nB$XBdzM_ zO-Y`X7g)QLuwh|<)I5D5ywBztV-n~hqO`V6JO=X+r%C9hVSTLTl_c)?wpA;AnK^AHo{s zgu;pGfOyWAeACejS1&7{qs|1gR&tb>-fe$FccMbv%zW}%-oVv!$;1mQ;rF5vesOwL zS}_i;Vs??2VVwk4*Ow=`;h)!#7zrg(V9K@4XrE}O6NI%V3jAtXJt#3O>}(`Bts;KA zZKM|ypv;9n^^JRGwJwt=cF(DOP&^qS>n@LdW>~46yDdF+@}+n0G`rW{ZQ?6p1L78x z9vgGKxULRTs2Qa{W6dPI(b+5F^6CBj*vA$OA}7YnC7)wrY9=f1)i#G?8fw0}FyA&O zX17{BkrVw=ZZbzi8)T?lk9aOJ8=}kQ^P}$aGakxecS(K$t7RUTtew9dI z4gVhIVvdC9d-ybQNZd+)r#pl8+fBe(GPFsoV7LiQH1=wQNk^=*TJa!j%!3z?v<<`O zhw-@<*Se`I3hmimT6GhB_a;swXEBT*f!Iy~@sMn+x1B^P(6?vcI&L2!6lD{K<`w@l zCLhN@xUg>73~RHJ5jFdBSp~4DpR!s%uE&&*5_ z4iveR{p7wl0)!FR6MoNHY)#*2Ss%Jhh8XVZ{L1lbjnwk#Tzw(Bjz{NOy8*^W(+YOD z(lFbZ0~$R}ER?E=6fi=|Q!WLILyugCbBDZCjRhLnnA&zX!mY4G!Hv%ZK_M8?1;O;o zy`?IWK!Z1jD@Ha+QTlEajO$3lsIc{{jA-B^z7LO~?NjK)0{8;utN7EnbZ|~Vin*=h zpceuJfKzkBr#uc4a^kz@hGc{P>Radl-l6oFn%f-kC3jY~v#&b|MVKK{vT-(xPrPDv zN{b|EVpCZwupO@*;w<8k3vaVHHwB-JkA8?=v(jLo5NFD)D& z$LFz?a*T01AbbF(<>AT^j$rxw+Fw)Z5ISWwJ@w-v2zkBCkJKa#Q|*U?dAZ^sLg&K4C(MG4G<$8_J!%Uufwfij z?D})x)A%&D(cE9o)tM&xSDhsiIEu-=R?#_%>Qs&L_}xOlZRbB=rmw9K45Nnw>l$h4 zb6?@q&=^OHatgfvt=(S`L4O_^L(XKkegBsXpq!08ElV?MW~JX?GzrpiV&b^{EsiNo znIZ`%ccet_Lo4yVW{ zEik+z9F+5kx~0zioZU^UM6WSF!`1C_aG&a8-Y2M!65ZMq?-1i8f58E$@YSz~#S09iXL!ZbtEghrvkS8K6RjYwbb1<58s+SpDnmJMjoudS z6$~$+L^#e}v&e^&R?wWEAp^mDfd_M1H}$8g`HVv9o*W8h?Y%!=)7mDX@}?^@_caK~ 
zfjLIpoi)u8&3n7;=Z~~%%En4>cv(glF(t%?HUm|!`8ta;7DA~OQ8o>CV4c$YH`cO) z0K-k-UE75d&2i6)lcm+-lnaVX906OR0pa};wIiM)zM^5-E5!RXr$^Q^CG-Q``5dmG zUvyEDrDOY{!KC`Mn^eP*zS4(fx!+#KVi7reryu-c~gfp|JZaM z0eYnFZE-BF%C3*=u!;yOP0OkxV>boY(I&4R*!iEh&rsaOj#}(w1jEP@^_-M_4?H=J zIVdNT0>Ote0i}r zAe{3<(dovr`QXwNf{J9CX5mwM@$y)ABbOau!>{pEarn2icN+c|k=a8dfC4FO~j6r;I(g{oJc|6aUT?@SHtK$EwDD@g&E|l2#Z_^GGJ4Jus>B0AoMT`0_;7Ag6{!wI&`=s z%;!fEd^fTh=n2VmsxJoRa3i~ToDTmkj{b+BMMbTLaotw zV@_J!A*h0<2xQk`jtg0rv8k{XYWbTPyG4(mZ}3M5*R24eHq++P=%&>{)NF&5B>?5^j?AcD&gFE!_1tNbdT2hqU9qQY#N@xrp}rozsnnRVw4h5$S=a? zW&0zdoi~ThY3f?2;MCXWg5XRw3MWu1I2h@7rhSn0ODo;{9(zbbkS=4!8{Q=?DM*y+ zYb_~IXr4!kLB^h8xY4i?8NlL^xS=@ZmxnO#a=ccm>E-dQB!LvmvZA{AAX79v6ipKw z9qJM~O%BKoVUw9Lg};@02)U9wg8t0`oo}=_#--^goMLJKedhL4xSp83nBvq@<*1h% z=F%Gd@QI>COqk!*+YgD@CK@p^jDA~_ccXlc8*{Qf3RhMF6lG+%O5)sT^K>+fw1rTI znGbQyP<}R$lh?FpFE|o`g6vA9zIvo>_wBq*(s{-;s-byS&ggik*aT@%cVq&2-2#~{{|Po78s zV2#6dlqVD?SLGZ-1jOOkIsN0uuQkce8@Q82X+|6V{MrKS+0fZEljm6U)wG@3=Mhfu z;fi14v<-Uq>hg|$2Eyl6?f2+2ZZsaj5-lR~*u2t?dSToi>L;+W)Kgqm^Xl0hdT`i| z6d}j1D$N&TP@q2A_d3=Bse|(JdOA~|S2kFyS=`wEy(yqA)I5Kt{S%aziwz;4D~eqf z&H~}$k+&^u_DxHu(*qBOieA2~J&BrQo*-!RGK=Z*3;F+=6JEKy`3vI%3eI2QA6yke z5JMiwCMp)bK~3nX(~GlAf}rP6Zf=VQCbW({VVYA>goB&Y>=Hns{J*2BI)OLgfF9uo zm%`f4_KEWhP2Ia5tbxcO`%7OrMA@QGM(&1%hp6_=Tw$I$@h7h|n}_5vcq$f-e4Lm0 zjtNLyrKVj;awjzV~~%V*FI52N8fP-Ih4fU{VcX(@^1j2F2@48q zsHzhFB4OEB6rnUMsidqZBQl~s!ndI}Uk4H9lavc61pex|{*2R5%R2mA!X6LFz7JP@ z4ZiP{N9Y;RKzNi%hBz6Q>qHu!R*OeA`d#PO(Ff1X&BSiAa5+8^@j{YefPcOh)|9z4 zpW!t~^7`_*I4bH+G_3J>+&-Al*jujpB~TJFgA4olejErNzOZ~aO1UIW_6L2qSLm$O z6Xawb+{yPimZWqhDxp2sGaOQ8Cq84pZQeN!K!k^@Ji(0bD9GxAe#OmA#-bNYsDVpzj# z#lIR&v~aaqgpt#O;#Q!HN>fveIJ)K1fI*ofp8VcA*dVuP4dwolSKdc1`efmxH1mqW z?wRfN1+}Y5U6j@ubxqIf)QC{{bI~E9jLs;hG-i+HPZ-S_EySCI$C^)8JR!@py=WwK zh-ki$lL7CQ-ukk#Q8&xV1A=qk6-#)uKa4wx?7Fgzz(M8QaPF5C^z6^%iBmc)^PUOQ z4Qgar0kNsMRTrEd>F~xn&pAiuw!FqiTcVUOUD+-_di*-+{E?gLuVkcgTDVKl)*Z~f zszl!4|Fyy;nPrKcNY^q~2F-#d#C zp|&Db8DzVwVb4Wpof9U}tT9(U%*#C~tpgK#dy55O+5Nl|-H=tdLN|fpmY?1gd|_?a{41L#sbJ_tIZ7l3ca<*@VJuS!n%B5g z53In_l1bJ2T+4~XFt~NjvLnwyGeD@dgn)!`bdnr#hL+XP_Q6dQmb}Ej;ACNSF-)7d8 z*gD62&$-~~&_hv^qhF7@wFS$mouyTKj!%$27*3+nCw-1iD%d`s8mFO@GHeyIwJ@ba zZfe->wDWU?4*c}8(7W^`@OSt9N`4F^phcEpS>sf+&t0SBU_N7Lh&VwMaE|sc+pl$4 z$PM!3Wde~De)-)Z*S#H1W*RlyO1ln&&HSKGS4)oMu2TSs2uL&9D=`g~niekl`MR>D ze)KpeLAIR0{&{DqIB+LoDPm|Ls%a8$D}5XHaydpM>G-?jI0TOTbG2;0mCm|8=bM-0ozV3 za32tL_w<<&P2r0>H$|VEWAd`JBYuP@ScnL-_0m+N%sMh^Cdv7Y!R^)loKg+=6|hdE zMPp3!SG5jB;cQC$a+-2JibDF>@-wNU7r-L>X%2;GQR`s9`sN{R4D^dW?3TxO=9#us zVZJHxO?S4gG#!+G_oH{c@v{&H;-Ey0?`v*s_IBP|ejanr+z}i$=YHGgkpLoSb8IB& z(C^D=0>6~aGMy7%80N9ny?FtC=4kJcd-lbP{C30_m$f2Q&ynf{)jLGcTU^@j6(h@_-5bS5Cs#n8EvOxfAr_%oNlW%b(1v>D9UMQ_<8+P- zNuJ~qww2`@!VB67&Lac&Rc8k^txzvR*Ed+9Eld6osmg4jDm}_2T zB!*BtoZK9AX8R}3tF{KmEgLA8NvO@l3hv$@0ewAcPQ1zMKAr!=ES?DOM z66G@v@6rMBR3{{F!uge#-8Z$5FeJuG2(dv+dS`2Twt{R?3or3r=TtZMH zZnIY#3k;*uWLj25@u zIpe>4CM*J$=zlv{9tr#;JXSh?@Ze!Cj69W5q707BJGqod@_Vb9p``>3=9?kKRPIWB zrf#6NnO;FrvUxyXjdB-WMY)4E7Cn-XG5LKo^FhZyM(jE7kT|z7C(WZu&Q-uX+G7}!&S*NUl1`%eq?ayIy(bV$PHTNdWic$7hl)A^ck*2{$2}%Vk zXqA|&qCjI~l^s1bJm^&MrZx%t2W`gF)+i#iJxn1yGIk{22>)MjZh|_G;6BJtMwcp6 z9?5Lj7@7W>#ktO>qiKYjfQcD(gp%cD`_ZmUBoP)yZZx_T8Y`z1m+6YgkK^Dty?8j= zSVCu1DhRCDDJatd4k1%cG;vwGrrc9Q!+D&Bj+``FqLWcAk?)!?M$jjc*gT0_k2#yB zh;H2Yv%v#2nCy!R5ue!Y2M=y(VjTH5H8haqHL>O}O{XDcSFzw%2o!+>?cBBEg}Ao6Y9@eF zUBIV=N3PdLuLNB9)J+6S&}pC1S0l_HT4641;6$6eCXLGccI{-Ulu_UR{`Z9)5;CJO zO)G{_T%1NbQ%wLV)SET|@^M$72j2I0)@VOClJraQmxq)n5D?okpB7t!6x^F zF5H_=#k~6T2S~Bn^kziu%a^nyH8{66oL%0GZ{D_7jo~7Rz4_of5={jxq^2;bHAl&u@2uz& 
zQ??a4-9?F3tYmjju4w+?Kz|%Qy95_*-HFxwlC`AZUmxaFPUP_)7p}wOf<01>_s!Cr zDu?*jj78kEuq+9Ix^b8br&Ho91(-oyT!RKrzdu-58Ta-aWTEhB}O1q)DYT8 z(~%SWfVeNxeHrR@PB-|EH&urlAk)KtoL@QcM8=>?S{votjRjc?xhIg6j0L1=)M|z^ zDrtGQy-A6189kW&h^ZTu{^EIf(*A$a-nBXMD>?J?SMa-iY0=)D8F|EMa&F}4X51PL zveAH0Xi~G5EERz2MzH}D20-$a=)W!IL%UOU!y1POgF|DIFGe4e7h zp%-R^MqQ|Xo&Py`^5n_OlbL`j{6g%nR+7K@i~Ik@>@t$8P`8Q>y2u|u&GjejHKxNe zS^sa;;e-gqg3GCAJC-2Lwgo+*B1Ntk;buAdebrP?=W<2Wm#DZ%^Jeo;KdPO> zoM{F}@YSD>seELCs@x&&hWb)iHh-zsJXdQapily#d2PP@2c77F+u48rWi|_>D@^bO zsvC+zJvviK67LCBM5rrY^g3)aiVA9wAfUlF%djB6{ny$r&wF{L6#kx$aCR{gd}7fh z%g9n*jw5yC7=S;Ir`q>yZ_RlWU#!hQbLCPu8*oE0M2K(zWySEMIQnO#k~P)#kMIAiNW(#r59#*I#>#t?gjKbFQdNCk5;-W?8o{egKHG!SUvDsA#%#mb4;U4Ciu2@c2alj`Z*E{$l z)^L5&_|{6;gX~g7h5C+csCHW87^jyZ{Wvwj$^*||qf3#x?W;+lxju?2P{850tNQ_f z%20VlfhBnn!7X7UiG7>MxJ*>XtT|(P5*3umPBpd2;CR^pGZs26(i}eZv6&LR&f%*$ zxnHWeWnS-2OFnNzqhOcoL{0h~{Y?WmrzFW@?%{JH?>G*HIsE zerGE68(!zZz^j~RK|90{p+U3-AIX)47<+VU4T_60(|^I?t$VXh7Vr8tS2|E^``qQg zUPSLDoRKERf{TiL!O+EUY4z;kOF`Q`Qzhx(+OLIE-#ZeDbA{Gu(kj2y+<`X|R8NcBHnQbEFWK_9rFx49pwM#wcjn%#>Gq)AS-c0sNR$!#Xv z2JZP>aK|M%KPRSao6|a{A?Hth1^AW~t!#f-L%?PuXc>R`6q@F)mv_+z{ z@seSEngntIV5E5!@;9`j=-WU0=D#liO7d|16M$T;6Kl#{`<_nf&vJS#eIuc1yckF+ zZ!-10!#yQcIF!?C6a=q9x$s$r0Y?RV7HX6lvfTAmLnx^xy2viqxaFY6yCgrkb`CHg zWDh<6qfAjdlMl?_f&KIcl+z+Th^vYJJ?(a_PoCC<@UPbc^V`eV2Ndw-#aS5gIEwR- z9xsThxO3vUN6*4jyD=(dD-D*Rz|x&UV2M zM@Ei%jto$Z<v@$1S3sTZs=PU>E|jZQhzBLt@5+OAh{ zo|}zS!M80O(vIWs+=7OR$aewU+89P#H!O$Vv+S7Z@p{^+zMp(lvjj;_^Qd^mrEs;T z8!qz8YiyT3iug&=7j}h(8;mLNoA*?CrkwVYg#;#RV7Yi7nh+KVl&9jK(p>_SPj83J z&-yLe!wc!DgJ_%=^=KCn3Mc^J_y&-h`$XHtk)D0<3wVylb|n=Z%+S4dLup zlk~WYb_U$r6x`wLELtBqI0)H;EK->suX0q9&WjB_iwoO78PQ#YyGe`(?_~$jj%J1AuYlE?jlwgefSzg<@P zzJL0}H;01#zOqOsXR31N`{BpaFfWb?GW#p?3QR?8G8ICvxpHobG#BWN-X$VytF{F7 zHSXfL6X_G~n+*MUScc)6k`Cz^+`ekO+i`5!8?=o)0B^|2^eP`3Su&`%oa!CDLs-i1 zkb>UL;jxU*1Mk<8e@Tfd^Cae5zKhI5-0jNTH3ft%!Cjp3#w`Ux-7ff|G0DO*1$%%= z)Se0e($*E zF+;pTTjxM>Oc~Tirlr_L4I-bP_}HH%ME(7$Q(tD@NIe&m*5;`YNs21D(N}gri3ZHA zxZmkxIkK_MWhY!+ik^kSG7zX~h~m#6;#luQ*pSdx7sO#-bNF-26X@L~#@I`XBkGN< zl;R9$$YYAF64x#5z5;KBr;8|l$k4OJ%V2KGTP-icYtPQ|bj<~qyNwLgO4x(1J9KjV zg*R+>ww4YJ=G5VjKllUhlDb)D&a66Ta37gKXn0Wz3vED>3B%n_&}Av36I6PdMM5W` z{nkZsn+bL49m9}Q0S7PPb(r~Dp18l!nGl9i(qns8`fFst0v5Y*v>(+IHb zW7vBN!mFX>g;+UNzDl2tqgfjxRI4whC3tnw28=eUeF}V-wL}wUFa!=WN*+@TrYH{; zxGg=cm0FM#KFn$xBh-|!_vxMD!am0@uz`wBfKPR5hn#($r(9 z36k}KXQkGUjhjexa-ZYu9N<;(q1_@gE#kQ>EDqKe(se@Pn=GPgCIEWmwb@Ew4IK@;W!b*uBEQRRt=Mk%j{QyGj0t$Go+TJ11hm*VWkbdX4yfF_zlvpn2LqwO3 zm6aUtKxawGvB`pqh;wv`uxM~b+ScBpAhc;nL4ibFYpLt`p>%G|>PHVQ_L$@eIZP0Bf5mDbgxQ&dbX5CTzsc*($Ems3|096rwz!N zJFgVAf8S#ba+(K)a*wH3@_fxtG1*SZzr#nsJyXlNfB)%EJ@r*}ZRn_SD)k6O97E0h zCGs}#gmPpq(1Yt;Q?8ia=Z8zWzEk9|v9T1q zE;a`~#P536^cIeI^222orvUI1mO`yjGVX8rdQTvk6rdLSO zx0)}-(!FDV_g#&QXrG#0hVNFDJbRQS@WoKG}_u?vR@0U*Zjvwq-I5`wLab2SWlv8yz|!&X>^N<`~63RgVdP z6&IfL<#qr5mj}+%LiKn^s_<}XC>l;nE|+DrfFuBb z&%cPn^*@S0n+7TsOwUfQc#IF{1`z;nOwaJ9wJ^izB^NgKKpGT3@vtjocn9Zy@+cvn z60zpANh&<_iy|0M1YN~5Jt};tLR|iXkhak-Z{z7xk4tg5;-@!jPPXH)(ax0M(CTt;x(^qG@>xFkN6NFJe`NyeUF!+}ux! 
z04Cv86Rs6I!ojsfe1sD-GL_)ZRVug+!Sw53M#|9F-?;^f)(UwVd#5dOHKOl*&$FfZ zC%08=bwJL|JEMi07@@e6IpnPe<|W+-Crhc!{yEh<&xwp?cAS(-TR7ZG%M(8@oAO3( z+f>%M&(*##;z9BiOF(%CB))8X3jXyPAyT9y1Q!SVY z8}pI9S$ul`Z|?uj-zkcguJ;ix_1E}?U(Mot;7XLMWGxMFpj>P8C1=h82y0w5&pn>p zO<7G)$T2@IHyudgA_(ck37&a zt39xH$3@S_=&Wuiv9nu$=NViJ{=_^&PbZAX)v@NhpsE-ei+^yk){=V*VGPvDH^T`^ zq_RRi+udns59lUehssPc>>WYGdU*Pdc2Kn|R5$4(M|k?vPaatev!!zItw|Ha)Eai7 zRIAu3a^=GnuS5ZhNIVoj@qs7bI8{OzF2fyPW~$6nOoL1%8`bk_MF>)dssS}DB844F zarcD7c%_br{-!7GD!xfo)HE`ts~_Uw7%D)M1R5(5B&wC!dZMIKyR{g;D1(cB*nvT93sOMVmky-UK2c{UJYq+<~B;@ zsmQ!{@p`la&)Nhka|9%y7@i*Q?a_jp!Yu%#QVOFNwT1h)ex`B}Mf!`rez)!H{1%|+ z{h#|9U)_lH^G%>?^>2jqv6=hC$LOob=6qsj$cRQ-42rSeQClvQ`U##M6Y_=BQ4{hZ zNVDcKg}9)SYeHK{tok=WqV`*m=@qANdiB<1M%fts;=B%eUzu_+WGf#+k%TEda4n`Q zN^7WL^zz}psX_V9wP=Cz#i4`0e2=U8ZHSdk^kZ`f@j_I+a1LQVA;vS)=-KQ}Nhkgd zo_duN^&dvy-dY9<*s@A(yj|pcQV9C9c zP8F3W=15(K5((}Rc^3ZiH*enf4dYT@3khu@>l_n+8!+JqKrpyye&MdYm{ws(tx^ZP z9$^D(&r~-j96+!=Zc6kX4~(k*rZ4U3MF%85-|-kYSZa7FqxPphh=e+c&v(5?lv`4r zW&Z)s!}^u=l$|fp!zEa6xS8SN4<#6@33`qT)ib(I{0|jjkW2M@%IHmBw?l%o%PcD- zb(LpJE{qyzkh_kXLY00Ap8i_dM#uT-Q_9^{f6sE0q#l1-1Kq#>tsj}`2+B$VH^ZWEulLTvyVuB6j*=s#xGN1|`MsZfe zLVpi@Vy$uranNOm6;yP=V!}CyP6{!LGDll=MR0Td`8R*P`rrEY$=6;S$OCf#XFW79 zl2QHKv&{`Jiz}DnYK?bL>BW~VGEsnWx753KVo3lBf~@E7T0Q#YQQ(DF1x#3srOF=e zpD>~~XE;0C_whJXpm(}Xm2gjOnyD_@b zaJPj6UDdw~S5qBNuM`!b&%Ufe36z@i22li5$<`qd(-9ITKQ1mK(kyDP|LG5lA~@hf zR;z*2S~3tGBmoJEN97a4bRTYcFKzo-E3cC^{~4IKb~NsRA~WseZRtqhsDz&GXn9|Q z8`b%t=6e8or~a*&cdcj{*Dy|kyEPEO_Jf*anr@0*b48FO%1~zJ znlxM)eMJtUFsWyuc5(Z~PSb=B@!?xviWq=qk+d#F>f+XDM9=0C|HFw(c24l%u`&Yc zVI1KHNA+Pi`^>$B5jjzzy20JA`mRCU{M>uIN|l|RU9>p-#UGd)1NjxSTdsPr`PLJB z2sOrnS+`M(UEBau|9S~EUg4c7eiqbTN>q`8-hlpU4{bRu>O*d&;sqUoS~^hMY&Z-#QxN_KelaB*izBZi2=WvFn1|D0 zuX&@jgyByxHDfqR?}0BoJV{0#2Fy(ZyGhs!7c{=gh^lSD{hetyy>_i%6jG0NjjZq< z%6l?kh~^;Tnm22}e0*GRZk*FYQl9+$s`gNr9K-%h74(|>Ip#^>zD3XiXH5cu9!h0~ z^FZ<7kz(R^e$K(TC7J&I z_I640)*=MTYgBNc+Hn-rTa|xFl-tua{O|)h4?@&LNR5pvXk=ak^sIN0bQ^%W5t9-V z*BK*+sGj_6aUSN(s^USNo=KRokJ$-JCz(k?*Q77ukN?;^0wNiu8}Vbou`xUfrp@8) zJ2M6%=O$|X%J)v2$gksQ(DkyBFob<-`V`ITr0=sv?`X&TW&l&sclP*A`0@a6Ad#=a z6Rbcq_GX-TVnOy19utqSD0rH26R;`-tUa-fs4H-MfXHOXxL=>xnvf*7jC5DpJkBRI#}RXKcpts?O)+M?gR36{CFFXBfoowaHqWFEkQ zN#2E8jMbW-a*cSMn@$gmO*Jv(%mPZy}F(nsj68h1P zJvqu`8{sq347@@ehV-T3oKif_$CVnY(u!MO!V-+eUL?yWLUf10d<`iQip;GUqOmMv zk#YqL1hKi}%O~|Ltv75PuF9&mlv>g}s-8mzXC+Dk6k(`i1%@sLlIXQ*@>u+9xxn2p zI?=h5>2Lm~X}}FD%{{LDhubOJu3WD7SGbxjZ`vF5I`mYuA+)pNI2`zt<}4B{mZ#B% zSC*{uuiiOC6FGEnTLl?Pxg zFXtHJGo0ak15;_=1J<|)WR({!7>+SS3oHko~8?|t(={VaX`t9RT)t}?WgSCL- z@S`x0aG)?D6PCz)U#_4VA`nOt6m9$J-Bj}kTVXOM4KGyUH{3M!?xmvIXx&a<`YM&H9a9BP0^=|5HE-0inq|E?;nUQ@n`**izb{Bmrl`mK8cXqk@4k# zyh;p`a^Phk&sXJv@%__o*;i{{BG+wX2(;-(+u6^Spom=u?kOecH?eM%Qw6 zAX+>nK1!tshVZVDFrqrNNz?iE*GV4WJ&JuDMHWJq;++VtQ3g$?cd&;(1fUJ!Ac3kn#l1G>87yacTUQhC+nSbfSS&e4X<{ah;yqg zbL4o7h5z*7zgnR>H^d&%w}ml#Z{b3(0n}h067UFN@C=DJIwi!Ckc+FbfF40Lf&F3( zq?j=^d*UM!{;FRFvxfWxj|wU$$%2x>_3N>2?_eeGn)b$NyJ|)g*;^|6J6s)-sh->B z=9WKf9PW8=;xaxQTs&D0gLK37au}+q(;Scc5XHq5LP^H+EoY-gpCEDLP{>8tY9oq& z`?QgS3A9&XY^}0*;pnM}PW&u-XZDTr7)IB0PcGpZ>a=<9{=0~@QA zj!cyjv=F{H{1$gdJ);WNp$Uwf&UuGb0X;j5V3b5kemz~sCI0kNPlJ$Q7|P3Q%wEpE z&$$YDu|_jA&yN~`z6_LRG5q9a9xfVXR?knHz3_oYp=nC_%}ttj7%GcM6Fo!^EMEEW zH%x#h*));vb!jzC-_~;|tuDFz2Orp?S4kuIQocMdr?Qv5!$J^-Cer<^J>SR7J{569 z`CJtC7W;15a62PM*;_jSq70mRYO;^O=F zzxah0rMk7*KMHON%bprRu(_jCVbvEP%KIVTh!P;7OoJ_)MP8wj?t#nt<PxJ)9lw*NiFPk>b)U(rD9k=Q}1D{@jo z@x#7!tw$6WBw!j+sB|tS z_qTl;TPx|Y)-_pbjh8$Di{#(Sa>`^FZq8|33|w{cQd{Ofv~d9ET=U^>2_Akn?ug(B znti@%GxuN_a_uE2+j|4A5DQE~pk7QvRohJ6=;{2#KUFABNnK{>k3N1>AJB)49Osf( 
z{I|s^6%0}2OpQ@uZOv{}(KPB``jHdF@hp4+NQ9M+Sv$Bm9dLdV996Kz`>Mlx_=gB% zfRqWXa|8P!L;y&Wn|Rw9dkLODn>L|I02k!ExTeCBwy~&W)Qg3Yc&eCi!_wlKyV46h zqmWqy@uI{Th?N5~?t>67I&*QHb&<~dzy9XW&6AkzaM|h5aZQ$|xB~o%jxrIWrNwej zo5PMnuc(WH9gT0=UQdG~a zj3%GXmFdUX_WY3H^H85BvLZ}17!HXM!4h4@d3jL+0QZ5JdsQGS_47UK`?w4-9@?^^ zM6+(7;AohS%9nL}Kbx2yDw3%P(d&RitV+ucS9PtJOuDt@ovCs}eYtWwnFWp(4HcQ= zm#Z)2*zc^d+u7UNt0@YY5GHaGRILHPzLMqvTJVh4*ZaVIZT6yIOV9cc}1`*E#bCjZYL`8m`kZh{g3L_keMxg61K zf{}Twl)Q8kR#8ZXOz7;-DRk@=pFkDe_J!&co=cZ$J zG%@^fEelBR^143AI~+VJc$pMl&p^mLsN_FfE68BuTE6xk4Wak!Sy5o>vh^rF%|a^= zW=)y^6TT&Xl747Lz`clxR0D#&G{cKW;Lpq+ADhkW`&BDfst@kb;@vcghZ2|}0e$EJ zULJZ0NXcDEt;QOG%DQE zX7I25pk7G<^O9$`ue>|eO(=pmtJ*dqQ_YD>;DS4{$cZ-6kkaeVqcmV#*AK4d+LmAc^NAChkBdJA?$tv1EVXL_FL*VJ$rP404Far`vD!%vWmLQ&WzSA(W5y>Cd?C9XW7E5{pB(?}lxzb1;J5}3t z(_6Y?a0NvufqHntz!`R_j;5QBRb@_%%5e=0Q*N>9D*J3QAGCpm*__z1lHHQqIhN!$uQgpzE8AuVZ>1apnn6-XKAnO*qnjRYTaqhnXiKi z0tIgNG^Z!HzUu9q=i(&))U*psPv%R5;vg(DuE!lV|T{{r~D2H zmY&=uh|4-^dru>-LB*?Pw?_~E)wJar^ko6-BJYi!r+8eiLY1{7j=T?>PMTo#Kld<# z$?2LfW#t~iXN5K&6IQkS8cp38W6Lj>w*VZ1V{jO8C~X%a`MC0zkjAgw#i>sHj$ovN ztl@$)H9brS%P^r3<*PJF?}S21?X6J(=#G)dfNeS)qZ~!p;*#uBX4U6b!shMTLSzPc z*VLTQ!c`)jHKem{vUt%F^wFazIDbS`I4Hd8&8~<^(Ui3LEHSDjrv8CJvlf{Ou9Md! zPUI-bpV9}#6y?RF0^yv-K0@T@b#=T5>jAf&0-gTdE-0e_Gb@o{&GEUt@F^y|zG$0# zPRXEJCNs&7Wdg)ow%|5aTXt^%Y6G73R#WEOe0?Q8-;U>GujfV`rBaDnUkkizZ!p{# z;*o-XS)}I}=YY?yBu!@PapQ(Sd~hWDah61R?_wIM(O0n~JS6g(6+Mt?v*rQN;Iq(K zR^=l@wUjUaRWs|z!}Ru`nIxR|*+bOR4N zBjvv%9x@$!Q`MiPznLnhwZ=u(Dux&Sh|!L5T&b3g$aUT2;Wd7!`R#Z=E~6ICt6I9X z3^gb-y>WTfyAg!+sJVC%?|CQ_SwEeg%%WR?q_(6Gl~OZfLZ<=Ojz?9Fch-bZmS#pd zrxz>ym&v|A{mOUof-9`{Oh!YdWK({aP~+F$R2AWRwWgsB?RVBH08!EH8jXLyowYrw z_@tSIn;S)=2}N)(!(v<$XRcidAzr9B+web_dZQ%7TH3c@; zIUlfZKE7((+>>MD5&*YLLzj%7T$4Als*GJTp$3?3YLcf8dVIqCopjPum#FX8RdT5rYio4G zEs9=6vq5~0>5)b(!lJc$1;1MrkncW%Pe(H#KAAOl3->njTw|E{^+ZvwW(^LUCKIw> zQDI}aHO&{Cj=Kf^>6$oqVIZ^j1z{)-m0b12hx6Ry0H(SYrqT+>2K#oVz4IiWEYvci z7>jLMcZqFti5FdzPv4zWm<2N4Lm%s&k~vhn#I@|H087wm366X|*{nq%so4#?YJ~il zJ%E%w(J2WQcIBgjepGK_1s;kZa=WR%3M23W>{BfS?{01e$({eI`0#4(Lq|%HwAk7P zO4Y>XqNn4F_VjKCytt+W{;6K3;BDN=s!>C!t0Fk&k?v&AU#3h=X7;t|`_OJ|t)z;y zSjg(J4%KqoRL9g5aJE@+Z)?W`B40>kr^<=ffS!wHQtN$;cccDIfU*6J4;8n1mM6`P zDs1J>pBPY7VDsp(-FkYP8r^V~>K(w@0t>i3wNM~z;ax@ldnlpkjlTs_8$6|mTSzc$ML)VJV6k~*G>L;e zUW;JiFuu>@@Zst3t=%!pYakj1lBn@+GS@>RP%}0qv?2zGTtPM#oky3>S}k^C`0|!l zvn(?^l&E@AI(rQvxA2JiXkE^?h5b1~%UbO4!tIktUumwgL)J zQGM8T?RmkCts<`_qo|Et&r!k~d%3;g3{c02U6f9>4YKhWa}y+n69YTV^4k zxP?=yQ^quHsg=`S!7p$7i0w7x`B`YNAK?P3`{endH+*w>*3WM|E*^d(;D`5BrAB9$ zZQy+S@BaS3DJG%vvX`m!elqGET_1>%dKsLLnD?j%7i{?W!voB_NGS8Tro4!})r)we z^hqwxv{aT?5u7}ow^2+<^J)9>Fm6;YqgFA!y)@H~ejV5zo~arP@rRliPS374Lbu7a zO9f10{u09o;Sfxs#P}8!IL2;5JGz;DHDGr&D|EX0C)lR?=gs_WQDtk}oDLc2ia89x z+C@q5mI2D8%FO70`7i4+bdN;{hbObEo9JpT2hu1d7Hp=D$9XDOc)_*$ARtY2y8Vuc zQN?PlT3-yrpDE3Ls_j=%S_wV%B-YE~(!3_r&ImsgUFcX@oQD5{cgK#B6{?7qPug(D z`y*VOQkh(@MwL$NJ$MnK9A+N5Ha$}if;Ao}7dbY)H-k&(+$fp3H(Mbvn;C?F=%cvX zA@Gf7$Mp`B-A{`eyV~3OaR2_}pLWG_TGvt$e`xZm9GhCkj$z{R#p7PQ6Gohv!MpDLe069I$?FU^{8Kg9Os!+q zu5C8$lA_%n{pTJ0QGHqhlcvbZ2bosV;rY9`6tQ|Q&eYP*b5c2G^mAveFDN^F@op!&}@>!=qEKT)C~}g#aw*wp(b{T6#P!XTXJ(D#Wa1tgd|Y=|H)dL*p$F~&+t{{>FXS_ilgzvi6XX=yWkwKQiEkHu=${T0lu z==W*ZV`pRPvd&d6P{O1`?uBm`hJE9Io27-S{ zVA|{7B=!3Al@&c+J<{JX7GuE^f^ETf#4Wm>!-zy^!5)*2X_VwI$`M@mL@qhy=CG|O zr?G+>`^l}FWh*eCbizPfB>)WPKuMkZ_qZ$lTS}9K0<6a5;{cdERHw|+YLK7A2!Bm` z#f*SG#+{Leb?8B<7(Dv97mo_Q7QxtfL+PDhQ2+#4c<2c`?}<}OJcxA^n8_Q0Y<>=m zZ;OXN`Qg9l($2e)yhK;qfHq-*(>y1Ks76c@U+^m=!GER_zi*p+8}fNO<{3xp=+Ay>cV9uO0w{@wzd^-~Q>} z|7rf8XL3Y6g}{cWEMsI=QavFg$1z>3y(ZF|fuMM%2YufO2;4T>w}1Bcf94-i6M>vO 
zv!w~mq^$ZJZ)rL3=PZ+UR4q@c~FRc?4v9%$(2yR zo-|4ds*11D!}IyYKLmqJrmsu+`r^<6mokkChe}oqmG5Qf@+jFah0Zn&Fv*auh6KKLb$fiyKlOv!VHuF7#u=!KX&3Nw72ty} z%qhcE8TmE-TQ?pLTF~w@3B3X7m*Et<0p1ga>SHeNcGc}Y+o^NLtuYNfU7$mv;Bht4 z!XPrzly>gEknOZ=R25Bilj zq!(vlS}P=s*sjfkJ8N=?&6{O7?76&=dG#QXZEphV|60oKW~}kszqtR)Z~hMxmYM>V zf#G6{_fqI9bS&XsmFPL zV3}Fb`UTP4Fs$UO!Kz&km%&dkbZ`VUC%8HMYP8R@Q1w<-)P0VZ0gW5kWf>kU!!=mw zOWxO^RqOl7|nYbX0Ho z<;gr%y-1xfZfb`jto&0M@b>ZTRT5lj$<;^!hk~RJZZwu-Xffr8*Ul&DE)&fNX%9QS?@l5+#@ZU`P zCK|4@pgV>AR6MIFLH|C#dW1e&yc|1OcU06cB{yz5iyd|5fA<<6{mlP4HIW#|b{ zx(D7meC~xmm|uy5LHZ3qn)e22y_DBrnORXgeKrMy1W zYKt7V9|4b_)c)SSyNE_cPw944-*9{PzcEz9VtHs=p2P>Mgv%=wbMiSk4u9h#|0RVx zL8BM#_p?rk;N*&0hnrWxwe&2X)7`gr2p|XU1}`7jde+TWZ{8uQQ?DiM_$n}n>iU#2 z^RsCvMgTSQG(A=pQ12T%LB;4I5@6KO=MbDYDe5~$X|XAF?Ka4Rs}b}|pI#(<@f1n? z13uv8DVH})cQnmF1b_00`ID)w99+eKD&-ulfhYp777Rb7FmX1y;e4R`dLk>!)q@cl zuPq?S1;2TH!f5`hZ~ljR@Hyu)n3L-x`4i$54VbYfpu1pg5 z)R6>?e`KL&t`e!qJ*tIOlO1(8KU!70i(i-0*Hfo-~&s@LO%IAu=AI}zFVVrRom?YfoB>KF+^DebY9co_#|I( z7y=zYEYD5{RAbQjMKuNA<{_RGjr4bvE$>~R{GXS(q*`sy4I9(jpl<3u_SiCZXAN8{ z3cQ(!_T$sH0fYj{c(WWxO3#55oPHDhY13J;2-O8kRRUPX6laAedV;yz#YJYfp%2|I zvQE#&6_$t3h)^-ye#$$knT!+oH=g^zwWNtdyBt-{@b+$C+GEPs77<2RG(T}9T~-di zs+DbB7Yx*}w2SmXbIT0eQ{A1r8|De_Yutaq^+WgZgeOX7j_YuG1Guy(h1w(&-=lr` z45QG z<*64+9z}~u4XH8K4rE;rHHYXKR- zkU;a}q^SD1xS&p!D(;^xYb|(eJf7@gsOwjv;f$hf{>uvfGm9KBxP)*i7%S<_O+pX~ z=AEsZh>TJnh^n|jFFBr=U>&mK9nbVBQ}r;1uCJjC$&h5Tf$Nc)TfZf->d){0*P0Hg z>ivxZkhf8;6f<5f!)9eJEPU5RpPi$f8bm1*HN2$ZS|3T^n67V0kS_Fi1e^X~akqor zM7ToqveRh~F6K^!ZpMy~W*LaceRl}K_Kg?^x*;_^no#pw`a+eY!KseN^UCkRBMFPF zj0x}jstrXPPmPg3Z`d7Bjm3#!|jo` zS3@arIHQR8<|z``7CUFPp2V4c5sSUV2K4b0m_tsG#_hZ#AW@6j3?BXDV{;&`tPvTh znMuARhiW_dLkFr8@2AX_LU3SzyIZI+NFHq$4u}WSN0lC zbZ2ttgSIuPe^XvPw4{|c2|Sw#vluNI*%QB)Wp$8%L#rI%7B?V1OrKF*5#7FcDpXW5 zHFPKWSrTc!gRI&**!S1U>{Rfr=8cIPh*Gq7be+5(x7IG?-#=M&D%c>+nx|s2U!zjB zby4gxq^N~4Toz^$0nu5{C6vC7 zW=#HbrIfV#CD-yzwhi1=u=+i{S#rixqH&?*DbxJXCePgL}q7Kqn3c@e6`4()QGjKK(=EaH{dZ7kqa6X zDg)8mb8gHMe=lP0_)Nb8UO}gAlCoi`-42P$GJPeG3u4EZDti9xQB9C0c|R!%4v2Go z@%0*^aJ5mDAzWzGAVA+Si>eKY>AC`?1%n<8d1i_&LEBGzW2ulAtOOcmbU*j}=_+{H zvTknii@oF$^tf$p*ntUH2K{KI$)%`Up=LZ3>Hn!#J~CsA@H=toj#^m!tf+$d9m4;K zeaNHUWetud#&5jZ4U`;L2*igo?6TrIl*;-ic`!A`{rlfri_@|^Jj)&v^Mob&={K{p zrsp*|j72@=PjKzmN&%8w1{UN=`)R=HuSKh9V2Jr$qX@MVzPS}_(ASmR-ip}7`6Itx z3aQ^ab)X7EcF#ZFC{UySL;&(mhxOJ9KBVDtk%pgAdt56oiV6@N2aM-Tl0o7~#xB>h zq&m=E$t+7xHC3^x`C{q2{1vOuwvrp0-UeFwz0M?xb7UtK;tr%ezhdoxyXV^ z$xT&X%8>_A>Z7a*!p|OA7_T@~1l?egBB7eNhqeHZ$o0Z(O%NQ}xZ3J)!Qg$n&(b8L zL>hLnd4VR%SLNZVw@ubN&RkC4Wm~vc!=o5WWqGkk9|Z zs~^D$_lA7F<*#V-trBZ>uIHB_jgOUvU-X{8mD{QajDoN9PmhBED}R~)fIw2ZX8iy~ zaEk5H@0m8N0u8;@w)^+L^EQUHe#v&)gH3_NM#`O`C~K1Ihc-6Cb;^5 zr}*m)S><)0kWmyOsa?B&|LG_7Z)8v4kdLW#vSv}y2@1Jg!>nw`S|X(*()Xl;AC62th|BIE1Udi1uaANWeNt^Q5^Aa zS51R}L+1ey=KQ{rbxqplA^akblPzw0F`We76V3#GpLG!uPM zcUB7mESi6F|99~K0l^4s&QQ;X*V-$32zuQ3Y(F4bHBk>Vy?Tpr&@MXFqV~3N6ND>! 
z11625y1nW&)9R#T^IH9eFHjflUB?dwpR*@uxY+}gI%VYH!B0gAAY>s;6|i*Z7!1bp zB+xu3eCsHg0vvhylV6c+j;O0W!FzKTu0bxOkL&f|JQ}gOV_kZx4 zzMDWlowR6Xhvsn%l_lRM(Uy`zy^`yO4Pdyq-xFrw?YK?MnI|)r*7ExCv4sWVXqBeU zj{AAy6WH}N*^Q%Z@5x6WH5ggBdp&OweH>&b6h`_W1`o7Mfm6Q@t*~uus7qd}7s}Pdy%Yyh$zN;g z`5SDaUL;AT(`(BWU92~wFcDI#CimfQ{D!GTQw<+u+)@P8TBshWK7*rcs2Ki$K4Y1w zW(xB#i_mry0b|EJHhYfCC#>}=99aZx^;9^zaZ9QsR4Rv?^q$CMF?EWz?m(3#98Z@_ zv^GNj_V@CfBdd5zX#+p}VJ&|YMXR47ptsF-5;PE=UtWY&DSEPXW72-bHJ+XIAn^VB zcOfP4ns`IWP@)P!y>xza5q-}$mu3nafk8c`Y_27-y`oh?9BQgbjX0WrPxqqUNMVG5 z&eTg0F+cOh9D~P0@cWm%IE05xT+e=>PMiI2LGl$XK_=JzWELagJNxVq zO7M#bC$)}5puF~Lx7`Kgvl(iCaCBTx9BkIV_PPQkp>FO}&m0ptYU-1GQ^x`I&!_ts zr+7pcIXt>oY-(x028Phfel~mU4+Nb30i1i0TwO>*lyedW7Nx#MOm$2f_^`!4IxY4(zFyiy53J{HqvmQ!CZd23(F zVOsNA!W7hM?6E9^_VwZE3ZM%Tj@Vv85mm-FYc&?ULi3L0m?$yXFP!TleqMSuXNu^y*F~Dm}_I97N0>-iP z4fAq|&csnkTAitexcQ$#MMSnKP$D|oebTp*|MA! zei?W}OvEUOa{vCfeyg}))Hz5;7rGTygz9UIkU#(CuRri$Bm7Te#)xf`_5=JHb)kMl zc#VP=p%-mjzzuCGoyhnra`6J%e69Ysey`{jAhqy~G)U+(H`S=McUXt_MO2+1R?AqmH@MG5x39dj>u%eT5k+T7Fj z;g3(y$b<{gFT_ibfT!WCf;W?b9R5~%nC~*~iW}9s^mO$u2-ds|R%p>2_zVG490o8w zI1Kt^p6!I{J_*55Hh68$IfeJykMt_0`9$FF9#fU`)`Z|b!n|J2Q3%iN3l#Jk9n&E@ zkPU6e#07+}uD3>)D0Wr{gZrP@UA+Rpa@bwwuzx|K=XX4U<)N9u`W0Av>z_?Ejb9DcPY)^4?f7)zlTDaQmwy2!n;ALnqD^UOK2be2|%&Tj&o zQB6ZctpU&wd@PALEh6MOQ8@ySMJH$#!}a5*@1phs?#(!_PnNO2Tbseo)s>+H$@4vL z6|Dv9&%4b#m})=wBvz5!Fm5>km}ihA2nl7$R*<^p$)3z_G8-NL1CG87e&S9!qs+dy zmm1<+DlaHfSd;qQ9|R$hh};yX_B1#HtetEiV6y2T%NO{QvyV zZ<51Bw_lcMc3yYA9d|6;;sdP%{*WTj2b!V@_4qis;augu6#IK~Mn#K@kn{VBFI%eP z!@#j&X1JYSeivXM?<;OFzw(x#PzeA;#n1g~Tn^~UM7_Pq*I2UAJ&+@-9GChz(D=p; zZG2IGCAu3+kn0rzTVDdR3VZC!=>?AbIy%?qQ6y+M5aKd^>WpnP2Vd=opPnxV8#^Zm zN|ko(l5Zas{?S3*IK>Aqm` z3=-t*&AEU7z3#fJz*U(V63PhOUILcdnGeVi*$HTj!0RGtt=>gcu-r+NuA#pLXscd;zpcBlzBIU3B+OHKU(PTxC zdJ7PR=5NX~HRiI@7wSP|&&QTMA zT?y;oQrl|YbOG{;N77j|lmQlm1c6x*n8gS<*0YGz^Sfz}OJUm}7kve0WQx>F$!`AJ z5<458d!5ofRc^VD5}I4q%Nf`#TLq_KamYm<$Bf%F(jH_~-HaqR&zz)&S~p*M-IL`d zF8P`%LDP_Y)D9y_J)0^D<|&uDo&5-*(i;EhANen~ha#3O1XTQGO$bOYgNddQ(Phf; z%;2pc^RUXTKs?#5Hz?a>4X!cVtbHtG@tj6i!ELJl5vDBqeDv`n(+AHPLHpXhr;sa>yT=M}m%=Z?E;l%?>X`@`2{`EKO~ZZm(tf%tJrz4D*#7R%Ffq3cT? 
zjbKtZWVetvnO(H1Nr4?xOE^5Nf4%9xgu7TT9*{U=G?668+l%8iN8^qiz>HJL$s)x4 zzrO#w`u8+axv`Jt0X>&uFN+z@FPhJ<`+Dbv&V54UbW8ElUU{=H*Ad$4R0@;&7Za?m9u98^ zzxT`D(8V%ShzJ0BK<>SvYH7R};gAq>EpYj@h4mhuChAvszdi>-VGfy7%XeH~emhzZ z9s!TAN}7mW#Wu>E0dkBy`WdAYEk^K)`vX^o`FtF)VKMuG=SNYNd6QF;(v42)DO?|` zC!goM>Vi^p-ltT?8$wj=VZDr4EfI;xcO5rJO#uO(xZ_=;vM_5R|NaQ{o7I#JVEuwT zng|y(dM{QbXj$7b**4v`E)=6cKk*tA%5`W*OhxgAwE54l@m)^)+rRwg-^Y(B*Xz)Q zx>lOkHq<%P40N_Q9koOUM|y-^WRWu-`4GN*Vg)|61PeSiIcvAUg*)61MNKXzt9{_n zmM3*Wu(Ucx53+iHyEcxPoHF>c9syPw&MPy{$fX)W)AT;VlJloml+O3EkT|5j)tg_S zQr6BaUbY+{aNbdR+?Dgz--hq9eNUQMs6E6%&^toM;#%h%D@I%ISf=@;fKp9!RB#Cw zvtXlmp1p-4SWyy*lF7wYjXBM?P3o78)H!d>7X36rYg5EKLWlSTDbj)Wr(Ob%`d!lr z^(zwezz`c+K2)7z>F@ob7Nqy5ze-`1NPaz2nHld2?hGd9S5wlV1{&2rL;L`)k;?Ah zQUQN0Z(cQ>ih+nRs1Q?Z^7VD?1qwk8GQ8g$Ssx0ds&to?(%`9xUpz}X8u=@tzj=5$ z&gpC4?*QuNbPcY=O#!?Q**N!lWoxkpeTSV8$8u}Y z2#Ed6$9}^G1V(P}X8Nn>ABrO-^@j<0wre@n4~lC04sdeB<_MRAyeSE@_xq**FkS`d z%?AP)E^#mTkyZV}*2T8@(gfo?tP1A(ln|Grz^z9-c(p=U6+=r;i`!=f%~HExA1tMM z^c6t>!AY(ZH1X{q*xQ3mQiIdmB&*k);X2J!S9NMvn!abdbH?X&=PHaV6~06SwRCHA z*e>_-i+P76xn?O@wh+d@H+z7Xy(m)sQZ`6nGFg8=!ABHvpEsnUs+xc~5tnR>^pR=Z z$~&sGw`3$~A;p556yEQF=rfI?S4+bG{OFP2v4aFGKAD7U10pxFQi|!NPuIT*)3hoB zdOrMF$N00_y?lINo?5kdj&_v~w*F9{ity8{QC%^=n(FKEsJRRW-H7Vd_ZX|(@I_rY zA`uzh_Dz*N3H|cP?4oH4#BL2A`dAU*8cD%LcDdHRN#{V5@~LwN!0j$Gu6(2bb**fX z#XLb;ytRBo_K1fOu72b8j9u}^2~M7Hl+a^gs_Ojmb9sR$9PC+pem7o^^1w!jsmNTd z0Yfg#O+YwN$sL}?gPQQeCwpIbA34iXsAFz4L4M^Ex@EUPz3nA8Q*qd{T~Bz6qm`-? ztwxJn;K}?3@p3zMaGysx8*ZS{F756+btF#}U)NcxIZ(DXt_Xz0Wn1Dnb-QVJ>GDz^q$$!7QB;6R-{ib*NGP?H?pI&1Rpefu`B@y?tplhY7ZIYCPcl4DI$E; zhZ@HKNr*+)=bK*XWnA0OFu>Oo;)A#)F6ns*_th^ye)Q;5uMK7Mx?i?XS&!D+_%{LT zS_GXd`Og$W?eHN5Z!y}Bnx$r2bYf9jtvq6SmezIZkdiyNLzWSIPVAcTgD1P2Mep2M3 z9Te3Q%rsowFo--eMU(km6i&s2eRmnv6e+IY06F%}kqN`n);hwIBG?D&{{4Sd695*3 zCQ&a#WaM>OHQb9{ir;uuXe=tZ?VE?ReO1X$6Losq9`F12uln5LVCrjfivKfJb>$3) zez0+StC}S+oVfpi^FEKT&jxZ#EX_bQI2>Ry8+w|~Xa16Lq@^Gk9D2ws0WG>??3sYNx(&AQ64k${+rC|s z$!_ zUr$}C#l>pK#D(mS@VM)yuSyM?XHID2xDv!B99JI~XCDiEu9TW-ok$4TWXN5Ybija^ zXBi%l3!F{Q0~^_H=MkvkRE$5S-Za9dpp5aG$HxZ1ItjBs&mI>SI`?t1K0rpkf!5 zE70d+Eg(8U*Ij(?^`wM`9ND!jK9d{nZz(tdN0jNZzgcwSZue%~PRU)cpb(i@uQAjZ zk64e6Vv|02d5w@)I)PtxG@hy{qvbcP<*Q9wZun?TN{Yv4s7zL1iyp;fTP8BE5O^mD z32)g8qC~3OQG}qfCxOCR`0kPCNj5(^u`}3JDhw@|?HwUX&kO#&hD%MS*Pt{a)Ur)& zZBuOzxC*U`5++WaCArYTMnf>Z6M4-x@=W5_2|0rJ-1Ix4b%G=B7AM6~BO>At2d7@IJxD`~0ZByCRh5?w(4x<8utTCzy6@_9(pXs0rXF+lfP?LJ#?F3DI(R z$FO3{HqY>FFB*pW1s7oCAAL6JOL>W4u-AkEjw*Q1%&>Azb3|%?=IXX;)>2VUs@Iq) zAqM47>SDtJeL*HB+Fgfv!=4OW-k)HI;!g@&k76}Jp@Yyo3JOq*;N@-Io* zb5v2cW>9T-u|h$ANFBP(!z>LH(aB*ZHc>2pMS_T_r=-_gnKrM3r{w8SOG3Zy4nORg zWlZu|DC@F&-=?~U(^*x?JOF_)0liFc0TqLeUy$`r{iE7-WhjM2gNIZ~GyJNr7w9YP zmtJJp_n4yHwRu26O;V5=D>sOlKphot8rSiskpG4Y4; zluq*4frKSAziv}s_j{zg!4t}>HV<3`u;T?nK_UOz+o@o5CZ^;f%3+)36lKl|AVf$k zk9jSIvRQ@<{C0Os{T_~!dJoFeyw%hb*S^BokZMwo$#Zz_A&-_PahXpdM2_E!V^lv= zytzj|m>ipPur|?k!gB?T6LDgpDrbPT2e}hH=;sNKd`w9T1ENriW01Kt=r+ZqWq1Zk zAHaX_H?C0eUL+|T!E7&;gX;}J%|Y`>fgtD@HR1h;qV(@bX0^V6mk6Pt@JlhvM%)zY z&Ly42Ga^(&K)0TKeB^Vo&FgzVsNlt=?uj1(?x`RB-gB?`XraWxmaA{hU0hCy@0L4C zd0{4*9Zn;>H+{w7Uz>I*V2X_T6$3$&A@~8!$m3(5|FVkP$mXa z&O|{%gb!N4@bFIHT}#;b=92QtLH`$s`7_bq@7rDTxTy3%l%7(l4cOV*Gi+Oc_E6h_ z9KZApRrPcEp=YhGOJ{xbw+_PGLC-{B#pSW8f0TdW>K`=@Jl-fv80ziD74ERQM5xAA z3&c^2fUc_)%OT2bfYJ!_n(k?d668?7HZPM!b9Wwz*}ziS#5$22v-F~l><}`@OtyhH z5=xqrYGy%szX23flOwX-Wt5J!Nq+snldrkYQL9XGxDLf*m;kZacR1OGJFjSgJpzEt zqoTLXq@DRM5Bf*%)A5rXpE+U}M#NonHP!NU4`=oX+8pB2>m+HFrH2chbk3E%;t`G) zaUVUIx1VT#=h?C6r`byE)QU#k#&B9aMmwO1yN7*}9c4C!l`m?m6-VmbC|q~Sij!$r zd~~!7HGx_g((X#IoXu+_dF(w!M5=sD=%#E9Ad!iR}mvB<;aBu3${W1J1 
zW_v9@?MOVIZ8a(YosBi+$hwMKo6V)`Q5b87G;;L&bR*HaK!`ds(F#Q@X0&u(gGQ5JvlVzSs*KCjIG0fUs$U&NBVFU6@Ruh`SwO%4Z>uXk1Qcw8IK}I}>x6 zT*6@2YVRn1Q5z(WTX761WOBso8>*_cUjsOEczkdn^zl|L{kG$Yem<&qtX+^V=$;-b zmJWAQ=tu`5ctnlXLrs`}#fO>`%2$YNAqiD6#I8D07XH$kftyX~cy4L>Y8K44x58T0 z(D(0u^gAXPT9PUy`VUI|Ln>I(|%nPrrku)<)NZ4XtSBd$s1 zXtQL$i5pkZ)S9Slw)(g77wg}cxxe&IUZKEPJbf{}SoQ#W2dOVTSw=pqlKljus4)@U zVP387{b9m8?H620}8)a0a3)%DF#f)olCSb6qL(LK#_dztyobgPULFQ5L9jXe_oBzQNKJ=Gz z(YzzB>XFeig}gT#yzF{8-Z1GZ{`3U~p@+Oxax`+LcxrJS#vF3&SDkDiwC=0kKKjje ze23blF7l0^@+2<1;aQ=@skdVDhV>N}0Fw@n1|a^6cTuovm@e{%t=-BfQ!~|s@sfZf z_a~BJ*O0c`lu7kgZajTj!tNIz)FjBF4j0`%$Q&P`6I^N8{bZFrg0S`;;BHgtT(3~3 zTnO8bKK4X*5xuhqtB_vFRjKu{Woli~&I(A4Y&+QF4 zrE%_7c{cJJ=bjOgQ~cgs^7z3_EQ%4%4+1EJ2G*%24s6B=mAW4K@}6A8DW(=N;y`6Y zYj!ZcL~6Z+Q1%;3Ttt=754KZAdN{UX5tFN<{)zJG$?Tu*das^PMF)-8D&iJ81Ev8~ zG7=Ynn{r zd&(RF6rc`e5L}u@#>toShW#nWu4YZnqu)p(3Yb^#CLso_aZg+z7DqW~1VuEZ<|lr! zL`FK{F<61yf?-rFN3z>weMpub#=Iv&XdO&o4UJ;AtgV0O*-_KqnC$q(Y|zig+xcV~ zDW4oXIbGD6y^i(MZ9luc3?=)yvIpVNTA|=`(ENgLeK&8@)cV>V3a!QYf;NGp9-U@EvqoN(16FP{9p0rbc z-=}GP5s`bX-XqKTtd-w)w1_Jr31c1?(?JYBTQ=f*;^SA+SMllSF+yHOOSGHCfi;txh{$z7oeF; ze2PF{xAbKPJ-uo z&w$+2s!-J=i0h-UjMNS&lHy%!Il~p>sXrOrFuYFybJH;|o!N_6^}B?s{ylDG#Bfn? z_n_#FhGri7Ohzm>u75U};wTNR;5+snR3prjbJ`wwg&_x57$sV0yiBGv9XUDt4oV1dTicAkX>-_Z9!+ z%ANOHh{LQ^?m+*cmPET2UXuM=vp<`KFMT+6V2C2V{Mt8tSVve>tsqi$+F^r$Z1*gm z&aWaoC5efRYB|KykX3~i%@8A~N@#&s)sa++V8lWx%ol|!Fg_wccC0KPVMpq>^1b8P z^2ZHjT-16Gwm$dK0%?!y%!hA8;c@5qw`huId0pB3Cvw+5WOv$lXnyP=WrAW5_mDb} ztlk{MEebRaj`5!aiC2Dl2vsQ%6c0#f<{>tMe=~Y9qNT*2aQy@2W8`9R;fPPzoLZZm)vPXzYrOeTXj@g;xL zE_!NTf_1TET%1k4u5&nkxGF=uQHdl>RBEe4YNkxE*lA$=pdr|`+L`A^p)5YdMlEu! zxVJV}B|q!m;Cx!G`a0?o0>{b>$1jh3nW>!u%8QplDFWq%EN&tsg?SNIz~ou&EZ+My zb(g&a0lR!LTQL_v(;)iwrl5tJS}4W$iI4ta4#J>M=Kt1ApiPvF3lp@@xinD z0$2h(XQ&mg9clYqQdEs{U7EEFq$O!CL5WiN8Q9$?2upc+n zp?e{}E=((2 zhuILL{s!i+-4iMQ0OV|4QP>&CM(NtZT-|4ytq3DtQQv_wQF?21R-H+BFEmCAr)mC3t^dSc#E_>Kcn3)wi{3VQ(M?mU0{^y!udA_d3q0Jz_0Oeu2!Zi)s9 zjR`Ee_HZ!K7G-YC6PPEn(e1>KAF_6H$O^}qp}1*cA{J+h?$JWrWFFN^-^|I@MHy+Y z&GhBc;{yFbp+g>rIY%28$`V<;9C5-`m$;~fr#DK0a5Lcm>>GeI6@YR+e|v%``+evx zbBvDgwbRO=Xr?569QD!Z>UM~YHTCbKPk-8%CXnM_Ev>c5*1cKvJNAY>f4o%_M{^97 z>?x{3ujvg|yjOfnD+r9hs7NYIK(G^8cAoSo_}yQ!ZHuV*C8)mzH)1s_aZ#^h8ivl{ zT7!XAv16&WCZB@Kya4=qEl_U!7y{b&R_OXl{MQ88=b;{IrL4t*FgjbHsz`jjV>1<9?AQA)m;Fai4-R>68t;}+LKUa!MCQ(qv<{eue<0JFt87qFaaenR)j zn)T zUSQ_LF+IiHDryjQ{lxbIs|O~DB=FiWxzUVTnjfb9_2hW}sdsSIp-lH&P32;RfUS=R z2*Jy|ryGEV>Lh;;@UEv)>z}f=+BZ2n%oW6PWQj(^a$k@H_#~DJ_;(r5bo3(C*iWllrlT~e5bz4V zn;BH_8n2W0fk+)4XE5hImwjS5SU+J)3s}Xq?3kcXH=dY|(`AHiDUuaN7(IpLf=9>} zryL%eBEbD?4+Jp=LgmR)&H*j2nUUDeoYfTfNw>4bwrbW`#Kq8U+ea<9dyH>Ko&68J zemu;~mic-8PjEO~VD9)Y$YgamYgV7^)tqsk!j54jZ{R_#bAPn6Q~x~7_HtC4n7uGy zNEj~@6YYu`7begj|MB;0Rrl}z=#OgkAd85h`myur*EOdhnlouhHq6Tw02^Das4o#j zHMjsUHQU9jr71+jWR*-@uO2B+?F@RiY8+NGnaJ9%nZVI6*Nkrn+tOpb%?YWBEz%Bd z=5fn@-fjR?@MuK>`(G1_A;BZ&N}kR;;bZ+L*hEb@Q=T$om4~jCZyfJzo9PH6B<4i* zt^FnH*SckeIHVr19~k8aj2rmqXOL zQ5cC#mTlZ_u|$3OjrD?TSIn6?4vuvbB*mD1P`!){sO z1O6k=V@Ms}Nvn;UL2VO<%HY$;=kly`euXh-^1_y|z%WKZHssP%#52wcJm ztvKr&qBU9OoP~3p;c4|$XseS>;asDB z)%!*d;aWPFBuHf1atireqvFWwcE`}XPYL&3NPIwHq)fJK2DN5+Ioz3K&lcrnks)iCf!CyA zbCd&hiworHs;^X(>vm~;`#0bG-yW90Ma>&j10Fqp+6Fx32g$FuM+F6x>k9XAb~(Jm zVa=`KpB%gizrwBJbHkTaJA-N*MY7RQTen;xaecY3B1W;P)Q@j126wVY%+|Xqb9)(k ze1h-}Mo}h)C}zRLVN+WJ^t58$AAM-HqV9NfzodI~RK;QFvrtB!GQ`L~GBGiNLXF#8 zMt}y_#CN)%jN;mTF5t_sLm7GgA_5&-)W0jvWKzmetJ&{I=w^8Z`q?Z?+oB8f3p4qi zaFARg-aVj1{;)j_|L`rE*2+B?#1jgKYiP}TLz-Upp%^u>5li!M5QvjT(x7)+1`i!| zEcGS_&%6z2sW7u$LNbM`yXAXf0V$puq;A-`VC 
z7ndZV%$m#n`{z_aFLnT)vEDF=$-Ra(d+HLv)9%xF|9{^9D-ZO9%yaV#Cs3Q~Y14)n zJw0{ZE$*AN;iWFHiEBG0=rK7>H;K9*9)cmfYTk$f`7&z7+~vzohd}1U>d!|Z;`$x9 zSOo|CyW?pPdZYp|A%Fcv*1`ZoMK25yHv{sN1xrLS7o`gPMy?O1NEpT&Q!7C zu_gb*FFi_CyvY-hSms;WGVu?t}**r=)a;*|5$OZut z2&)p;CKob8frfhRNksR%=xNOmV-z2R$#{%N3cH(t&($mdvbzaNVj`okr0J}iC%cHN zeyq{a>a$L<{}I2jO3=)aY<|W^*S7fdma`%B>b5| z`h=b|i82L@l+Cdwr#J~CpFlMS8qfQOf9UObqEpoe^6*J5@#^K>xfg+*H-u2DGKkJN zi`R=cYd%aWM+E|xur3BcM*SebcO4bV;l)@sgCQywy_FyQ$q)a<2eI>`>Pwf+f)?J> zCj@tE5g5?zx?~bgsX|tn1Yl^n8;Z*)49wz+As^4%5%KJxfq)>mJ6r!23owhga*hTP z+(+*-Vl|TiIjwo&ifFy%Vy>>Co((YXHC>i&KMw_fO`GEx3YlLpBrUp^asEQId-sU^ zP+S!T_FE1{K60)3{{0o{J2n2WT-F+|5k3x4fRcuLE)Eu#qzZO(UTT4ukvrzzm1s#N z73GFI&DG&mQ@UvmRIkQSbwN*hgut37NV)T^xN08iS3WrT+p~#}m8f=`omec(KouQW zb$b8%heZeFrGQa3k*GJ@=XNrB3zjTJrMiv8F^)7%~eJDK5P zs`qgo7{E(HDOXl{A3@gNj6S=&3BS`ZGFHE>g@BN!5niwN+xOr6rN81@weOu!gM%~F z{;GW-DVCV)s!9hsc(Y+*nhqy5=!>Zi54cWbdQ^X>`g2~wz8vH~B z?u$AkSa3{={I2_s6_u9WwMCz7~cqm1l!XA8rhV9$rCq;0>3FbP4}zkBc79Lj>tZ)#V4B!&F!B+P|Vj^vK3G zF-lz8{NMMFsgD=>A+ZF0Y@E~KbQwcqXeg2w`sSffF2&ojrfVLU1Aq0+f6)IHIXPem zhRkbJePreO)?`*`6uy-yTR^a~<CT_612%gGQa1A4y*BE>m&Jx4} z`iOOqJ3KGJkkKaUHQ{P(Ii+*EX5}hCHN4(6 z8?4+R>x-6vE43-8XGLjzx}jJ;TUqla-296ChzwhCSBR1|-8p#LHXwN$%4EG;`ZNg5 z5McPK)vLEQiGULh3)5A)PNdONoCXGI7aUw(FD1X`RXE0|271{w&#qJu#q&ojLfyVB z?<-Ki=78Nz>poHMDVWamt1fu=C+NcRtaqXgQ)6R(epS>~bFq_{;e_* z|9Z=wUtP>!HG`oQ%=;+3o)aV=c-z_s{J$5m4_BP3W(k-OajogE{dmodpDq zvwC7Zauqs4Xr&y$e72Scp#IZOKCzy0Mw;TRfw$gr|Ni4gA3rjICC1m>6;vEZo#})#(p#mx$Ic96-_RA? zUn^U&EgKa#i?g)BRoLr9IFkVH<)E%sF@H~M_V#=lI6r0nw^W*9-ueu!q~vwTTznZg zmOR!Lu4WoenQ0`o0kn8ZsPU*zCK2bdp=^}IvN+s1_Ct46aw%r#LS4M^8y{n(pjj9B zx3kdWIblXQEaGoOtNMeNr-JOR;=?5$~@aCR$fG5juToOpjn!s_sO!%G>liM z3O<6rCw#5nc}#Av=Rc90O#IeACx#Vgklk$58n#7IVT6Wb0t?#Dt{=?Ny)vaNKt)H# z5E*a%7B14vq$dS@+VcLxANoR3JmKZO*9q z5FQyt_&osFt`-{HJH$|ZOe7VBMf6_XEn8FFIBEb9<@CG-#AI#)0{JDlnDbzo6O1R* z0wqZL(6XQdWN+060D)!vNZY^rA2#k+70sQ}+yzZJ+EQnU7cQ#YOY zt1ER4(AwXcR6wXzv9f-KW#M|)zw=u^@&@o+<9!tZ>;Mpsca{^KnBXU>uc+>|#j&<~ z-0~i91B1^G;Ei83vaZwdyT4X*;$F_79_+DYXwifxM?d=1gQ!yS|C07@&yi-?b)WwQ z`^k@Di;S73=F;fP44c%9nYdIHx^NpLs*26w1eE|1Kvn@r41g?Dm1&1fgOo`p=7BjE6zECy1SIfPKTd_`?Iv?IJPSj6&=&V*dS>3iG91=ixRQW9JU40qp zUuBlTq;35ny@030wEc>$85U_u?;RQm_|M!^#B$U!fMX-XfP(b{X(!YR#2A)PG`Y%D zR5SLM?%^R1wshT9OIq5^gD!dbKMc&IEQXJgA{fb;5l`U@*7UwB=6R5QioR8WW9z>=r$dtW~jg4^9ihV{e%sx$O4vG7E5b>Hx zcYajpoK}(`*BxiD=CS$~;ThoND;DAd2ag!2O+06X@`z;l)QI~C>Fi%@4Nb6W zW$MOR|2{y^``I$+{L`3{KJS2@mI-c@ZND7r8e-_)Bj}LUoS?|C zq2XEe!<^yJ9>{DzJN5pG%TQ&2~F}?JSxGVKTg`jX_@t|Dydz zAAir!AN5J!YEfRlf5U&ZMtOfda0}WfXt{^ujjPj?fM(^Fzf1^MQ$!U4n!acp%f`NE=E7R5`>~C#6=Hk zBVkl&+Rb2$72mEAZTijKRq@vSjk{^@8btah5RK}0lckF-n6F4@5sqF?1l;0Bh8tu@ z+3m^8A}V&vd2h3XeKzm+VBO6uHg^upg#`_^hlTK6@8jf8JCsR_4D0idaAB&u)k&W( zgy+|kr`Hd~Pr}l`g6{(S_kZ6XQ`__~UbY6DsMp$RmT{s}dXhEncN8?hk_zTW%S?JX zC`V&GuM?s-9M{K#pf9Iy{0+v|EPEcS^H2ash>*@;YFt|y<3-QuA|+Q#0~!|F*wKg)*nN6riF` zA!qHD7Zjp|q%>@qA;Wc*X7oJf{d+a(jClAV$$1R?h`eB#Uxfh+3|P|-2X^u5b`Kwr zqP|>fFkfwD4SYy3{CI&MQnH?Lk7g!i_rlQODui_^X9mQR3!1Q0vIzpC<9pDLQ=E3d z5mEX!6B{GS>y;;l*O%y~p`YTC4T-M2=xQ3gS%-m_mr(mdvvfv4A2|#ROv@Pr&22lT zVE;U1)#jS@C%zY6wxxTEL$JnT(~}f!667Tx_=m{+Z_>(^a2O&4Ye>XqeS z2H|LkkpnP>R+e21fzn#^Ko+pMdgG`CJj^}ZkDDnF^t^pay?afqk7}5+tOQ3qF4W0{ zM&}s9;XDHBJ0@Hf7iepv2zEfnh#>z9FkxY)aqZwky|L6l>|Rz7qs^tU?LqrFo391Ztxk z!7SP;a=!_dbe0r~vkS%zwZHcFh@yBslS8R!m$U8eubao1K$-OlbY3Rk)gQ}Zmv9af za$$b3Kt(`%B1em<@?kGXoJL$ot68;nJoWh<&2JjiBiIEca@T!%8j%$pH8Jn7{{pc!d5H`!8~+NF{?lSOm)rofY^4 zU(v7nf@EUXTWTLd%Auqh2efJ~Cv>244I}JE2ho`YGZRbD>sP1xB0gcMS|5;D1L&|~ zprGG?wv+pmSGwK$jBnD{Zu07c+hsM;>%=;rNgJ6(xUC7#e@(C4S`PQht_N6Q$LZ$F 
z0M*+$v%GU*#0T-YWAPNsKm&6Beb+fgT`4~w=ZJ52G4x~_&d?D3@$t6Eq@(^gaBHR) zP9p)HFs{`2p$-p`6Rsis|FDkhcof94JFR#Zr3EQ`hkA~p62Qs~BdBV9DeiSZB+yIV zCQcAxlS98~e<}z=1SB>w4M_ynk_Akzi=$rNimqA8D9SUVAEJKiwXX3ta^3wzxi*>l zdYRt&Mr-d>yTW`r_=7qu+8NcGvC^;fMAMnuucE{3+fCKxb);bTcfKd=)?&Lb8$mEVFqpyR&ih{zCV@oW)r_%_O`->>`kILlqo64=RAznUEfEw85xgy_;n_wB;V z8mcoJw5@mm>PJL(as<9+C~`Ad$Od&m+0ajaU4IjrCG$quCWO7cMFoyLN`S;neUV(@ zlg+Vt7+VEKja1>z#*12^a4_b|Hg*mRLG&(WLcjQ(PbSwmmi1+6eZOYAL1)5lrotra zcSbYe@xqYUZ9#WG0Q*nDOtbV}wrf{^l8mXg?!2tiEbxTdng~4M6h?Oj8Sxlt;l{jO z0`)!;o++|)+zI&0Bt0aj@1=5(Qyyl9mn3hE)%*>-qR#Lipn}rqm6zE=;+qHEdfz=k zjF}cMr+0NCclOUgqI{m{4^m&0GuQbr{Ap(IlA>zJzaTUnQW zFtu%DF`ke=szjofC4*&}gDT;|?8DGnOJQc15w~?`fuYnBvH!@n)K3ja`gt+6x_c-d z=5G^;e6!q7Mfx!Ov=cQETh9Dw@Vr+H!)fM_UpU1bFnLG~$=H{@SEr2myJFyJQLB?GZOJqJMVzY>Uws!tq0;{BPn!TLsaWk8$fk()KK z$1DMKS#6LGFugL0Bz_MY+`s2y)Q7mwKlc?L;A23@O^!#INzx4Q_}fONC%FF9I!U{i zAl}*NrGigbsJBXLDdHk8y3I*ude$KxU+vj3p!*+e4YOf*Sk4i-%Pyc>@UH);Ld~uc zjx#=s|3>a3ma{fbL?7BoCGoWhdJLJu5^*RmQ|Ba`r(zjR-_&gaxQ5uY!Aelf-ho;+ zSuP7N+-qOI{pa`pp$(Ba7K+!#lr_1+R3izw%h3^g70p4mn_WU;d`% zmFZ2b2bhnrMt!9Ts2|r+(C9JtE7*#}l?e{a@8(s9SD)~oKMqC!0LoGiMn$+UgPfZ_ z5#UyaQ;=MoPG5!8d+0@U6d2Q&RMrQ1 zBnYPoCoaVH)VqpMAD$#c6@7|w46dmY(dZN(UjzSGJ%DS+qp{~88=D^H*yN^ilK~;O zNXmA69UJj{3%dT-`&XYOr&5KeYLl>v#fk#0WSHe)3zE0BcFwn>fVf}b;}K&5t$Z39 zdvE92(5`G7pWNeV3mKF&XeiF&29@}`B|l9Z z{PM{O*Go{EQ1hCvpr&T^b>%{x*H?EL(@QwV9&H8rQRE?FXUdVuLQ~e`z-U58|CPQg zOG+A}5#u-8KhNoaHB4^xi!UZ`W?hd@;Ir~{k2*x2@Vh@hIwt|`m+lZa%S(0ztU^_T z^#yc`;u7_vq$OTyQCBWX=Mr#V-JT-Wh0k9fqaf1EO5X3QA^{X79pJ;G<3hrjl;HMK zRaT#o*7erIqTYc!H4j;&6efB0q_3=?X$+zpdi)SBCFWinw~9LTUZ)u2al_SyG<_Fg z%XmY5mo5Y!nW3ciX`Y5;_4DxX+@TX!t&?t!XIC^K|BX>lr}%u|k5t%)fCSwP>a)Z< zu%b=B2QZd@!5h^HH=|8NJqjo5&kcFgF7yF;WhKf8>)UMGMx55BRZ;BK{2`jP{(`;= zljz5vd{SF_9T6#-vqlH~6XtS+Z+Jxe9Qfa>lpnQJT1P4Z{`LDg^Qf;OW~`iJaR_PI z98lW8p60GGN8?NzE3uU|Agh`fjvb8-LwvYZmR0MsS2}J?Gj9NI3d=r1VXeH^hBJLc zi+t#U5HDjvc1osP&AXZ*EI7?PGPVdFch*^;*AhWZ$KWVJKVQ6|n{GXKya?y`pVfXN~bIHSjY z1rr$qcu)L5=8ymUoGv=W1325+hJL?+)#|CQAZ0LwD)i!}JqOTP4^=M9oUS;kzsC_^ z3fMU!_B*|#46NSr?f-KBw`P(PFaZ?OWzRNET7_+W?~idGhQTxDq`eOtU8*=C12L?D z5R@NM(~7e;X>`~-$I^b^$9afJ8hWq|(`7})CzNh^A-rb+y}e(mAQ8nm-E_(f7Aa|4 zYAxB-glC9sT3ylZdP!;!XDqUznxhe-_`J1`3!i

z!#uIQ^?nRkr4^6kkHImwTr4JCp+Z>6^EFIxd^9*2Si=*}Z}CWY1mgRgLJ`y#Jxl}C zt31f-Jg*iqkVFQ1ZUg#dOA)Aaf%6mlhABq4D&|Ktuyo1Qs>UbV>wEq9O~V_OBZuwr zPj{tZ;@*K(-4v`TuZzvvMls+!*OCwE-Lj}88k;v3FH=3+`Os2CW9_%)273(?m-VPt zdVZ<<2h$s8PSV<4B|R4J z1j)gjT`2(mKib}{InwPo)B6n`ha={4W{VNz9*HCxK%Z!dasenDx^ZGvbvHLDg{A-$ zfZ8||2%x$ft?Op^7v*ZR*25R+m-*;TIcvzIy)HAOONP>7YFyfRUgTVAT9W)<-b&KF{4;n5?%`_1XFn&Uz!*@zjr0kQHJ zC91HrykI8TCZ!LL+-4G%`M9e@c#)tM={NMpl{CQw_i7(KOLY6iw5TB+WwHKN>AGk+M}VpIb$VO0 zAR(a>O;jmZYjPK~Hx^_PK2!mW(kTZkuonEY_;|^*W7b68 zVA-g4UqN)%d@|uB$`ik4^v&jt)Wdu9Y&lfU;n-kK6{UYUOmA*-bXMXnh>aQ>91y3% zJqjLg0d5BawMZB)5eMA2Q`(qZXXp!sgMDBr!-Y^A851X40>(mz*O!SqmVom)F@%^R zR!Oa(KJ{+8{6&Xjp4Sd`6o6?uajag;qb-Pkq?eLo*ey5uORtqlo2mqeUu1JzpEU># zOr@He$GyapsFQ6P5;|FQIFCukk}8hGCeD12fk1KEA}kxAhF?p&r*;K{4A@oF_fWGzMX)9cQ-%Ef)8E{~PrBrx)aC5M;9rQq*LytUbiftY(dHZ-} zAg1`LS3GNDlc9Ak%lP1Kn1K_3+{QZ|VzgX@6L*x00|-66$h_VD%^fg*=J6`p6-)dP zeOh9P*oTCHGHSi>cp-L)?*5zmzc7u6#Gx1}MuVCwVz$nFGYq##^s5G`ji-ra%fzO~ zWeCPre?RS+$w?VC++1slX{#?!pC)`x=?AZj))%r4iFGjn-;;&XO>BqmCyc2qX-H<=VRQMd%5NpE2h(lGH*{S*lzz*? z!gFgfg2r!KFZj24Df{$y#O^EDM{YPx$DK;!&QHz(YWNMHcrZA98TVILyt1yeS9~`5 z)A_J^gv{fwglNxcqMuflMht2^8tDadE}dWq6Q$jtW(Wx85~ZNA>&YpIvLG=-J<`Rd z_=(FGi&IFRGmh6^5nKe z5kal1Twbxj=p>sNZ26(1lB%h8M+3 zzz)zX$J^}3byhlh?sqa$K$q^HW%`%G2V%{^I7r^=>^BHsP^+(QDs^VbY?*8fIZ>_{ zOT~c;PMsVcPOk{cSTYeJhu>~eyd7SYmTK%lsL3`JEFK};>=`}mObv|DqB4P{5I`YC z=P%F#wFI@)z94qdN+|pzg&i<pheJ)8F}9;9h&9MwX)M&44~0f2X*5zT0V<{(Iu;!P*h))! zQW5BwLydeny+-c_FAAxa(EFeKB<jgk*Kvvw(kPAx7? zyJYiRNBv&7Q5js+Af8D(r%)Seu<0-0^N?4^;P)#R7oG=M&>E$(L&Zkf^qlBZR+ddw zcZO_m#&EFT4*^6wBuCOu32-a(gA^&*KLPt#gJ2uJ`9La&{#eo@7}G?&!DNo-cU|8g z0j(Qq*-7k_iISx5d4{`(RrsD;yKPc0-g4?htkkl_G%CboAPFw9UfZe zI(Fw1J-;KI>wN(X)3D}rOQ?MMM?Y$}`=;-s02byeMBIgD34ZPybY<}r`kfu`8)yQ^ zsgiqcq7pXcp!@`S!;dr7zM&p6m-DKJX+#sMQoVS@4zM=Y5|IZ>oIeFxg0vU@RAdNg zpPI3?%Fb(#1Q`L^_TJCwVIHTt__R1Fhj--IYiO|F4FBZW)&V&T`JF4X8~T(LGrAF` zh;0*dKFzPZxS>fn)-P(gUQb= zE|)0R@Bj}oJ|}RvhcWJ#pz9Z-RN~>_+qQRwaYaOVq;OO%|Lu^@zHbd3LlQFEPV#jC7)iNrE5b5U&YtKKbys~+c{(! 
zv2WmkKoP0FwP{~WYhS0{Gf*@ZSQCnF2}rjj{+q4c zjQzbG7b|vWm;n%+=T|GS-6v17)yGD!>*v14@n@l-C`mgex^0yLm-syb`!C|!jc)ML z{Ddna2jl{SCsFUj+lqdHn-%-1eUP~;FyGw%)@L!DH~{p^*9sQz=D>bSj~4vVS+3DS zYZh^s&u9@-vftbrW!SvCv(oKZ87f-bW@Y(sNAFoH*BZqf#QfIhpT{|Alz>CWJ7nbg z@h2J?b7Ed5F+|)%(}^B*Zp@@?s+r|_M4fkw36^!Rgoh~#CcZQ^o?RS`5Pj)rsR_|h zIVfeBp?NHi;5FW~h0zRiOtZMyVQj4M(w6j$6SU{mV%Wy_11} zh^9wJedZGAwfOW=kauahIIp*eZn|+54ce!&zStI@{@1Vm^wa-#U#d~mGNu~wjBmo< zil+mi2(D%*V3{!4(h|tba}Z7C9+C!m8*=j3KlkosR?I^xn8r*7pq7iM|5j_=Q#%%3 zjTyY-m?DwWtVV|IyS+G|*|_3331Z||MQN{+A_`W5->DESZ~^l4H%vsGs~rP`uRKTW zM4d0;y;v*ekW0=WE*Z7T0S`r(q!S50Tkj(^*2#QYMk4mnIdsapDJ)cUVn6$Ti}s@zd~3MD#K)N@X88SAt4aeaj(> z9SgH33&c>SYof{iYvV2&0r@wW*loy2kVXBWvZng}UV{HB$}fU=zyd#j$q%J5V`h5F1{4nnM)#27_1- zal$|G=@9N!*?rOY2#$lf`dNGsmQ!&En|4N7TuT+!Y_YPUI#R)JirAp-l(@{?q7GiP zNthd5#ditW6lk4ty)+Q1B}z-r7Gg6B=lD=DMd$M}ZKSeaC|g1~?225~lx3eSj&1ca zx4RpAK3i}u<1aa_j9F2dkUcYzc$I3XRg@A@jeqfg68W|NkN>=e{yWmfeyc{Ewkzd* z2q!~bEUGg0KC4Wv$^|jdjG|zM#V3oF;Ip=_`E9C6FtY0HLlJRSnG)>Ji67Vnc||rm zco2QjOCOVtjgymfvLum*rG%Cg>w}RP8y?H+H?WFlsqL#b8|H}HKfT=bi&xE;O4Jn3 zq$N827#1yVE9E&HJlQ8dwzuPJq!KwF0qI=OVRI6)Pm3KDVgw^66E?IWjoR z#xA0p$h0y_A4UZ|^wY0?P6qP7@_*yRQFyfYzM0QSaP2U_9c&^=cAgHv)_T{=qG-)g zY;XfLS*c-pfr5atvuI9{)xhrUSe1DB=9?h!ST~ zkP)<&Nv#0{HT!;p`i%oSy9O^C#@zRQ-(P4GyD%!ez3veWxBLO-WVN!KIN(_b&7>r( zFjo~uNs_|aet?>YAcoS#P@9J|&YvtV`^`xBi;^tFy-<|MS8o*%B6q@%CQwCLq%(vJ z218Ey??}W}u~b&8d5HBB%EcllV|TKJYOlBrF6*>Nlr{riJ2HWh@dBUJ@EZmt$C}Xv zlskxM19=c{C3QpFt;~rCBvpW8=rXQO6AD44FXbjWz1{~}hp+@!>wCSmo53s$T_ebB0*2>j-QNog#O@j`47qqGE4K;qDY zBHn!rXT`r!hIvKkhn!r$y0U6gw?br02#Dl$AIcpn0G|v(S-I>emN+q;&!ihc1z@# z4leXZQa8*vnVkv#XTO5tN#dvZnf-kz6C8{?q%LzK+RC5YN-qARTq-cRv?Sn!q zUtr%F>=kdAB@E1QKpAyaN_JE3L^Y0aCkxs4nb8#cqE~ACbIVQA_&zsY2XAWcfQqRh z5HS|1X<3T#gthU7gLRiPgoiLOC9%_~rSk(d)bJY^EKOX^ggv3C3_%tgLW;Cjz;Yrm zN3b#GQi_AKumDpymhR6-y=DlUD|fF{Niha0bB)l7V@H_^S}Z{wrr-VX@Qg`7b@FosJ-3 z55aW(*A>KZsWkxZwCsbzhJ6^3YU4$d|4hyiT^kz0A~=wCs9^WvU=AklYrDq`P2r0Q?1#%YVe+NBh;1@M>iPv>C(~T9wGwthm4g&_H~4d`s_NtQr={zN5M&xyE&_Qc^sy$Gl*v0NmUG*I`<4%` zkpjRxh`nf`=ca#~dpH}tj+JN15|Xl+*+=E(0~pxUDG8#_X8u&{6CLv^Uv)6&k4YQQ z#;4lCj3Ml_G{&n8vQY6_(|Ponp_(e52*Bc?=n}Nw=pY>ha3Z~~PpQHGG_R`?cGHt; zfXA1{NO&9#BT`JVR+v=R0-Ivt`iu{#m>A795!ok16-UH~X*_Z|Yy@crlgq*&a3ZJ* zM=^zQhxg*!_aA@6CBjgTt#bwj(5WQ^2Bl4`S$kb6QHxu}(i9UYr%LAI@XRCvk4cgV zm*_CHBN4e*8ROcLWmSm3IjkL5xb>z*10#_p_qAU+e_JqP+oWiUCvZTf)widW9@;RW zMh1CcN~kV>PQ3*~t`c;jMU7+PS9in+(tRF&sBONM zo|f3ciy=L8LSQaeT2B|GfYeJ-$&y1 za)g4X-4xTeLur1za0LWa>O5HabeKKqC|(?=2?~IYxr%Ct zwU6?m4V|@j^agv==e0_w?L;Wx*Rh$}SwI1wvLogJ$Y*!b7=<;=qa1%A^b^Z^LCXKC zbpHXuPG{k2Zxd=E)=>Lo;;)G5f(1H*Za+>9@KLQ$+!@d5Qo#VOkCR`B#h2oLR4qAh zL~TloqxpKe&P~a^?Y9oQ$pz}qc3wmy5myA$MZQX~o;V#r$y&>>Ie`Gn2vk}~etJI1 zGzdrD(ob;%XdLNJA_LvR7M+d6i*trm?}%!o&wGJwWWG|UoZ;2B9MpLG@J_HEh5C5U$%vUw|wFGEhf_U}o zXxIshn zyK0LUtG@rZwD{P3wnr~)WV!jQ`8~);O?@l$AAk4|#F?I#5TvGbwxP-K*;dM#S%V&8 z+(C(BQW@3HUK9OR!FQdVZr>DO4m19jbbtkgs#DrAnT#8IYi6-FUAZQ~-|Y3a*W&s2AHVfu z^L6WR6uaF_ANu~|H@+25S1+03b7_)1W#aMIu8ipTfzXd9vx4^=rm%$+WjGJB?%~rC zFk}>0S~S1@>z~J3Rc`J>R?-O>ow}FoHORd>bzqCKaH5wbs<5xnJ81(S?>fcL@%$GU zkfsI7;j(mjXBwoOOEZAtJas)JN(j7ELa(XdCx{_%K%pDahFc)ZK3dE#_@k^G|&sWjF}(e zH}qN_pN9pvGJB>z7qtIesQ|X+*LqQRRB2~IuYl7m5#V=nVx7V`yrv~skV&vBk|1+r z5~>E=2h@DT{*S&kE)7?IcwQ=Pxz?2Wdn5>$waJMm2$LF~9Aqf#_^NO|wZ2Cb z;>|~_geNn<3eHL;bGg@C^EuCH?M(>8Z+>QCqb+=3le1#r&fZute;^nYYLEMJ=z%kc z{erC>qo7NU4bzqI8HSZP_l@{Q5UQXZfLqEao%DP+IVYkLZz2R4H-|cDq6KQHSh;!c zX0BmBVTd{`7-LHqvuYS!30OW+b}ZQC{^MRZ6aE+@nAjmVxS20a=^c9LUtA264Ew_Z zcp@MrlZ&u{*}V-MJxl|cog*Vni@wTl2m$3?p#;SI1}^F9)E~yyrYZ=+c|FR?sua&> 
zxa|L(VagBFfl*b4rC8xO^cN9XRgTHQoL&apN&G_DM&Cnc9>2%ogLDE2w5= zbWZtSxheQEI3z?>eBk}FI}{1tTJY)Q?af(rc`=$$&()JBH{ed`#B+|*2Kl^>7Fx%+ zd>=P#YtT2bRkwL%ep!)$cX(QYKpKlwJhoXee2{>yX{i)xBO^ZZiBN%s8jp`+G#B0; ztCmc~Pd+7rIdc`V-BuJAq-aA1;s=6pPRb9JbY${roT!rIbO#ugTdDEsBv9fQsa$wJ zEdjwj1vS|20kv17Jew&pa#Q_im`@C9z^pRmpf&CVQ0~%;LGi z^Uien_qsd$t zX9MQ)<6aU4J}CD54^qr2G=&V~5Eda8KZxyuy$r+CUfq!z{+4@wdj99GdeUF`U`SgqXzw@J-_lSY~p?VVpiSS_-!-DdxhRVVXU3ZYO;aFtSYBf={h5uPo=w6i`SI_?ZF~IeNjx8O5A9Y$ z)-(=pNiSa+g$CflAk2=21)1Uy5|FsyMAhB|&6gDd|u; z%|D9eE_%b$-~>L{n#I9kDJ0# zg4&G_W`%#Tpx0X|%eQFficI0JHAOzc*_44v&~jIARTm8anX+u^D&bJHHthiQb^%pM;qMo&de!r!=5F&yA;$ zpez=j(r`HW?0lNG4E@2 z3U27oUy=>lQjgWa=MaGL!EZ)E4bP@`sa?s5C8qYbKmShLI_2SO()otdf%twmOSw1* zMqA}rr1{gJYMG6ovpV0AdPprGY*EmZtYpS^I!NScp)h5%Z)Yp_YsrUIl|=y%hY$}V z6Hv6{_!5h%U?lf-J0K7Sb$nr!2IJoG4)iOR08Tp?YqFHz-Xj`Zsuo64b#AIdJqsz- zap}>xf+*q2R20-oE6nxT9|4_F+bR>Q3r^`Og(oUR>eOnIsXaI*9B9Y@czaad2^X{w zXU<+B_Jxt=usdaQ3h#U0E0a2;2!lu>G%X-CbGs|XbJJPOj4iqm-uC1UIG=%k4)m!{ z!qXu5tA4j<5-Z(vqtOB-{6Ji}m|uf7t0UNqbdD+sYUK1-BYdoQAPcI?Z&1Fe#d~tK zox#@LZYNu6DL@3<`G^}z(hmToG6gD)aq2}-#)2K3+5J8~Z=9YGe@rLHeDUV6SYX7m zDDjA66I3e!uJ<2*SRzGWL`yvy>ZSC2THWvic7_)YQIoY{NE`79S*Tv@0SrkHz&nzp z1k(;*S0==Gyd9;kTHWrN;m8Zn5@zlmtP9r}s)Jg|SQ5mpyuJ#+$BnNr+p51u7NbJ< zCh$T^&-Ja{ZakhE;dngtU`|_;JV>7ht6(Qs?9>1B)!%t%fCEN?>-aa^k7ftNkx>hw zc@mjMMlb4$#yja3u+ldo3~@?F##4;+IR{<^7dij)*S=wDPnm^_E}e_09T1EtEvXd~ z)%0LrNX)ZxBM88e!0>W`w_vpTe1uzK0Of6AlASlU)^0eu8wHQ36RHnUn9;gJSS&0Q z#gx7H8AOU-r7!^a$WK9Pw565ffuTohls+?CsLBvfAjJ`+w%jfjPB1RI85wI#1yfpz zE4=z}t&e19{J{sRnQON=c;Ls^(3@& zl>+2y$}Ov%3NEK`rR_P8yvn5cG%1Pecq>AJF`?bSt4=rDA&;WC?blGXRit?Jlw7|hg-q79? zufhDy*b1s;gJZA|rnN=^Lr(+7EgkymTnvDmT#2bn0jG+LO*}v-*m>1l?*bZlxX7l* zR&UkcIUvuAr zaAZ5{`>zH;WU?CA^$7()AvX?7rG&UE9x*UcjRf%S6`%rru46L|(PQNjk`5VOTY+*T zX)s5|0Q|NIwzk$Y7egGTlNh&0eFT1$Km4+K4eBUB2)j}altL|=Ii?{4&M=03@R^K7 zQ(Vr!InOeAr#xNk3G4C`p%P%y7v*|L~TxFmQJ!t&F< z`|8ilCT-FZae*3VdyDIUvN35|Cubf!B%!fLWNiD6-L!<;ep@VpobdQ;Y#+?8XsFxa*xXcBT%B~}AL^F7@ypS+@ zf^l~l*iad}5QZ{ke3Hux3q>!^nik7I^cK}F1=Z`XygaQ?nid(6)fhBgtL+iFLx^fW zYy=gUmF`~+3|2S0Yd%Y;LI*R?P8?nk!7N2mGOdMVN!RH9 zNS5?N8N1zIJs1O-eO>9 zXD@!Z6<9FNZ)|Dz=neZ0z`dpxMKg0`Rm9`&M#32%iXsHK&XU6Lf`zm`@0 zKTS5)i14zcM=-V;2n4sU6QOE+M&SA0JmLsdSHdg2nOlS;wRNemkC%oXy)2>#h($4) zxNmbYhd!z0co~6np5d~-FM;EyVM+GHy46&@v2TE@mmP2 zL5K|rX=&4ezWLNJz3; zCMlN0^gt#y=S~^@*Xzftx-0P{Rxr9K@odV~Zci!TW8=k=T_2xclq04Kv&Lf9C8~CR z1j}VUuQ6w4bTv?Utmz?fPMDFhyDy8?Kmf1JHq0f1pmWf9;nAYmqpxwjrZZ700hrGR z6Ps%}n34R%$;h3SSw|F;JWOu%lK9&e_N?7YKe+q4^WSEcZSL25w7`kWxqh83Q}JQk zH=6X7%A*UT(Xi(@%@9&y$HAV!iC+~ls+xsKXYal$EXUSUIaU5FOHNuOD8|{eA<01`^~9N)WngE)wv9pZHY180NG|ka#9V!%~~pZ^tKX=)8plo&kciRn%E}1ECU{+cKWK ze9QTdYSksf6Zi}YE$zwK1+;HQP5>EXejv6sIk_Bvkns|0SF}1|?w@{s|4%05VN3DH zG0~XJ=K>;^5_3&@_g1z?%mTUVMg73(4j5h$uBj^S)ERKfYh9-u_-&N8jUoUT}t~{2p zA5XnNc|3u%%gHziug1KpPBbwvjMZ30oEw5*GKW8+ao73nDK1xro)@1jnQ2#Wv*U|n zPy?|Lz=kK~^6p?%(u^>T^_~81JfmTzOTds=5l+zsmp4Ozw1WI)JXnpsmO?dMl+Kfc zmgC5#C&gPNk*LUidBdzBOw)=%W_)jqh#QAvL4$BMtyV$A@#hApyY$v>?~vQEk;&m5 z#l-Q`{m0i-R`}f7;2JmNKso`pcdPF1`*~Hry*CK5!f=_%BL9uIVOs3nyz|n?of~#9 zjOal+z5KNIX4k8^9&sxxmV!bQU#Omt?8n~8$;gR_`;VXg6%YgIo661 zlc@(L6_$w;O*fT8c<|~)TrX?oeE5NLHv_LdpftTUVLWRhdYjP8jfcQcG4X!{8!UQ= z&^%m-u0E5LB?B_lz7NgtR_vy(f`$*kIc-rfdKBZst5n7opiv1{o;Vsw{n@lgeNn}B zwO~Fp(ka^`T7I5a@rViLLE1wSNIq+Zm|m9rq2}FXDGIVQpMV!w`dGF;klt)c!a4|u zBmyl3x^PVD0{^$;rpExznxXW7N2mvn=#JS-Xn4NPn za7C%kghkR5Rh76?gvNaREpmo9;Cm~`P757kwFFDP7+(0V^0bNQ+hZnLh7k~?-WqJx zslqri$H&18Ar-SeoS_BcO`D|2 zV*&X7KGEIgUCr5{Yv>CK>nkYfh<6_dUlT+^Be#P3cxG>M;!Ao#pc<4W<)mp#D*W@7 z^8Vm}Z-2`l*CixkA%$)M)p?lxV8iHu*0@KAlxjb=o)EUcaWt?V3M76L6P9#VK>?{e 
zr8Kx+9lVY$Y2*TCe+g>L@aP$P0=Qph3F83%@mK#4d`tj`n;g{Unr+2Vq5YSZyf178 z5&@OA9j=q&(xt|w;drOAB~7IVn^fAzuUY-V!h>cE8b%!}!X+J_lY0 zB>bZimpIjwR^e*0oSyrTSU(;#gM#v54UdGpkCq+17UV}%PlsGKIawK-+(~##; z0w`PcO+1!tGu>QL%pz$TfVz}n*t}6 zTv=;OBYyK{sA$CqHC?_OCZJH*mg0ewa-3P-D$PuRzZ`q~?q?wqZcCX%L+XzaawW>f z^;d8%s5Yc!fmvm9N(|d)hj=QPOjVj-{S;YxMl9O9qOeBiUnH1ZUwR46$4~(qoEHb0 zp4CLD6qu@RAdPG>MHCs1eQmFpzPsIlPqrB!B*+nyUW?-0s-2GGS$ji}9=0NJ zXz55lLDG~uSRXxQ+(aOQNZb)^(#%UnP%UTti#}1Cb9YPv%)^wkqB_B~+0Y|Mb{dQ9 z54u5n0&bLI#ygRz%&#t&{iQ3$6h65uX6E5UO~OnG{INrgJofr-Pw%dA4TA%eeT)@! zcJrN>3B(jK%El%tLIJ~op*JbhF%>!0c;bYv+Cks7?#inTzgr@LxR^Ia|IONvoA6L8 z`}O)s=rBKGm=4YPNKT@k_+~UxfY4tc7Y8W7*IF?|k62V&`@$zpM`MTo>xZb_CmMR* z|MmZW{~O%=**CodxfmRfcc455H!abPq#rX_*FkjnlUy*O$+g&!-*BVZmic#8F`h!8 zl_}(bhy95jX_ryv`de5d&Jq~eRxl+RAH;tArBBo<_-1K2+a6+|f>wy*rxVCz8s9i1 z4jKQ}NRar@s=9%g9-3`Gcf-0Q6bk%LQ_k-keGRY@64f~g?ZFr8#jspIZoeD2(TwS@ z&8yJ13=>x`8$GiYq)A#4M#s>L#Y?~DRGr=7TMH)m_Uo}t8Al~cx-QQC4!k-_@LmgT`dKez&Ek3Zbrdlx(&AtArnhoI{T~sV+{Ds&mJ$t4??3>M{9I6&04OfPqH~XDWYO9kOgbOy1Z%c=nUn(;t0WiBhPk` zyFE_fi{{7+%U$gD2mUJjjfu&U;ufkKlK)KUKHd1lq{#vZ<{^Mug3UT=|-M(Ju3v27yG$mTFz9JlBlQazHt?rYj>Vy1jtPZ&)|erg0wsUL&KXG@Q zed~uEo7r05T8%#-bN)PGdaJNM=NWG#uY+||q}3YAu7tI@3QDOJ!*j2<8xLMlUo2CH zRvg3_Bs+X)>*qM6#cy3z$4mtTy%jXb1LnWzj4!5?K+{yzPwJBuIWJEl2B}P39lfJe zHR!yfw8B7~1OWk@08xpRD^^|98Vv;+;?bvyB;?~|BLaq5Lry>fd1+)t$@2T>ZBEhY zBURUYIq`l82q5x!r5l&6tFx`|1rv0sB|!-Kk$VJuHnwB=1X-tPDN&lYy65x6_Nbx) z$x|-G?A>>k_~-01kifw^GS~e)3fn7GSAJj{%j326fpb-ha7CRzi?+wQ`K$Z??Cp{M znsr`@7k=S$spD%go9S@|PsB6f@w`@qhBP0@)Z4BN zPW$S-o^i9Kr_apM`yf;e?){(>fP;07HDb|l{TdR!7wd$f|V_>==Hs5{gQerELm zyHj_UmcTp6^^&Tm3A+lFs$iAxQ&GvSoGy-=j>-w|5C{qUWW1A_3r;0|njs3j_0IU3 zkmhmmJIUnK<6jva;?DS-G1Pz=my%c*Mw$Q89*Pk%@Q^AD&E~fgb?7Z&Bhc`xonBll z7D-{@+YMRhY)G29zZLWvfK3QV3CGp@ z&J8HYsm;5i=|`||TF>*L$fz_M4$shKDgKM*qX z4h4~#UK9aFTTHo##R=dZw(NBc#85}VZ1hrvtU!pebHNovDUi?cUJ*xCSXRD!EeEga zmTC_vh5Vp(;yNrnc^=QifMPE}a5~Hus1iD(UHsyeY}65D2TB^%9iVyCV#;jD470C) zM()Tw_0aaWnX_d*NSj~|hPzA5$B%$_6}P$imjNL}%_*i*{0SA3 z*KLb^)Xd!%=-r^zZ~x2B;`K_K(H}VufY1VEIhH^2RtE94Z+@qAzo8~M-!+RB#wrMc zW~77BTy2XpUB3TDt&q`pJnxCL>zpD$dweAs)J3QEiA^96}0>6=im8`1ykou zLK%Pev+_d74Ig^%JFvCM%NdOi@Pw_;kg50s@%bG9u(j^(6~LX^O%7M^z?$bR@LoP6~Bo#R?{mv-CiJg8RZuIi$UwMmW+hrzMfo^sEcwzx25^P0j)gSASlyf&DDVX zg)p8M#31=ei2^9Nppo1uDB%=voab34hU1?Ck9bZICznl-pHx>4X2yG{9R&{6H@Om! z!@$IiW8|s<(m1+{Tm1eY2#;AsR?_P}nE+6V3ka%&Gi{p=bf;t=<2Cd5|J_537jK6i zf?xY%JO}Ts0qlwJnf?rn{Np9-U0lF@EmOBrPC9;t&z`S;!g z)8b+nATFJf8JTbEKXayII`D!8E1*4B^cp_uTKhewlmoAIka(48BM};oR%mN zkv9W6yW3yH1J!;mt=wiZ|fTk_}?`5+<&i#yCx~Hd;)QT7W&MP1~Ky83mfa6NPzN)Tfm(QPASAwbrK6cf2>nx-Y zEgV264Ko#BqL+?-WT<=q;IL?6aa8Ckk$im;+unZdy@0V97xS{WUfQZLQ=hFOw zf|eR-eh0z?&2xU|7D5OO1$>Zg&>baUy$G0655Oz*BQg81x{pfGk=z?079N^0W_mR& zc)`le6)>RC#Tvdnm)%@keCmtGw8?X{an+AqYE7?vgo?*JN`oAK1r2Whlw?thf|ii! z((+SpOXy|+%Y08zkHybyJq;sjgVrjN(tIkzjEs>Dyx_;D-^&n+r#{( zm~HKGyvG2_WZaWUVXXcGR>-vMU*agn6=+llm9X?Y<5M6*ob` zr4w0%@zRFHd??mq6Dj2lebfsdj*UNmoHItzN2N(CE+jTmO-3A`L(w7SY&lAb(F{cB z)6Q(v6*tq~E)RwJhShFtmwv|o#ss8C@3hc{;3Tc4syu7;jWqI96{* zA559m+R9A>A}DK$*KcER`I}%Ske>ee6tm59J}|7VrJF@^$(3;#tU0f^zFD;$3>iOsQL}rQ4NL8#YO1_w9jKPWz1f;!}ry0Kh}iKctMYV z2qLa1jm|lRhAD-Km1TsP?mJ$rfK3lsX5VTc8fxfMn5-ybXaf?CJyFA{L~cl&*CSc3S7#jTO%xjNpqpHaD(_~hbH!? 
z39TzO(Zv^InhFQea8<`=2!&YXZnxi$f3I{_Uz#b1sZ*?5KnlPOans9eImRcb8EM;f zaQc(w1wY~vuLiTwnuk_V4=8Z&asX3!S3ul86iLwUb}g;0=mXH$#~){6hsPoiZOHa5 zG;&Vy1(a~G9;U>1f6b&I1WHGYSo-qvGp`Dxvb1jK>4Ss-N?i}_CpELVxS*|$N5n8w zkt7#ru5^Q;?~Qy9c)*+J!_^tI6o4QD@o&c&%-`zo1RGunCLF8o_qXGpOOKy5H&a>@&h?4M z8f-W@v&IcEAmT0YR4#(-yYL0iog5aAIEv)>`=q3^rTm_0|!v@>Ce-g z&aW7>R_gJJo1gg7N@nA-%#}6XoYKU#f>niG#ZD|C?SnCqY4@um61^MCCfA(&`{gK^ zwBhFwCzomWfB$=?Gbij;&?F2E8W8o5`wCz1i6CSXfs1ufD6L*ds)HGZ6Ce&5wQUBD@A3} zRl9j06=K?&_bSw4Y9mo#q4En-6zT(Xoh=DYO55bHa%~m2l2`LWhrq78KNI*lJe`e2 z_DbnZB}Hc9a(_$7#^2s2z+L5V)2u++x*q<->4z2$ZvASlp?*LE0!B)en!;@anO_8M z!gZ>T7Z-wMH@@)dhBn*GaLJH|V-Ys-oDz)a8+;k0)V*7gHV6gfj*y7Juf#6|H^? zj;|)o5#gi;4(F&iED`Fp$z5%h0XC8QoD$~M4^3}Ev+fV1Vv-NVCV0nXz?qIMs*>iq z=zGCT+Q8rP6$s1-m~waNwSG~g*iLug#|=yKD)2J}DRq2oiHTW-i#eCv-fWS8I3Y|g zAFJ?B1u;HAH=Q+a*!;6Pq#v0F?kS{*UN1fv#W^xnz0gDaR*OvD*GjUDpkS0rai}_% zL}^W8N)A3G*D0ryHtzk$W97aw;H>T+WCr!C+}rt?6H%R(jI@G5NMfTj6D@Z4ZjArR zoyB&6C4!eAUbL z1=9;?k+Bbri+~RjBLYw_XXkDXOD!BEZWs}l{LgaLbA1pgil9k06o~ik9TYyNf|mxv zvB(AqAR%>GL>$r)mh?GsNo~${daIje6;2G}5?+im<{r^xC$7Nk{7WnLSC^$n2pad0 zb_lxgPFw=iq40@~xPpi^UsGI^_5T;J;T z2fd%WT2X7fh|}?M@kUC4`IGWe6_XFLp+= zbS2z;sZ?l&I{~xA`js;~EbF3>whi-(E#Qe(%SNGjbEI$DfA8DB7b~X{%9AxwId5VG znvyB=%&%G_wh&*Vho1|e0EN8vP1+c>*;H_b9|f!!j`fZHa(pUIa;ZDWhASVR%q+^A za+~lhcQ+o#!D^;>35TrpGM2z0R~#{(G<)!iHTk5$;=HZZy;r+_HQ=>(f+xn`)zQjQ znNrJwU?Prtc_TP?8h-%0Ik~jcuaF1@14U!X=H+ z0qyc@X(qn?xp#HkEwO3^-| zq{Jz<)%r75hjm*k54%j&z=qF^cV|0w0fKffk@bT+k}rP7`CYkk9Xy7Nn+wkxw_~us zAOBo^-SxfRXSc-t1D4fiY5Uac?_5{|YGDluRPqwO-kVY`ge)mmkK=ItP zVy<1uw2$>(5YD%At-;oWR!kA0DjI1`!()}Xq-8a*B!oeNOa$4C-T+3NOOvYihtOqS z=K+Q|4x4{LQ`2L@x<0LACT*lsik?Ux4J}kUtTG0UW{}|<{QmC*SA6>T)!)Ux3Dlli zxhrl*uYi%{s#o`eiejTuyNxD}2kdWet$L~0Z0aHvL=_Ifs`dWk?zjZUR2{|D^k*@v z3K?eoi3@+=SaB@#Nm}CdX!}AMINtNFxXvfZ|4$d5#|>OsDt#}Hr)jqkGrd+;IrfH; zSGGcAK8b#YW{vZB)9LMQ?QX=wm!D@Mny8P8=FBp0>Dg6-reyvs>0T2l6y7ABt4zgZ zt?=KOCP+qYik`$W`yf@~v^q38F98qW!{G95g`hGg7YFfS?&ww-DRnSn^LQ{`uY&L= zdFPP}o&9*}{&puv$bSAW%SCA^P>2|OcUXe+O2(jl(&$+Mf`$oE>m2uorRrdZMhcpI zQ7|b&f#z-%{3wmT3Bb<#m3!x?H%kb7CT=y3e7RiLQz_tbMr0yE(Fa$`Y_BJ{%C|4< z$Ae>_NA+N?*mzSKHjb4vDpGR|qbf)=A%F~eNJUIunX(otQLoo-+s%1aH#>uPB87Nk z`u|`nnNeif7vMkF7=#q6zp|I|_%L|!Q2Ma@S$PVL^DYYf4~czJ3?x#CWU}L(m}Fds z)aJ`Bqv&-v;^;f2R}FLDTirmpP&>oZ3{uk0ASY#iIm}}9y}j*VxK+^g7Bp{2uMlmI zV`6|W-dB)V4NPuXE|m8-Fy@%>UVgELM-Fhpmtk@7QC#Bg#eok;%8$kn9FBiSHFq=d zY%YBGQ1UtW!m3s9+li!zlj0E7_HlB#ifWHHc6K}Q&!y$XfXYO7M}OW+kiS2xq>QIx zqF&?5aG`=|FVKiVBQPs8!uIzGsHxcvNDb4iNi1E6cmSPivlmyNY*wv263Sb7Ab*{i zrQ;KU8!SSEJUuVX_h|4m8csE3!P#yRtBHYF>a1gTJcPmnK3x$(T@uZcD~HGMkHDmr z!62&P!feWG%{ou8EQ;V5ye>A}N~aeaM(S?Gi|IJmVAE!i&Q{M}9vD76yj%1_!bmz` z!($UzS=?!V_lrqVRo<)*O`X2ASRV~%DE^@Di9dBTnI7B{H1(%K4%8qXLnUT#tJuT- zDJ}FNZd_bQg`-ohIaL{UcxlBZKKG*g4i?;j^YT&C?M3xjwajzB^?X9&nwFn_=_^2U zsv#@!LR14$BDkCx&A%jWO~G}bE&hr8S_vq=1mYMEBTrD*4Fuf{2kFw4PgI)qyucTB zAo5`%HdePg{eHX-1oc@7Iu?qp@}(3TN;Vg{#K|dM8q~+{M8_M}15Thyh1+>p6Bcu! 
zT`&9j+>mJS6M?Ljpb=p^hF_TCKg!+E667!q21I*}mQ$5NnT00*`FyB+CBIS10uhfR zkd;9YwJ?2KWSn|aaw|jFU%w0a^?2d2F9~v*4mvW-%hC1V1HX84Ok;@iqQ_NSfR^`* zv9zz^r%!+U)!*L#)c2wRJn)JAJce>9m@LSlZ&AZXrb2Ca;^5*gl?)_@SR#(SdPU|Is?q#n`tr~hb~CD=!v?m z82mq!+polBlbZf|tA$5!#N*le$Q75W%~0IJpWmEKp-QiWvF4dOLr~yc`bpcMFiqp5 z5hbyPi0wla)0@EZG##T}cir5ej10G4Zg$W^fs^$2@gHC+qEqGcFcaz|jZJbF@hXA8 z*ibx*%XgFa6!g3*5Q36ROF5^axxZ<792e|4=(TaVX%sTT8QwIJ&|3!Q`4PM`8piUR`>D9GjHI5) zfLP~>P-wAF)i8LT8eJ~O36jUVejjhu03U)G@dz0vJ0X$wOT{gZKALxX7@RZCI%~ov z(+?kN9)OBKS}n`=kION4?O!uHF4lkm;b!OOo-CB|;S~%j37}vPl5}NN3eT$y#dML0 zhG1+Q=_^FIQ?u#)LYi_pfWS@oQ`$eJ6tS4Vd9x1%}(E302^QB!UW^{ds6)MK- zHxr&yBs92Z=StXTyZA_n7x6{%*QtzExf8!vzZaj+-gO4paZDNa?M?N`*_V@CTw09R z>XXdA$&1yBvg8u`1JvTP(TDt+1ak+q9@{M&I+}fnvb|yZ6{y)jL zlAdD@4Aihdz&9T$)EV!+|M;QVhOsFkAmX~9Cpq$&HuC3JvGXe=aC2HBj%PIGDUh~) zD+Z`3B(6S2aXg-s64g|5(wITebV*7FYI!L^*#{*$gJVfLxtKW^Bm^c4q53Kv%Y9%T zaP4?eobjOR-X{pTNr|3p^at@N5+&swS0lg*_Kw$U13O&|_EP-r-jFqP$MWGs^sqa&A;WiZo`2*7aB6U5N?x**E>AWCV zjF_B!Sm{LMns%)rl78#o_?osJ@g3m`XbeanTbDWlhwtvRd;S9an1EdWAqr zbd92Iz`?cjhF(j8qSS(fmWZwec7i;^kUl(uWSWgo%*a)mJ66!QA~}ayJB;j57r#{F zjeY*YG*xZ@tIm>z{e!_OUr;-|zX|H>4K};Ipd86q4N{9A311Ew;Tm@Vkpyp=3$CZk zM@&IU^j|&biif#9eNROu%#{&e9h5GlpNP4ZmyhEoXj3kq@~{W#;6o^6lJ|uZgGoHe z7@2Azn3WRGCOC;c4|d_%#bTzUagrVmXG;Rr+iJ+C0`nX&#S~bnUraEN&|$qzJ20b9 zR3oH-4$${MPKA}@jbB}rw;*pXe=?25s?cZgc`OzUT`rWGaqxV<9~bDoW?5U>@aQ&z zrQG+c3Bz6D{o;E1CszgZW<)f_>UWAhG6{GgSUdj=m>BWqr?tp^$@+QozUuw@QI}#kVvPsW?I~QG{-j3=LvbEF)0?$+wU98PQ&xu`DZvB>7Q9oE1ZH0OrK-+8Et{9@|>3owCWyTp)hiZ@16 zoyqjLFy+cNWi<{u$a-$c0LN)R{ptNL2U!<`ZlR#u9-an*L`=&^|tI4dp zxrw)qhy>GWh|a-fNg*&a8ZQrBTpGu`9No1x?cVZ=pbNpp8WtH##E8|H#v(pju+&&r zR1Fm%v05Z6mU%csPZr8lv4-d?WPSAP`GPr)@6jWy7F$yliy0Yf0$Y*34_c}eus!FivrsX~|GQ(n#XL%d0TB~+T0=khAMk!fH86%EK?(?n}jH!y&w#vZ@* zUpMyl)^<9(UJN)b`k#`Sj@Q!xXw9D|k)Sxa1ti7Jac1|&I8tkAJDz-%6Pp0usg%b$o`Ny^SN%d`h zah&}J?yrLx4@yW46;t7Zc6gk+6lqSMt`tLbe9V&5r%w{s!2(Tx{A+}o_lNd+Y|?W# zF#xO7KnWIWa~ml#<@<3IOOKzJnC;6&EV9iF=sH?)tV@zSGK9q|5id8OPjNA9Uhm1j3~|LOJ49 zASYx2;={}tEE{1wC%0Ai)gH&8T-@qDk;$iyIL>N#bTmB9_6f_2k4@eIus2;r-(;nW zle=2zGVjH5-cbe2=PKa`!_v$VXEox$-sGi#W5u9Y<4|LPRKJ3@52m@&C%wdn@NWk2 zc{i=SxYMZqT-wtoK2%ay5@=O@2{p!CCa-3HdO#7Kf4*Z1nN7R%gThL5B|WAkLA;6{ z1SS><-J`-0NM0Zm2h4peAqGal5YjE-5tYSrhG|^%@%6oWnr1a{W>L>L9cYR|041Sm zi3AULDgE0~Et6OSw;M}79%bsbKUj<3E-fvZ$Pc}Hi%OP_JnPYhsZd@+1z(oaQLSD6 zM015v@p>a8K6%OD5FbKoXs>EjuV-yoTt+V8U1gt_3J4Uhm?j5#`;?Wf z@i@9v)9xkHqXIA|1cF!6$HnVtX(ifyq!k31Emk}s^AWMx7cb&pryoL9&|G?!9rT9# z_^5gOkC*+P4O$NH?(;m^9zXlfjRDa@c}0PZ*HlEO^(KlZ6Q(xVc52Vp#f8PkzH^hS zA)yt&h1LQdBu+yon}gN<4BN`t?>Jj`k?iP@W<(&0qeznrj&%=PE88(eKjNjR96RKq4$69P`!#9lKIYFx3lAYU}jLZ zM6lSu67a1x5{(5AyJnbWT*W{59WUi#LR_)tggDvD;?T%ps1P{Ngyiof;eD01rv>}H zK4Lshrr%ZOPmfooxKxq#dTT9Krc$Na)4a9IoHLNx2qWP~=14Syo7L!P|FB>X$RraJ zpSfM$>=BX){T3@%7tA!L4nP>j`5kGL_`E$vet)iuRu7<%1w3jXTEfJ`6{RR8^NKAL z$$LZS!IzO|ITQqkOCKm6k4EG{ zUj5bNU53@@&pt`w^X&$v@JEk^A3lVPyShybw3-q={O330JWwWJLETjb4$QEs^?Z9@ z;$Gz12_ZU!3%am~Ui?)d4Pz;C+K)<3sG&amZr}|#E`G}kkiiM>Y9O{20jm1>hCskH z?V+vpK~Q?L%vhiS$8phITdSoD-c1XEnoGE!DlUy*D66P}k$HmKrP{c`Na6HyDt(lO zH`G(Bzj8{OqhgPfVsdAXn}af0h!_n~7M@!v*$UZ9>Hq7_Ajqnq#jv2|G2v%$`GVe6 zUS~r8Q=VEDG*=JRaWNOVPURHAo%bJ?7nXyAmYy#;Mj@zSIIF-+6hZ}J1q|o zCk(inKb561xQe!PMVqK^64Ee3%Jin|ZKq(M6r=&YenHo?+dr@0+Crr(S2JuwEMdB!F!SSkqELd)3G-EgUNA$0)iPI z7u&I&pK9BEUXyE@TGqH2Tyi|lBt4PjNrtdDX`CL;2mWM#Cw6fE@f+U=3eO-$HEzy{-Q1a}Y0-|$VHFCxpopQB?*ad3@oICTz)js)$y zR3hA_&#KxUcq}xrq8e&R2C6hZPo|L{%@s2qI+vpJFNHZcYRa!jVg`&IL!EdK!9hIC z*v$RM-~1oq-|EU?F2Yw-O3=S}K_3$=EdOFw@-^|Ii3TWQPMY17FO+U>ZIudFkn^IG z5vH67G@FB0(fx3eaRY=I;c%-LTxmi^N)pDNgckM^2D2WUrc58EyC}?hFrJi6Q;<-8 
z>e8Dx9y_FsYls;Dto=^>&?CrEqW%g7&H8KA1RSoPTB>CU>kqfR#4XvfZZx{|R40u}fnQ&iZn-@f?CIaB0mUAQLkB(E{?0CvHH^M!#HbaE^e)?h9Q#d8>f;mXQd1Q5XNA3Sx zEGx`IVBfl=%8u;vcKmPp^k|NxX z085)<@+FbGSh!-YCI19Q41^us#)8p#uyk8~`D*;W`obF+q_L>&p>oP02tNJUS3mPk z2AEF84_pECT%xPwtl9?3uVx7g78=jPCC=KWgX_E6rL|=Xd~%!EBq07`hjSqx@|08{ zdjNMUQCMfKMUBix1az%{bP~1z~8Yr(kh6u-zTTnOt!qaVLajWE5nEuz<3s%S00_;%e3JTHVYR)r-#-V?pnT1Le)x zC#Pty3n?|xOt1(^>~6;$;7-k%!^AK1q9TxpEV~=?^3$}}TY43JUpKsq-n|;^czso-UMOia(;dYIKc%Q?euLID~!sOZ?CN=c^zSZEXLO%g@ogzJPPe)iEc zk=pkkU-bN*jq727yKo`rVb}*;@kYsbR7cgxag1;14jBI?h@N6w;}R1jg+(z3@ZIH+ zX24S-1=qzS3_nG(I^NmJZ+yHD&~SLuQI3+IESWwj`J3D0nIRibyZ`w7*@A!Qk5d|> zI>#wTu&H<4lfAc9TF_vA;kX11rI{<}vi07~D@D|LjG@s;(n#;{q+F+9sDTp;)4d=%ck3nvg3)ZR>@^b9G+Sm+XHzd!1 z9*yTH&)_mCjGuQFRYVNvOX=aYON&dUDrI8vNX3~ir|*5_L=CCv_15=s{adSyxQ5Hm z%5;XceleX{IN*n3^-&>BUK;-L<2q^>32>-ri={`8y&8(IFUpl|FyLf0m2sD)2}MEj z_MpU;xmgs<1c6|gVV#Qzd=xcIB+1)dRrEOZ=TTmgRYug5luP&Ij`3*3>bw*9j1K5r z{J3L~G6PYY)8ZpB%A}waSfJuV@~W&ym}G&hOml!iizbPzm|U08tC^MEsj~}bZHT!#OW zqvMBG4;BZ+LP`Yskk*L52uUL2ga?x6aBpKplC!ZJS^aCK5SZm}XrnKPys1ieBi(+V z*@Zd)it@1lLZ|G>;+c`gWY?6fNl%DWDf39HkP0jio|6}c$9eXFMOhMmX;@zyxxHsj1M zRoTSwF6o6uhr1cVr#tBTlcQGLm%h?|!>^usN9s-sl`c~wjxNv2+&LN*FpRHk0+d2< z=?R$eBBD;sc1m9Hf8GJb+(iB^%Y~Jhn5#lloD$tCpO;F-<1o`H<;r;qRN{|qA$4V> zdVQ3H6so>fpBz`uw83C{LP#SRsKn~Hy6S@ASkLPAUcc*S9}{o4=upaed}u-vFKOk1 zxkqX$1apfcSDzvt?vrU^+f58HHM^bdc-vG{W=n#<-P<6%46U>%LFY?U6}ifPbfpkzSzASBeA;^-1VLBf8SprRU)`JI?ek6SC3pw zF}+xGO*@2?)z+{jl43jZM*49iSjUBLXq17fJ1lpxM?ePswl2Q1{`$Roo6%_IQ@UD)W$7SM>LY;`AhCpXcuNrFW6krMnf+3nZf4Bx&j z7UbIS9g%B4byXHbf-CVIugB);l@_kNwh@cE!%Gn~Kz_3?gd~U}e>DMks1h>}A;m62 z9+KafcPg+0gQFR`=wk;5^d#)meN}1-oN>H>KHht)v%%iA@{%nG2{txv_@d|no675VLATkZ13LbG~*6j(8 zz~AVgBGcssgW$$mpZ@3jpL^>ow6qF7pv?Wp zFQ%#R9$G~glY^F8#R&oHGAFDMKxI1@ln5S`%O}ThlGdZbSCb+Vaaf4&TZQb7GMc1u z$1U^gMSkn^&wYzQeSygcI;#kYHcysJTTIGm%pQmQbYa z-q)=SMRLNd356>+)8>jSbLs;m2V*fSvrc1voq*xT1U1xbuc@{-tSdQ=5dl8g77s!30xgY6bAeOms4RJKjK1jO^Z^(;UOni7!8_Smc~|l zI39B!C&TP1?bz!o_6#1z1fP_5-7x!-+h6wI8(kFHN@5?6y)e!fbBz)JP6u~yoh0~m z*5TjGw?l`RasHJc0CutubO^|r?t7O0z=s|!_}i`Ky-2$@F`JLaApljH=n526uPTMP z6Ta2c^JH+vpR4d>s3Ned^Rh*<$Qj{Kqw6&JnbA!6H&_Kz3@PbjlNcqSn|2ECPv-_9 zNKW0oMpkI18?4B!E>6#Ccdy7Ys*Hf~1}z@^@7sU=F!A~K{$Ku!BUzi2_#7IpD8Yv- zL%cS(qliLzH7%7&K`0ls^hN_;I?kzOBYHHVePWT|30q1ajz>{v`q?pw4$j6n3C|ba6+6W4bjR?1^-_I{5Y29qr7V^R95U7G%W2$ zi|nDoE5nX&<~qY-bK_ssstJ_dPo_aBQJk?o$%=$#Ui!K6->dO?l`k1kt!wJ8sVPUr z&4LG12_guSlqT}Xd<8bqO-TA8npa!(NO94qF^n$@^Pb(~E)7kC*GMTQ6L;6+3(`J$ zPu0Qrrtr&apbR&34XN-4IPqcJNN_pN<`%`X$HQ#Rfk&c#RPic|*5zkbu~MI!d&L!w zCsD|WQDbU#XF2xZ17ZVt(1`OA7vAgDm@>Sa<w`te{>r z2g0~CBfTRgFcOrbcCSNG@jd25c}3{mT6=~h;=#pgpo(Bj|It#fOl=xet0F;qE@7jO z7oPcufY5=7OgTkwN|K(Xl#)CFJp(akFHfrV3C>l{8v{2oYV&!PGOAx@kWdU$4wP$n zF)mj{gJpQMxk#DxrhZI-V|XwZ$gMtNzEQ9XEnzyEIca>4gibM6s<<|Ne|37 z2}ha{SD@ojhL4y9Wm$G4h$lDGOA>0j{3vt3K#w#`Og)9>BGr8eoi~s6=0TwQk6Z4G zAjxx9OaRiCCA@u3Q)GeqiWPcDbrexluk4sY{(Qhdyiyp0Mj-7lgJgSaIpY%5oh1pa zm$pt9spe(wzv{(Bq?Ih0^4cz-10Qkmg0CK!*mwpd%oSUVClL4sm^3XJgFSVO_1m`k z`|$&{lA~hJ_g^vc*&ALXOJ8I^Z2@t;fKrrP1J;8Y^v2cOY6f9TKGrtM^sS?4iCuY( zU?@wW;OXeSZIXOX2_YTJf`*&@0QKrQDA`uVV*wfCI^m+nA&iHGDkx;C2raBSEGd2( zg3PI9WeBpITRR$MkzcDCns>6>=ryiY4H-T3CLw=fT7p8T$aQ9;xkQ$AZ* z^s5n#q%*4i0As}M_jzC}VMzJsLmJFi==%x*qTpdNT532yOlU9xT6&Eod#q7tfFv%r z20Kx%B}&Y1IKhLsE}EJKZy5O@2tNJ0U;SZFRVH7WG`b%br56l-)a0PVn+KF8pO-wv zJIpQIdYYe=P>*fL5ZNne3G!W>fpRI4brW#1y$b^+Y6g-VX?rz;6sHrS#AgsVmvxn3U}FJPz3fm zo-sg;C<^nPJa>3@IeCXr@h-?g#rN+&f>1$IV^icaTaHt3uTnE6C^hC%Yhr2kUsHGl zofFK(>XOm!KYsQb@k&L9iv2)BNTKVbG4m0!<`)R`-^J0ccJ}=)!pQWny(wZDjbQs! 
zEyTY1nL}D?CrNJ;fpOBl<@_eYk`m<1l*y{bzyP;H{5U6~$TP*MzewB$_YR`@z>g^_ zn3Kf&D$gzeg@-C!4`bbUV~Hjy;k`1^3j4v57ZV5MxF`z$uNIX&fi_WZ_9X{>|3z z+TNSK3AL>jRpUHxx_GB~O{I0sT%1oqLS+AA@*z2t?V(vpA&Kgius~A@VBRzJuUe$t z3*PHHdok4%ZBEQrLF1RWVpIy5asyg)dD{b_*>kxArbCp_b%!Ao@O6$5{jKlDy}*Mg zL7m>HTt6;w!^zp%K7f%N34qIZC(+u5h>dlVX#Lx;39x-bBH~QT0YQgihgeKllP9ZXtT-A+i*=W2d-Vpv5NHD@?{%jrqRuWLrYd z#GT_6eT|rR9R+q_1`9fv#3k^2#5P(&w&o;TyPE>weY4kl8H@Ygk7Hx%-=)l5M)kM3 zUK+I$` zz;bD7dUJP)yrBbLS@@-CLMN=k7Bt02aanw^tvM)W=ax$JQbtaC@nBr^Q)hQ0XiqC6 zE^#_vTqLVQu8#7h7+|?58bZCx4}a&gV)wn)AU9tFn*X;t$++6ncE_q6{How7G*e8$ z=nNPUBlZEVb`mVRH`ptZZf6!tH`{foyufqwGG-}x$90JE5HE~F>9kfN{`$Eb^*LV* z8w}`dd}0i?q!B*F@bYs@H|mv7Z}B#&KBNoo7eQ*kyn{|}5HIJF5asex1uexMN$G$^ zaEGVuAy$*-iNvd4)E3s~Z;SK!y4#Bdw~C<}*8==I7vh{Ho~5FL+`wGUvnqTR-j(@V zWHcKD8W`&sj7mOd1AiN%EdI9icq#r#tHodi`JyUaGNa+tUXo9nXXz6-&9q{MSYcq) zwYPYnVM*>`ai}nC&GG{u{?b*2&6k=h;N76KAY_YBaK6)&FsRDVnK$9%h%5L+=^;8P zrVMt4lgo^iNY4Vy1>or#JyJEi^r86&y0&rKZi2G4{e}5M2BW?Z2ZaY+!49wv7rEwP z3Cy$rw-wI)$62T|hhtLdVG_S4#^cvD#4V0%93YxGhyUkRk;g&NN<&7L*@i((@Qn#ZJe_`=sw4Dg_DSHK7Q>9-S3!g3 zPzMkE+U)H3I6Fj*(mmeUTk~HASh^XjRl&)yfDUP-I4q5hIR>IfH6ApWmj2ejGXc{F zU6-Dh#A<6GGA^Y%{Tal$!UV~w%0ctFonos5Q}j>A#|2VItW#!E7s}xk$VU1&KsS zu_^1Smum&yV!y03yEkSJhqu|?!tDwsL#~)0|Vbxb-G}tu4 zxsv?x16US@!({?|zW@F2`;T3z@;PwL8@jM}gA|JPe<)s%GI>?>3$B)pvrv1azhfJi zFTIX$9u#W=yLL%Gpes&Bz$l%FEvh4UiH{(itL#?F?WVZ(njFi9O-|aBGl4yB%iGb1 z_)~L8|NW<5#O|7^Pi|_%q2r_Yp+mu@Vo7j1Jr62mg>)cJ8zpfjW^Z&N*2p{$0{n7p z>izBgKVbO(?5kh;8xP|e(@>>1#2f#$L8lmt8(Vq){BbOugV|c!GLNcs4!P*8CY`E} zGG~TU<&6*ls_7icop7$rP6KlIkkh|t68b19p@9r|TwVx4l1KhDaRZ{2x#aCpN!(EE zMnb~-TOD5F|6Lc`flZK8L?{HP?$a;6`p5hKKtqKjp82cuHcg67Z^yO)dVhSQ#&z{R zD*hrISI~0#@$-1nf%fAabcAR%8f0&KFHPT8RdSf_Pa7IxLXdmdGMRVajtr+TFQ?S8 zeERAAU*G>@9G*tojHqitH^;LUm490B<>8Nr>@oK|mcKXX$3KZrd+%Gct>?ba^W~UI z7$$=j?`h|*|AXy#{-;0s)gQ(GdHtQ0y;o^@YH0H~0Ww6R66UcoEqY}ggCn*@Nj#o6 ziKJns{@!GS0nu3EngcOGihT_t#vb#Ylop&^5^ORCuD|u8ZYU(8TE7{~hL-wR{5T6v z76sGqZg*D)@n;pzaP)EKxl;rMYBr#MI%4Vy!>WT-@H^MK@dIOw&&zfxRA@>e1q$Qn zsyFxQa{S(0HoYXO<5|M^oMTew!2ozSv;*{~Z}7pv7RQOn$UY}8DRt_vEIwW+881LK z&9jNkGC~7nOUdIx)`Nh>EmeV5A4;U&opcW!?QT26hx&I8*>f*}N<&Z-c1z%5T1lN&p zu*r2pA2n|V(5PBX1=nqHWzeI{A4OWmP5aj8-!pgjl^3CixUdh6-FZo@x23Y*X7E*z zTfdHq1SKTn$xF-2Uf6*Cawh5uNx2bKQawzlW{)8 zILZ5^2H<54!rYKXP0=88R)ySlRu@AC~zwq`ouD+rVca_L~nLlshJEQsM*}iRx#gKN`PyO<*&S%<=P7J~O8t zf`T`Us56v9P!%j_tlJ{LyEB_2L<8cT?p8fhXm|vTFaT`pb$VO=bLRt6ny|v{u)!mUTX(wxOK#RSy_)aqmy%|+y+K?95gJfob${vu1 zrR>4fIZ)#dx1;aTLd9~i+wXe8xFW4-jT55-x)rC;Sb9{n#Rj44xCoSmrGmvavDCwT z+1iayQNmDKa&+Wl$DRpwm(cF=vw{(#tX1TROGF6FKIaT8lA3zf&L+d!L>MrkOo!nG zTGDz1=NUW0M~`1#vAZPI2&7=9nLLUlG$?3MUrHm|fR>?tk@c;Z2J7p=bxX^SVh;y* zh)KVALj~kPq;uxInyK(qfPUio#PKWaD%5>w<*j0a3#r=rb2{x!RJFwAB;`k#O0guS zmstP-s+UJw%nIy8_`NKKQQz|Mv!sRHlISc^na#EAHPPEE4Yr6N5@?IV25G>V#!m<` z+FW`at4gZyUXwlg^|?(d(eT#ReB>nW@DL>0tfNQjTu$aX6Vf^izeUh+_EJX34ES`C z8KaSyXSIv_z<7dgk1H&E1uB|qHwyQV;g0iCq>7AtNe*8iDuLs9DO4JaBmyki4<}c5 zE$v$wYrDIaZrvj+?qHZtKfnKFFcy91w)(Nk(Pha3H+)h0d3=sG8NoyU7kTg29O-tP z>5cz_?eIPKNEWwDk=+3LB#D#@K;h7h6RWDbxhc|)0#E>|aVQW#bvIg}iLf-+ zOi_GjC0)Qb!&~Ph>57HwY-~ra&AdX*y2A6;D_fJ29t9TJurR)xA^^QKW9^5Bd5En+ zR9qG=gY;ccmylBX=jZT8ZcOPIIXrwbEr@&~SWtFaA<5Sz1=~|vWzO7>q^!?N&3y=> zmVkMGIKH(X*;0xpMIeJLP(uJOvZEFZwc)BPW8V+-;T*^8fm4E2W7EXGD7&r39ap#E z*n=eq)fInhgB->*qeawR!uVC2s?rnl+5pQL>!G#~mnA@>7&cvN;@;Aq3Q=OtSzhhO zDFJ5ZH5nwq^z+_Vh(4o#{LwxFCGFSOonALM?!WtXyh?>9X9d_V)^g*_TUC_@Tk((A9*qaYg zAAkB~oXHwyldlJr|4>W3k1RRyVH|ML8GDk{*c|9os zOniewAap)^7NCleJUObBf(lZ+zz|B>0J1srPd<@f*D_y1X3A;9j}gdB@Z`+3LJAiD0{ zznsRR<>{k5E^zK{ueaOt`v+S#ijE6kYy$f9d4q98Gx|o;Cy!|l#F0Y=sKpuHp*aG@ 
z&V=`>iAex3So3Xaik&*`QmKF0hOw}#F?r+ADVsmEDxjf!i$B28ei^e#(9_eDNMB8+ zgXw-@1uWLl6_MkrXYi#(&YF~^b`AG3gEW26L9xNt%8ORZo7y{oA6m25LFh|k>x*Bt zg~4<&Nr#56nf0WoqO2+s?8wp0s<*WR7zy^5TN(L(MV+g8pK2&k z&6@zi;lI8bUZD9457EcR!89x-597u%$&^K>G-9J$MnQU^5|s0;fQ>cbk&0H^nT*mw ze3b#*?NWM2v!z;H@#1Rg<#+qpx*Br{n7JqBkU;wRKmLh7`eIldH=F=@hvk0J1UI$U z;=+=NLR(vSIq|c1AK#GpGK*b;Llz4KU!pgaNRA(ACx#V2XMSAYfJdo3Nn3vW!MDLo ze^6Cek5U9zKhkw(ZqZw#Vb(uQsocP24i2N!0u7slTX23KcIxzlBD6TJaruMR{;3Eu zz)5P2qC9eZg(yU4wNM6m&smy(xvgB^vcgwGicIpd37}1-chHU@W)pQCtlq*hTC!3X zD~trMJ)ozg{QN$Q&>8N?zv?qtgRqAI@_-?{DfEcu4DizGt@b%Hi&hGf36jJjl@Y>X zU{A4c0?@PGyP*d^|JmK&fBv(e5v3qTcPrm=UJUxt`=3qYn2CJ`lMuU;ALk$Uzzn76 zm4}kn0u6U+J0-?1-^4ml>^CQcGui10UQd>t{bn|TWKvCy=`e8C)u|}k^Zk%G2g#JT zMwEd!{%4r6-jeF!TjmDYJ~vTHG|wnsps)4~edMaiH4p$+2PL zz-cmFE5|x#G*>>EPtX8yDH5U;^B6jOABBmX?Y!LDGV`rzATG9KybQIzxgMwUgKwLh z=!3U~x)cM~I1b@U4ej)ESEOdRyYYZ`2of%QyoMbN%dixhuH27-Hm?cP86Q_9G0_Y# zSL|h_32?_O9UT;0>~DQ z_rkvSz3>@n6<9 zw_+Vf$i3oqtdbNAF>p93h^tkBkc12Mu882!KSy#hPltfysC+^qaB{J(hCcDo$w``C zShVzsDt7Wq4Vb(s#aLjqf!7L&5vPM9Ibn2a$V7xX*#3ip%oApf2G6G1^=X^NUdoo} z2tQKXpfJfR$3aHdgvPzeF1WCU?Q_^vZA8H99H-AjfA3;pp0^y6(=3{`g{lgItAA4Z zgtUz@M^?Df<&-#jFet7U>5sTulYV>!eN^Jg8Z0lju3#fu{cKK+_FrTfMVaW)i#5(ldDpi~A%>2G|Xfhy^Q z;wKB~!YtkR#Vm`$Z@gp;Qf#J!A3~k#WL)g%BSqlP%6B@7w@O66QP~xBvZ*u~}td=UTEW z?IA-suH;R|MHZhGoaoN@^mJ@dGI~2aV<4yK;NxWlDs-%fPe!22sw@1t&CBMqlPAhI z*NZV@6I|U84jA`k=cw|1nwrJUQl#bJik`c%MIGhH9UC=abav|S`&lQ`uKj?1zBrfkOR?^pc^tc%Ghp493EH@O{($nIJL4A_>xf7uZ zFyP89roSvP0M_Ue5Ll9|uz#_M1cg*_6NtVcj`I{G+{=f1%HcTv0QhAPPa=^$8#NqH zI-!z)SI3Y2S#m8te)sVL`9{RvywDqsj%?qo&bnE^Zo%(6Srm0SMwuqXo;^!Qz!tI} zH_cbGb+afN*wvjXl_LvJg0hoIQQwHrIZy)95`~`ZxI7}-kQW|}qGYQ#Lg$M#`G_E- z#zzxfX9D2hrX17(j(Y3QV8&RZL=2tdpdID!U&J*dP1QHU(lG~X>?|)Km%A`8Cf+Q^ zm&@nakepucDi}Ziw6*!71P2G0QN&~{PlW1J%T@QATB7MyShxI8bMKNDZTzafs^BD;f6kQ*!;{rOcpw?qZnfY zoCbbsQi?Q#|8h<7b5Ob8nhcVSFJ}v)bNIeVruhCp{f&QGTHGHBe}VOy_TKpLu(+zs z^Q0va-eJ50%VKn`b{s{h&@7M~oe!*wQEzh!8-XgQ3ZA+iF_48dQRy6uB1o~|SE#X^ zMuC|qit`O&w2LCb5Ab^7v3SH@XRwj~Jy-Fx4}z&Ohg4mNjQ3?{!MGuiu&K}yCB57V znl+6m-m`|~=tUB7r;JU}oDdmHj|!w_6MHXI0kCnKDKgxP!OW2Xvxj1483tx6s%z^- zt`$;w#+zO*1M|+4IS3!m!5+!cpK456EMC>fegz;z*(m0$Esj2x3#Oq4rPQ&wd{xSi zP5W8mCw^hdfIPatN%8#8|MJsceEyg5{0S;2gT~6tzVmmfLC_?vD`z&Bo<26S5w!gE zKmLh%NAL>)xnh?@;NHYvS8+LKZbKZ<;zAi&vm)rSiMgf{==k7HXAn22U;covsHQ_uiGn9kf_j|NGXW4A{h?}2Y{dQ zb2S`-xPYHP4)i7!3_gKBDr*JKi!ISBsm40##S2540)1gh!4(FT!Q1AQ`q5YM5O6k| zJMjZe#j1gM01weoy&4+VG&n|+cuhdv6%CK&w@B%*h~J(DJ2WcAOE4f0vOeYUQkl@s z+`r^}lCfpF32WP*-8`Ps)zrgb>S1TALK7 zjh5?0RBD)1?NZYlmL|NIA*eCVAw7#ddj60QEKb%n=qgN_xK=HoT{3Ph2`JOl-UlZ2 zEwTI{D&1hfX51w4ZY$UU=9*?n&y~FP z=NhK<{i!nSrZl)veCoV(!{x|Y=pf^y6e$+>fkxbAA_8(nGVq#^ zOC71Su(;%lD4$2&qiUi63eHpyPO&#Pq%;Z$a0w|Q#%g6fH|jp~Vb{0yI#CW2 zoftZ2@1H?RPT<$fFQ~9(o?xlK-i;pAVZ(q*2>U1lF*Rm~{VkQt~j)`Bc zZ1{}Hu~aW^1CShvUvF!$ylTR<*^F2Wu{g51^f@?B<#GACfk7rb_jXwg7x|3s9Z80Epczb zw2vV&_p07{JpGb_8}FTzBQn~s5hche%FMf^$^@Y3r9}cPI&rsS0Ihy2iB*-*`xgU1 z=7mR^+Qsb&+h)H4&}BeODpZTYp#z3gn}TiTxI)OWmAj8W`K>rKa)q?LJWEN0*!V^R zxIT)S$X^jG)F#)$2aFhaQUx@@hRR6WGRIZMD2l(Sm4rvsKQF`4(Nj1MW=5gK3-2u~ zKJlXWZm+IR(jsa=^CZt<0-Tv3mJBF)Qpc97F@gHuS8cv|I7vk*e<2r5>!gB}CR{fx z;|;h~$X!&r50KoNX03QHOgXGFKn>qVS5dWi_i=9aBR}|_+|S?Q>2Z3W|LX2v$A8lt z;J_@e0xaBig#3qOL}IzR^21_cYIe8OyxLs)RFKf8{!s95Q^G-ZY||D~@V55IYEEJ? 
zYW`_&vE`L%c}$N$Uz<@Iaa?s)t2>Jq0Ev-6E?>ZDQr4$lmT%&lhx;_#h(GviT@ zh|Q80|8UOm0RRH8cq6#LV+!;v?cY|gYipHMIC=+3pDt932IBZ}3bFGK9}GHL>6VxU zd0KRed4MpltV8dAZ%Pl9g6P@28J%w_Cgi2^3~{9t6dUN^SeCPDQCEe)eVc?wJJTB=sg6B&xOvk z1l($StA$4oP4>&{TT&6!Khz;ghhw^tDuQG(QUEkfQm}Jkcym=RBwOmNIvU%Ms__&ff3r;|N##o8U6iDavQ66Wd^gC?hqB-RG3a|Rk9k{}4O_%I_v%hZ zw{6-{vWij!#UoIf{-z|dfE-@sY5w2`Kk+fVQfYLoNYW*iL57gLgelac-#+<$@9Ft4 zZ~JpXUoJ&|b7@gSY7aq)48!z|XR)E#jWNY&FCxHQ(HwpB4|m0U#kdOcGP7uH<_yLb5bg?qnU5IjPFty<^&9PJe4Rwb-Nc9OSOD&im_`RB=(6=seO{JFJ(p-B4PHyP^D5$vh-|L zDiWxQpM2wAoTrL%%~l*B?4H5XpM3EhFJuz~N%;f+2CPpL8}Y0yp~Q;`DRaNr`Y*U^ zVs4)xZR*LJSr8FD2ir{@SkYkn)O!LY3HqxWGDE^W)= zQKkiO;Nf*S5K5=a407I}lpP5r*LX%~{^wMJAs_`{=AHep00BuHl-B{1(fAQ)646ep}ot7te z_2Oditb%l#l<3U?s)v&Zy02{)gzqW^@)GV|r)o58vm7xWy4V>Ue`Ynm_}wqQ_|6x> zUUwhgGJ`!Wr*p$@Vpc<^d;qGTr)r2kF^^6z4ytD8MD$dJ;xmN@WdIfZxr zX=QR+cF|Y8QFc_j_Z$7;ozX#7jgYbd%@Kd&hZEZ@-}}x%fvv^6W#i7YL zX$7Y)IjpI`iKYN1lu>;rirIa!t!#9p&N>GrKzO(ZYZzVX89# z8!0M?YLoy9C3gTevfsi@N#@9aJ{cMEr-nSzqAUx}Pvc3I%C*i=Rs0B@vhqv51SrpY zRc=LqGw*ez&-h2RkxI5!3upS~F)+5A;UUG1KG8P}hcMlT>Uo;t_sN4tezWQh#xPNE z{pUaZ^lwZ>3LgY}_LE>=&M8jtYE*iXpsAV6B&ZE}`RQ+J>?;2|gC zyS-h%4BQp{#Z82Dfm(v`)%7E-KJaEGaL{^|uoA0F8FnMSESJeTe0JRsdKvMg_y;XI zJ5Il-H+k_qKyjTaeUpqP?$cG2josUj28U#^ZRm#O-=$O;2a&k zzpP`YZ%II!xV;foNK!!F9|=cRRG?Tw$2Rz%zjny!(~RG-6{YZkZEq2jKC2!_RU>S@ z3G>j3;lSA4p((!I!fR?ORz9*phc0iHfZcft4fGIIcV5^U5rh^-Ff{#9IR7w^jhT)$vj=K0u4K|62_kdpXIGN6P&l)ot zlITM!6g_=aW*Nc1#IK>i#$hM?xMSwRryX4scJ9*>?HkFR-sEu3I6nXpm~U-?iXtjk z5$h!Yx4DyL!kina%-XMW#cdcdh$crvOGxzC_An z%5TZNn(D2G0HTT;XI{srM>MdkU_YpaAOfl5l`zd1R36Q7N?{i<$Ahx4Klp<`h{Fa} zg*5czdXp)2qiKyZI9&I{?G59btxJg*?# zzV512uhR^qEdRTyq?S75<#(iB3<7En68l*2)kfuY0*yxHwEt2 zTVc>9h~esdsI>7$oq^)@a^=oS*|G)?Gj9gd)z;(iO}0*)>2Zo!LzJLf)D1Xj^9s2g ze^Q!+AYuIJ?&BZ-v6s5S5bRoFw6K&lbQhD-CxF|k;yy1IEI>(JQ-z6DFc;GzO!~8G z%C2tq2R(m(jiOicdN)gWf&p8jzxaWZtdraiCc@J;Q5>6WJ(z0FN$oP!TcYFwpwU?t zSO~f~=g?3?DVngt+nWw2$OA>rq`scU4OUvLG;l6CmAs}+()@H~OyG@Fxd^ir@|Mw7gY9Pa(( ziLfhHpU0H}zgp6a!$TUG!5MSFZRMhyP3)z$gnYF{;l&A_$U)^i!k^%E(i&t044?v{ zkMWqb-QG6+Vs>}Tt@xy^QahMp>0tsmy8H`L^vTTT=22x+;<)it>@KxBs373IIm?g& z+u}8Eqw`B4!piW-mzJh!GqGtsF_`n^aNFCJ4q|@#)#pDC&Z2jX%3$J0JpL0#xci8d zZQn0RuKSQt`2;1Vgfc@;5vv82U`E}?PQ+?#S=ac?HPPmZ*u;bYxEQMwq(7*n4ujz5 zDhh$dQbG*J3TbfHK)dm5$V~mXkBZoG8=q=&-S1e;6R+GUOiED`?oftTK7IY^mwtA? 
zGbpC+GM>&f;ccJ)m%FcnArOF8f(s ze6@d3L{vw(yqidcq7N%9CP^h@3Qj3b0)7zo)vWcENk&p$<&Q-mZcrQG1!7`mPK&lR z&Qa>@HO{@L%r!OSjQc>P^Wp>V0x&avn}4RgC6ya-^vX$|ouPaD?e$?gD*GxNEMKL+ zQ(Xra_yz+#)=VPtFYf*~vj7ymxNa?|3%p412tl+`9^HLx%4$y7(R5#7;b5Er+!z0i z;K(QjVWL*;B*Lc%oC4=l*Vj&nyujS66`a{L~SBA;Bpz|<_{3EeZw*yBg!HgT41?cl7`0)3oEcc2KUMCUG7o zt*)yaB7vR#$$PIIKdmqm9c*v%YNDraZhDgxN!+O2UK~F~R+fb1F4{y1dsTzUw5m>p zs;0%L1ylu2tHK${XU$sJ#y+_k*J%0-Y=R3|%O0gDsAG~+2{Bx%RAe`{G1x5wwB_=^UB#{Fyvl3}#>3Ptf}qTmRn;59Dx?payAc7E)qd zsW2mLkJWKu8(D_Vs^Q@BRnu9qt{NLY9v-TBp2@9ops18s3Q?%PyZg(a1OtVO5LYjP zxws{FsN51wrjg$ql)(;sz3WSa7bcKUX`4*Gwbky}v7WV{S+!fCx1_>s>8tw4EVrz0Q_b)!EpJ-Qr-;XxVFgOH4(!yI*$*q`_JD36Ze)E+cj} zH#hxVmGY^bxf%Dh^GNF`8vsTtF*@*ASF(;*-GY)TaC?w(5MX}aad?2n|y*$;qURa^=?x5rD9(3HiqSK z5~c#F5Ppt*UGczp8&(aDO`$xv43E`7Fh5G*FkV8cMEbW(kY4dVjc0LCcSi}tYNc+1 z5h>0uonr9ZNo#t6eZrSjHf|uFiJbR?e%ky}2)G^s06EAd2v?pK+VoprLYx{GnH6)W7j z*2P&+Qi4Q^XF<$r|1oj{H?l}ZrEBPnI}gR+FgZS`Xo~Miu3+HAMq9LZZVdhI`(mF6 zrg4_!6w)G-Ts_cZ88oB8gtNZgi~Nvku`+`P_?H-Os=IA22Rb6yl%^6dy-o%PB_@qhYc{(N|C*bH_PQ!+cbA}DNho#S-l+wco;S+@_qb9ekT#k6G{T`A!FN!7*wUV zO^S^!BQ6s~ceQAPz+|y)VkyH?c+;U^A8ROug(^$${!Pds>Jsy^s4WX5^V@o(SMq0$nA7NX@HimkA2_bdNmA zP(K%=kiV!3BD*e}P31;-H#ET$9x5FTWJBv^VWGC(I!FfXA=KA*fA87ImWP%@azc>DVm5~)lJF03eKmX2S*l0#%;Qmj~^_W zTZq${A!gY8xzQ4PCupYFHA&M$0qITr%Zt$zquMi^R1RVvy>q_rlRiU%;xjh*ke`53 zR>7Zzx!U{05xwRpy*#S&8UNNKlz$24mnKy21-1_gX8W_CII7&lLGszSDAxY&*I~SI02QdUDzJseX2sI z5hv^yRG>^RZ6v(15C!7dIv%GLoNy1k)ic^}%Ix&>1$WM22Bz1{A1qgv){FF6i*tuB zh`bzR8U}T6RJv~>A4y7+$I5F6Os@GnCLIp-A$`ChICXCnUju|0XhIQsD?< z=9a3*Fw!faW_5ElXon%iU9OJ#yV1Tt>hV6le^Y11@bLpp&^SvS++_aO{O>P5{jK@$ zzWXyaTD%Iq%ay-MzQHy48-Kpf#idIJ)=^NNX|aDENAikZl6lW5cWD1iwb?Fk)q^fX znFvV~Z}%lFXxV7&>h2@`F~0DpX(4i)&nK8XJ|GwsFQ;JWyqx08y;VXXX5&8axfAx~ zr%`iYg+dVFs?>o@_7(m^pj7DEgdz}F!_UMKzohqk5%DdjJ-RLax)NQrfQw{x;hO$f zMKiGl)v6Sc57rzYcN&VA_|vO&ZoqVcAVd&)Ly8$!EqB3|ixt6zpuU7py zDiKS(90nfmQzP!{jJPFElR?B=x1SJxNvH2a(p=_LO)7$kT>zC}Kt~zKa8T-jh`m&g zf=&#iRAT{qEnW}FykhborPX*B_aV&}VvbdBfdj;`Fo`zX04~y7X7g8Ie@WtHpAv3b zVafq@P<}DZaZ265&!!~X#S&Jv;I1^lK@F*4hEANy*A}V`SOciNySY6>xe|2Z{j*0H-kr}^B$^P}- zUk6LH(LxxbrLzAy*@O6sU^cBph@*+?h~;|f)i69tE!6Xl2?czMl@Xr?B3x8PfBIp1 znlTiHN$ZAY*7EV=(it59Qo#U0T~ttGk+hqKB%QHPe2{{!W`>k3?%v8uEMv^|ivK38 zHT4ED0kA$GT~XUq?a@ma{6Kt#R8CEPPmtv)^zJ_Xcv^((AyKgmonp6V=oCT?vttK6 zUM`47;f};Bj36k2Rys`!eXm>3ojlu~@0wmd%3sIY*EeXlc zAg)L?J}dNTvJaA`SiRl%G-xPsIVB3RJsZ2N4Zzg@4fA?jw0@_-R%;J8+{@g^wC!N%zw5eU#olUFLz61wXN-WJ7jPPMGQtk^^R!U9|=DAumrU%Cq~$b) z2%QC}+7T|RmSyZ?w@y8r5P^UeN2&y8km10mbQGJRf*ItXyl7|lwdLeGCTg4$S*^_# z-vVmHj!)!cc(vX4Cu35`{<=n`hU>Fc6m4!0^^x%BjzUqN@|Ng4LNHGGHZ@4dZtN=d1cH19_HbB{o4z+_+ zcWW%ejL=H1rHw$@d1jG*`sH_hUhCThWw?u9uYgYMf0h%kBe^#(T5HsbQ+#Qmgdd2* zUP@6YYcjfWg1u2&-}K*&lLC@nlm!$Sfd4r+8JHF}1SJYCH4BxMrHU1|`eQowm`+k1 z#gHJ_>(9zat)}|wtJTy&G{{bhEf2C$1n~pMhLsng06VBTmgiwny4xbj)KbT<_AuTh z=SoPHClu{WG4?cJy<4TBm?mnzt$r5x*oGkq;rjW1z57qz7k(~L82|igdUjfDz^9Lb z&ES#0tg#q8b6+xop0wmEEQihJZq_RLMX8z4Na@=5>umZSnT8;b-77f_%Zk5zH_$6Q zMEb_7MG)bY1W|=FUJRi*SohP^g$!V?(YMr@ob046NRJmc$a_wOwKoRIRif^t)6A}d z{b`3vJjV-ym^kbHxi#r6Mcte7ED<`Wlz7R?EHzp)3)?(|2(=p;0r-Tz^Z6#~X9=F0 zJ|i_Rg^1Q%cORcUdlssYQp}(e3Aw9+R-}pwYqx6oI$8^P1V71Mvxvd$d@EpqU)~%O zl6=Q`7&^dmQZMM%UP1W4T8Nj%igY{P2SKQHYE8h371V!6oNkl~@vtO%uW=aCnY^W{ zxn}gJRVuC}!M$cT$@ZRSp|laZC9Wc^hbcF5dXn|ZKivX?VU3e0(z&o33RDRdmH^e zP0`#}W5rn0(+5lOOfp?9iT!PJ~QLaPa^8JV6uW{ghom8 zA+6$lVaQK4pi5ju1x!+>-+kN}?#EvQu72VlUe(+T0GA2zv|^L)VL+GrLEkyY39;{{ ztq9z#2?FA{wtnAM1E{i0h&wsHy~r$rGRyY`)#X#K?_1595>BB?5!tC~7v*_pWbuMulg4fMWbt zsr_l*qqmpw2r|;KGeio2lVVf$&@rylXok7&g?GkUHP@p!*wi2pbuB&kbo;QB#FarF 
z%8C~rga8=OYSYX;AbS9D{fkzXUW2O~Ye{e+)QEgj2$G4TyN|#7Z8Hr1ZSkn7C`7`< zn;$-YkPQa#4uys(Zdq66z5}x47%*^xQukf#t!1DTwqU!sCOJ+z{eC_<6OqNy`g zom_DfxCNViuf5-Ou@cWdFQ&kaS{OR&HX7xKcia|?fs`*)b7x!MgUd3(Q7xs-!a#x4 zhmJGy>o>Pn`f*}6@{MEVz375olMK@fnD)Y`?jGjBaTARyasAX_@d_9Qfz2KmA|+(B4iNFSDc0@0bAI z{?2Zv6JOjK)l`d+vZ*_OB62cb+$s+Nb90lS)<;hsnJO(ZBfm)vjY`IV1(#cRuKE?T z5#8eXaIEDTly2NzMp&5!6q{5Nl~p2yi-dhCdyOLAv+~{^WomvAr~#}iQ|P1yktj9v zUltcqb?o1wOKp$|N`SZDl#r=%pO~Q;NeU?ripoVNV@foWzejiv9OyeyAR@x#G2Evdy&4i z`mSU_9jTW1^zdYu4dro>w}J+%KQimlA*r;nTE+}(y@`BK{jd6i?sjm&PWP)gnPY}P z73oN(2mBF&EcHOk_hb6RaysPPO$5xIqrwjpF5W>g0?^Sl*4o)#u>`@4Ls^KtvCJ`( zD&{XPe?LLq9VcL9@##Wr0>oRa3GZdul2ry=Q}y)<4t58fL zmZJ65_?i{R#+RO@zeE3c)~>4)M{+;t*_U8SB0{LR`PsZT!=$=^H%dpwF3`S6t7>D@ z5=|CnRkBzx>e?1>r2#6Hgr%cKN0^0{FpS5N6dTc_BKL&Mr4NIz%=HQBCMU$k$JLPl z34(+ZZ^RqdkkW;+8iP0W8LCtZVv;m4OTz2-y`7=YHi zaVA(JK&6T_x#Ym;sy@d#xRvw-D<)&Von2hV)&;hCfj$?7gKc0O~g$Pr7hi z9JLPM2`{WQ%dKgl)gg`8GVpVsQ>fjf2qLKmS-cx(am^IH#zz_ZlrM3d$cPx<_j=RL zAi|qTlf=DnxQmOtk1NG@j0w}QIz!)=9+g5>s)(wiBI|>IZGpI8y{KF&eH+Tw$R4FX zdwEjA_5|fXs2r74=lxLW5|RGH-~Oq|`f8lr^$(_bo#`P~$1Sw3ekBy7(4Waa8&Op`m?AY%R2$@O;qBb_)!RQ z;I3oBd|5lIj4oaWd>`_UE?7Z-RR5${_yA!+-@{BPMUexSyd zSbgjf5w*VQA38TVs{_R_Yqwk{tU*9Z=Sv4pd~Z@JHAMYh_9qJ{Qn2+h@X0WFPa*fT z$H5)vfygt_Cad+n*VU+Q6PhkSH)Y0B2Q)W^Lz(*}k6L4>vIOhZAdYu?b6{zGF1MgW z?$H6Ng;1%zg)QSo0DsXup`fSMmHLwy_of(nI9Ea}S05k)TPdP}0d>_uUIl`P85~C? zrguPPCboPun>AeYPK5`)fxA2X-CnoYUF&Cn*)kqShUtQQH?x=X+^-0FC7T@qG`2{^ z{}|qi(fun8Yiy>cY1hNG-ulWv@9DNATT@kWH7v0tI*--vaF~IN+^I@|JR-Lz_#R+p z#tJ*QBPEmZ@(Q2d!>0C5CJ5yB@x?x7fEi_7nO{?IZCpaJq(jgO^O9WIUw6eKu)JD& z47Cl`adkUt`3q^yDpIqj^hLNy$_q$CKQPImAk{m{iiFe%jyxLf$Lf{Jh~*NLd0$K> zAQ~J(sSFob5v|RGf_++x+x$MM&&k9-K=WhdLsp}e%gb#}6H>zP?7FnZr1~tAo{t|r zvk>8FxZ7#%a)BA2le{I%w*1iBT{ej@OBE$z8U7s#MX?j0G;ws>q`1Z|iChvLhy+5YrCW*O(PN>8wCTK%n` z`r-@Qi>528Yp2^?_1zI-mQ4I%u>-JJ&eXLlSOOs|I7}iI=xFoxr(XnD+dFH)!!U*J>zG>SMYX9n%ZE zHaOy9(j&!>Hs+>bw$e5_QIEz& zEOpMBFo<=jm#MN{@e&>>gQbvKpFm}pyzt_XhJI$EVtabFix!=$E1wm)Nn`k3Uui5+ z3ZrBEq=L>TzJ(x~7Bhl)C*{|@r&OKCIV(F zE5X!|hl8B)Gu3FZ8HoOjL_*0y5NYN$fdYnzp@P?#+2!GW@d@XBeMKnf(Pin*brG~ztMhKaBTG2={S5${GVE>P zUWkLN%|IS;e~ zDI4qzzqmI-*dI>g5^)~th69tECCRZOg~&;W_k)m}!nC+RJwS6M4!jd{q)P<^RmIFjM+&s^vFj(v3z;>(1m0e z4+SetaW6-$`nmY@a7+X3pL9hQZ69@N#3mL$~RT|iM@oQrW zSbU;X!k1{+Hi@??S5_5yuK6jbJDruCrICY1ySk-f-aLa}#n|UKePW$lrc$vCdB{i5 zRI?I-hxdSlwgXYO!GJ^7L-VxrALeLg~g}E{5+b4 z8jI|uBj{;MPNz-!V4=WbRmC|2!<)!L*UB8Rz&);~s^q|cDQpljNn{tdK=&Del z>86Z&Z|GG;bK>vsbGwTNy^Z55Yb03|AQTl~K8aC!MX-07oZTmb!pIF1OXU+zWGV{8 zn1@&gx%6S7^73s7Dr+>3qkfMcKl`szQhxuN|9loQnHXtg;88Fsf*~nG!BELYWG4S>5C&dy)}m7G<(8c+(>BfCZ6pPD;qR$+Q_r%v2$l z^tuYNV}+%^#9U2EKFksO(@ADzA1pmE0ai4GsnklUt=m%*?6pL_@A#7k3#Qa$A3H+Y z%^$~%Q92?HP|4x6AXKeVqHkY+`tw*gmnr@56;w#8XNKB70R?jPZ9`Tu>Xk8wH|=1f zmeW|pIRvj9R|*Y73;Uv)nmj5sY8^5_-yB-rZG#KF1{!Hx&p0E z+XF`ogN8otxT{5+s9LmSD>3sCVK7!O8f$G0w=5Z{36ss>)5JSJ1YUemHvN)G-tu!z zx&G#QQmCVVw7*YN*H&Xuco7LT{(u`JIT>{2ir2h!pK5XBfeB3@5dX_7m&j&FX9-@u zU}y6uPYIVdMmmVtX2~jpS_E7=nfM*sjCQ9mvfKh32GzG!&<{|AtBf}Nadsy_s-6>D zG)dGx*HjoNdZ+My>~~xJY~mQ?Y7>@CZAp8s-?Qz9-NdUh^3dC4@V(Z}F4bWM+9<#N6o2W3FY<5c?J?O!6uSd~gCN+6RC+iw6m3 z`fXi=I(~&^Nnag0vHu;TAVjpgBHpsKX`L2)UED*j{OaZ|$zU@D@RD zqOpzmZuQjrW+D>pECtnKlOA)j6O4U31HbyAVD$A zo`vJn;Zo=!r~7P1Hne&MRca#{dPUDl@#oL$rga{iknIR&Oaz_HZBKF#s-5|V!}iK z1Bjoq?@1buA3yRtLBo*M2`a3Pu1^`l7&}$ll;+0zX)|+$&BuDU^pny_rgMEuiAEf8 zf3Ujyau834-N*%&?D;CJr0!P!z$(SUnFCb=8Lt$}c@Ev<4i%^b!YDhZXad z5Yo=N4}F;!Sxv%0g2bP$PE+?ieNaNT^ad;gs+8s{79=B>-t9Dw@dtkxy8{&Yvq^rRqV>blJPcJ?&X>%_lnhmH4TLZ7SRz1z(T@sH zt;He)c1jW^V9&?YIis8P zsKD`5zglLN-Pd{|d@^!@K+l&GKvS9I9EPOdP`t6&_QG`Vn3K5hC`8Qh_^QTDcP8VK 
zmSspwkyofhO#HlU2ZZe$W+PKuV6nX7L6Pvwm^{7)GwqM&D7yXm^svp5jT^Z_e67T_ zsMm5S1tNpvD%024MAEj$sAfgq;*=4|n~4c}^z4BTgKB+6$4HM@4}s`V^vKK(Dwc|32gpNeRW_oR-~DivwXMq~?W;?6g_^EWzW(_)a+f@fH~}hsOlkH0|!YkGt6PYn-0Xd;g?wv8ruhMAUBS?qxjJKfN<3yg95$^Edn+&DlzbKO6XxW(g zJl^eySg#4Odro18uN1f*ow`{F2=`O?@q!DTos@Re`T z@=D1=?cvhQ4VrAFf~E`xwCiv|U;HHgBr0+4PBMYkAPPet7mmdPX6JN=c-&A5pa8vbP*3jq+?XPZ(C zqfmM)8vR|TO`#c<@v44bJ4Kefughs ze=>~>qb+s7GMQ9p?FCU+i^k1#>SUUk4czU+V(Bo$`i44_lHXP@OjcWheXm9Y(W-B) zmcF@JKxUaDLT}A=v${My3Yrl6*$&QsqJ`gWHc)I1! z@iSt176&b)$I?f%dY#O5Oh$#&EFgB--Rg&YfdT^CfXt7Z0m5bRk@tuACAl&=PlhJZ zhONZd>sTzrS2`pXOWaE*dv5)MvJsb5UHJ|P8cPsbmRE3R{KV&^h4V+W987-YgPFCJ z092}+VG#HLX7vMBAa4%+IXKQ_)iz!@($ht5_f6~XPdO;`t#5r1grt`T3eW^$x*lE- zOz!pf2n(pR%pamvzBtEOF*McOOr(d;9;EXjFj5L*4d*Mj0^hY2YI$c9BK3tc7+O@^ zN$@n|BJ~*;gvH>6(ql|Hrdw~HR^Y^84@g{yEOCaYn`SP=TRVgW2&@H3NQ=^tG#IAY zHt|hd1UjP|sEZ*I55X5x*eHHff5fvIPLVT5ki;*!oH(dKLq1H)IO_)w9zHO!m9kcW zYYKPH!cwxocaM`d8Q^H#m};q#q5_!irM*yY!+To66{wbD=jC9-k7H0IX4$w{o2WgB za`cU#u{uR=q!<~~fW?N)Y^HXZmGM>e^M;4GwLxtnL&Fjl6l_PqQp*7)7{j=L^a2wE`fw4zYBtYdJ2Ud@VcKIRrJmOQI_3&;dZ^JfpQGLDNY z_SH*kU34Vf+MsbtEY-BvB8n!ceC9Z83WC=N+OYNHG5e58DksV;=i?k21z6$Pq7M)O zMIS{w;?%vN6&Sag9EF+l#TTaWK51n{j}I4$vr{h*S%*lVTGhW&S94CrcE!uBPA^uw zhgVkhsM=`pf;hFEVL`C;#<#=6Qg<~xATCL4v0g2{eU-W4&wq9I58mwPzLTbR!_Mbt zzg`f>)vv;DN(vko_pcK4Go}VET#!Zhc%4hxYu_d z|M0iW6hrbFa?8MCJ5|$H3>Jk6|XR+;Ud0FXYobx@y?0HGQB-`dvRgG``NEoR_T)5%!|$1c7nct zij1Xf**zkU?iG`Y0I}uAE-o$lj~FO(aPO5)sS03NmqIutPp7a3;1k)V!SJ(@-)Km` zfoO$)SnY57e6b>1Tij}hGa)>ar+@_Vrrnod1!?Hd7Oxa)Qo;F*M|XP57TmKt-C|vZ z$2X+q=!c(FqB6r__BE{Uc+t|srQn5S@aU%1`7P8a-VeQzFAH6lhi~H%)3Xo3Yt*x$ zHR2)E-`psqWQu5B1+sb$)d_lziIm(fv$MvPDzqMtoha{=VW1Tv82+s4r8SAf3{D|{ z#|jd*G$#>o$nd#NyIjuecdFJX&t;Sts@;5stvk$#Yq+pmQ$-{ z0wP`*pm-mUNqHn*#{lh6`2EGyflhy!!hJZiU-|P_>DmnWETpu@|H{`UA;x-?YH)PgmJiNb7!xVtaY^dW|TxG+7L zvN0-A@U`3AHT_0TG6iAQr5(=#eFO{5*p!hhW& z{9iwPJ!NiEAt|x;yv+Brl476#x;E+z$Q-2wDjx0^U7(%M83>yuc2IAw^^?vbKlHYt<3tG0n0TjM>R)kO0|xIJ|;e> zLgF+85&YseKcoR&x~X^^o^L`tnz9~$v^i0kU1qLL9x=n#(rFW%#BIUo5E}#_M0k6d z!GGixjm?8cz~}rdPGXkkFSa6+U1fTE8)RiA|7s?L3vvu2#=EdkE^abq++4WgGt--= z{B$t+P>k0TO1y>3$}k)*wIVevARdnUPA_9GeSJOt(tyW>*@1ZBcv1*zBEFbhPu}Ld zW>QDGI(Bk9X9NIhm%J{Ubvz!Ci~C!VaAG%Gn>!fUh zB7lsYVJ3{Yb>mXmKyvwHSSNm|6^BARvCYSi(uesVaH)AkSXUHE4Vfs&@1<K z80T0%Qm#JI?)&?4xX|&x`)wb?=3Am}aX`ai2>~cJrY~B00{X*{H7pG~cUYKsMIt5^ zN9y+ck{$q+%W1uGk@zyTC=1$ba54^Sf=IwB4eGRJ1vTn9g9KajtNA-Rj%En}5^bzu z=W9OZg(5np4`ggNu-ZmoTycVm%o(XQT9NTj7N3Ol!nQAoHMr_ZZ4ZK}u$b!51ky{O zUxK)=+_1i zGfKa`kN4n@Z33&0vrq`%D;2;N)-D8Ge3&kRCOv!90)_(!s!2*7giv;@1zQ-`3}H#r zA#7rc9DGAu38HrP8wk?xtOOb4`6Fn|5tn3T9pRf{@l&<*6{_cv%ael6R!#~rZ(1xo zEA?Ik+)*_tB?tvi_Vb1h_V)g>kK9vPol8v)j#3|RTMLsB&H)8wegH^Ccoj1;w)fr7 zKfn9=-7meiz1>nXOsL?E`Q*OH=6)22g%IhoR(OmgNFlpAEx6O8(UjQVoP45}0-5W= z&(aP)dQ}w*p%Y9EoGt#dztQn(Pd74lax(ldS1a2gLX{cZ$8kUGxdW`%-JdH}-iT5)+R0l-4d+~QQWH-~%(T3ni(R|Vx$|m_ENHI<3 zHJj1q9I@*Ua1?TR?yi&gMm5fTkcu4WRccS-E?dq~H0Ps7asB>Ut z;5rutBrL7VL{x|Lc-_L&d|iu;`+%s7KhmAZ%Vq7^;+V8fnQpXsKYTPEqi~!gYEX~bNT#9sCnh*# zNMdV!c@VGb@aob{vd;mcWYVjL1W0k{lzL}$USue9ZV2=S4dh#`Zw$Po3t{C8M`{R( zbV8ie;Ccnn=DG;IR+6IRT?1*s3$LX?#$>~c<0~EvspO05j zU>yIs*5{<^LEkAtXf!EMLm5l-{m}`}uQnYxsPy_3M+;u(CV&@Z*=xPzYs@Q5Ri6wFoBNiPaG zUe7KPi#ckbEDOL*k?6NAmIi^MNmF z_3o=}l}VdFFqE;+{4=Nz<{Jgu9W&p#UWIGwib`?W+K`}A3GJ`$uEaMv>^TA`B7zo3 zNz2tfag`yUz|7eybdhxGP4UCb_VqG;{H2XA8{1ZnGH`Hvi;U$;+tlVS^9QixdmDPWJN<6#xyWsj%Y-+A0&9c?tl74S zn-P+ZUjXH$NfoA(@6DjI?gh!L$~II(rkv&NSbDM;M8okt)BBAdE zXZ6#_5@kO7%AWNmO-;R6RWlZsL8aH+{#fnZM~Gb(A;t1`tR5(SF@DR%VKVMwHV41SsEog~NJp~FgGZ$=hE7hcPvWD5 zXUkQf093i4h(W$cGi_y~7t7!}0#3FbVPb?6Uqpc%{F$q>PMx}qD 
z(3x|J2De1~RBpG-5zGuzW(+o_CV^0pB|sxHD-{F27QWSekoo#D_$bES=|z4-^OQf{sx^ve~z3Y zezj6yGT9{32tG>?bqcEnp*v^eawA&+4H#d!rLDfdg+^nCvlLYP1Q!!sEYCP$==3*= z9RSBN8>&Q&?mLOmg2*RE$b>A8nc-!x>=nznROGGDTmbLC*zDv^U0t~U_&qP+ka%+cSmm_%A-mG(*hQ6ArhyTC^MQ~ zdSA5dxF|QUKYAMrlclQ;%drxGUNwoeI+x-6{cruw@A!+_QZ@Y%Glw(*Ng4Q|cGQ^x zxe>c@okSM*tc8`tIME^^k#BkR^8C2D>BL;|n-r^eywY4{+|w3+YK& zRL9GzwGYSY5}I5=N-R&EJjq4j(`|$T!gp@UG&M09sV2XZpNJzXOwT1-r7NH z{HMx6eGr(XAOH3NP)<{u{{MI=@iUf=8bKtcj8cnW7|g(@CJ52nQX@nhTh|nmw$4eV zL-VY;j!BJ%I8o!iq~7Phxck*V{!bRc17iE);PNJ_pJRDC2<(#r$OTqCZqAJ#A(b{} zG4Z8cOd8ZuaeoNu5GiWzPtc4fuk87R!H`uyfC}hB{8hECRYD?!(Bi@~UmDB;;h_+A zu>hGfz&!T7#94K}kz6W{YI4pWAt%!Y5c%rUeiBKMh5@i_%+Oe}pvuJ3Sx}8&8Q& zEO2;Q>wK66mknr9;HC#*YshBuH|*VT8czY;lm#CWTVTbK>bz85<{T%I2jvF&ap*OL zRpJ_8%zL%~w>qoqzWtMUBMIFyGlJ%=Y9s=g;_oXPonHJmB@Bs$+Ih9Py4eW^TYniG z3B+V-m^+PR(wC;iIa{Z6rw)3!H^vqJFpC0Kcwb!3=St|9H|Pp=LRzQ>V7V;B7!LCW zqho>uI;gTiLI6-u^`}0QeIg8YnYct(w5KD*7`GQ@M289J62Czmsx=iARu&Zak=%Ms zA&-kd=MujJIl>HaKS0h8)qcgNpnMCCEQzaq0zKAzn>%Z}J^Fub?(W3ixS8X;6)LsQ zA6Ugjsf}*-eZzScPmoqA*%ep`AVq!38BocUw(!fj1_MIJmqG*Omyn)jfTy&94;m~! zdFYc<@_><;H73R6UUM&^RuvW|lEBBu)0)Cu?W|;yt|M37uie2=G`;Z(QJj9ZniNwx z=x4XwH2u!vE2xN5waD{Vwvd!8C$_R~@vL&{IWzNZH2p4UC$%0Wafx;+F@k5AW(Gk3 zgmxl1Jp8G`3+F%fkCA}V2;602!KF&5o4Nf4Ogvayag{>bFi$2FeVkOHFu zHfinK8WGGwaPlldxROxQZ+3|N|NOJpKN!IfEYVE$6>=cxtjr1_0Z(NZaEJY6D%g++ z^~(gCQ@?ZE{g?famtU2RdO~gz%q|ybXH-ks*C&S`aB?yQyGnOQ!eS-Ix-mK)6)m># zWFaIrR$fuY%%VXU`aC#9%v7i0Hv#+*1Hs#tZ)YMb0}qOCVJkP?9g?Ci^ZqzxrFuu< z>Q8Gjh~kdhGW-c&fu8x_u=3g%9f)Ug!oez0D83>&C=y{saD%Rbd*gl{ofk86fky6e zZirsm0Tk<+g3Ds6Q@=x=1OxKSD*o^m1pKTOb5%_HEWFUYE678<{=;bBm+gXB4PHG` z;wm*JWEb~@>-GSPhb1S zSHVad@Btd9uo-dn|75J@$NtN*mR|6`TX1wc zqhMB`$z+$=P5gr3A?xQiUPqrPTUkj6mt}I`$)noZh%pZFkY>YUi6_cK<>N$YL2+H5`XmP32uc7WxDV1tYC*$u+!bbl`@uJ zmg7>Qklp$*xE{%zw9l_a>fzr8eTm^U(G}Z5#VjS%g2XPJkG)Ck3J4;J?YD!!JyI6Q zwB91ORz@Q4?gQOfA)n<>tdWt+ORyn`v)|F9f(K7#d{(?E1&jB~g`<+bmdrk%Qi70N zwlOv9b11SotwFi}rN7^^$3CnnqLNk3qdh1H$w~+EuHbR59736w{&H^ zfuXWDC!=vDI1qnCsZ3+}?zO|vYIk5_b+$V}(a(Q&_p2bh!h%H@0jv{5F8;bdMXc0B z`sGBSbaU+p4@kbeSGi5#t5t0JLqw-B3N2GG38%%mYes(UhPy(_2jOihuN2V^Pal%% zD|Df#6xaOhv~aUv@O5O8Y8B*O7g!coiiP^@uu>Iq_NtFKYFURt3&0J7!Cv31+9KTJ zKdxw6QINa!<_~fcfGiK`b76~~+EP7J2HY9!1)bs*Tg;ZV^GFX~NtKe|B=myoQXAyc z(aT2uYbDmwhT%frXTH5f%HC1)t}Uf!++KIbG`S&=haB@J33}9IoNAU%lUc|XP~P@F zXuyvLg}%HW2u3ak-bExKlhXIy>}C)MQWxkR!5(<91W%tldghmGjI*<(q}XLK(4S+} zgqZIS%UW?OUU`F*tf_JgHam?blKmYT)zlsyQ z`$(K($ppoA(de!CXpb^5EAnG{9$*}D9zLNBm=GEj)?V%sqv?7j#1l$;tNC^eeI2jm z1Le@A1DRR|l)m89k|U}}YuEEu&w_M*mYyU8?B#aXTLjFJ37-3Y@;;s)4-?72jT>ZR z0^-@F?zZLIORb;=6(}T*W_PL??dD289`=5maX)OfHYA=49<=@Zr+0s2!uLoBL5xtm zM=5?l;CM`E%tERg=+QaF7jTx-M-Ihojh5I+%yPf z3&>xjA@;|=8_$oCL1qL5y&5oQpg=r_E~t$wiL;!jDl1J0jHYR6Y^efF`1G(eV_AHV zWi#WAm!<3uMB?=c10YYzg(cO`c~rxdJkW8rs$HI%+<3$Dj6~s)r5w)FmtU5~`bgRn zd;{Gu6BHK(Wx&0@a)lp;76SF^Gxay{64pjgLo}h zh0q(Au{aLOC^Vs>+`C^$kVmMCXXVV ze%~IEy$W!{csZ@jI?y&GAcrB{_{rZL+(PVPt@S~7!x}yv9)_P)ykas2DU-$|^UZe1 z!sZTGN}=UuZHo#k;>M|5A-9b|U~_Pj%R^Vh`6B|YbY3jxEkPhLH{+aW|2QoL*^pv3 zA0z%gJjRrZAC}o}(NX9Mch87pTQb#YwJou^P+qYP%9r9l-`G9e zo)n*Juk)2J>G24}C4iNJcw1apbSzBGrKaaJ8dN+WGz(l>=Ian~HYz%l=8HuFexa?2cPj7; z!tN+916`PY?LTAC1Jx&5ACF@I66&US|Hn!O>l41O>+mcEfd0iS!&ja`An|zYzYk9-Tolh2Jc5h?sGj zjKs#v`B6fOCu}{2d_X+m{rd90li^~2t<|+=p!e10)>gICnb3Y2m+bnWq-iN4(H!lx zSX3fVOpO()dJnRqnQCQqbHG&c@l!`lY@n3iex>|L>IW9dSp-2oP-Y(lAzDWxKqqGi z!nG+`jYZ26R+$xjk-@i4XD!}M4GP%pqFg#FD3{pf*6wQX2Ap=D+FhzKy?d@E`;D(G zJuJZSmfIA_(N9@;vgq?;I>z*Pu2^S!U7W#JrI??F6(!3dx+rDqhNYiEm4OwBjSX+zKb1rzn=A;jzNco;{9} z!Ym;#;wyPhJ|cs280)EypFDbMYQ6H%Bs$h`tAorStG(fEiUB?TgV8#Ejj~uN?0c!* 
[GIT binary patch payload: base85-encoded binary data, garbled during extraction and omitted here]
zQM47b9G1foRmuP5XT1k|uXWO{2(lBvz305cp7+{&|2C6cf7^_(8(X6j5$9QlAcF=s zyUv~LS$#`(Pcj^_#_#{4{>T`k5T-yr6J4%k1Q6EXG~{>0W0V^^Y8@`H#c+PSBs_xb zi3s(`K=j{-$Km4wwYom#kzO3!njp&);X4gr_-cAuA06&+!w7fzJzc*ok>+%5i*JEWEZY;Zy9|N3@`1ZPVr1g`c3kgmeg;EBOy?gr) zr~kG-I{=)4i&oy63bppFA*?j(V2CVGgomS(xyo*cABCB5lMFU7vomnQ{&`OL)?x-6 zG5qawb#6hGn~HaaB5o&Fcx+*Pe9i-;c6|{^-UETO;kP($%-gE~TT{Ly1IpI{ye{?o zF4nI92r%pqfC=U&ttMGld9b6`g|!`fMUvd>^!jgDJ<%lb9$?5B6XycHD3TR=u8&r=Jy#tSE^= zTg0)Ee8X%99A30d_ij@G1FPx>tUV7pmZP3fNhzm#z@JXPTbr6AX4%B9JvGe^G{-1W za3ni8Bnruo15GHX1<1lZFVmDy2XIDFNtHd;M-d!}tQz%O!6$-ZTt~hcu!;<*;MS1{ z^>Hh9H7KX{2nw~1bna;Ws^3kydpdlhQAq|CmFy%4RHTE=vMB9 zgVCgbESO&dd#$Z68Slt+N8Xfi89H8_M%ZTejlxD=ngk0HJbrO3T@wTg?o46%gd;1! zy6)cD++{cj_#e0&wL}F@oeh9%kr2Mfha(MLL?jy{_D1rqZ6TEu3uYFoOZ6k+aY{UE z9A**BKgd$p0k|~r9|5=A;CZJra?@9!eFp?n2-6vF9rycaQLR$Z?P5bZ1rY~`=F!8n zjFMomsN`y13YPJ^+nW~sN+RU^| zA4w-bn)|uc(cMwU?MdN`=%# zqg?{oO7=#guNTuV=f2#TWJP#rAaxj1+tS?E5mp9yp-50dVHK(FMJZgCY7ELlGkKFp zduqtcG%w*0n5L+n<|B||?FoKsvRu2oSEx!q@e<e|?u7v2msl_wz5nIm64}nu0qgS%PPafr4{M9-N zHquyVCme}n5k9xN85Zd5mJ~^SH?3Ucf&Fl?}D6OmovXV;NKd<8!sA>J8HGyyqcPy!p3kMgQ`WlB? zFF=B2H*ewVh%U6~`z-tEV{Ct$Kx=GozdtL{mR=XWH`D)J5<(4oY*8 zBlfMH@j}5n9-{d9QIx-s@>hyYNlzcej;_i-vitOBuxJ9rp@_|?GuQ|&l)@G7m8vOq zpj30+yf7`#HuV?AAhm>-L)hMk8XjH;TKn>df>!OsR6h?h3+N;10S?h8$6*kCZE5in z{$o1#Pk!R9k<)c56`{xT&FrIl?63g(@ON`4H@+HDuXcjA2&5&T5RyO*UeoCBbwVc; z6L+s(fI&n;{AQB01EWiL#B{o!y z&q2}go*Biljc52JcX9~os3y;xqG*0}*DMCPH{CLc;y1xNzO~su4bg`GIv=tx4p)tf zY4cBRYBW0RRDl=22HUjGv}C} zP3?xM`$db4(VE3e8r`FcHrPO$Ane7TqPBd~$z7t(f95}-QnSqgaY6VFFN(a~q=oGvgqfxLGFC-Yzr~q1jAVoT6M)I$-aj?1nCn3na)h2I$p`;po09 zc<7)N`@6RkaMUS|F(F|Hsi*>wN$&?N9yMVwQ9YLJHc1loBnh3B&xElTu|cSuI=BQj z0QnIXniy`Z9owANn@ezIw@j?CoH~_Xm}gS$9kqcFtlxLS)f6 zn~Z!q-={K_P9mgvMtmB6NR;51w81*PkFxh;0cgynp+bkh_mnCgT!^T-@Glret+3Va zBVVmBhC;@sOHywJ_^G$1GtnG2f+q6Um>}xRyNL4Q4a-xpfKtbLdMvglf#ZvDR8+w9 z$)H!x{Hz1xbT)yliyvrA1I2)NDk-#Uh&;38RgP_;BXQWu^GkFf z=#*L>$}=82diZzzb2LP$)jXJ<`XQHaX8?R;ZaL2hTeQ!=#$ydThq#Z+@~JU2F=EWc zk*fEDV*B-XS_V56)E*#?m^^vUGkb*ml1qV3Bvt!2Ew z4rsuQ>#OOp&kxr= zL2x9N*q@s?C~2LIxYRgV&DrX9fKc42$%*s2O4WNh6NZCNSO)i z&#EHhn)qJBjNWjXSScpm9VD@TA}SU=Xr4=?u=)b!{F9siruv7!$TmWiBBVF3OkFf4 zdOmRH5J26AVv&7mR8n4qpe4Q>u9J+rbtLnU@{2IdoB@jP-PsXaWZq%eBR2+N-x-Fa z4+(`0JCSFzT@Myr@$bdWX^Q1#*n4ml_4(XpiZvQIDR{6%Rh)0sPl(N%PDqEHbSI8a zr;mJ+360yDe&MeuzxIhg#~{Q*A2HdF~RCqdM6pXTn$x|NZ|_j}Y#Q;31rT-aZJ%0^o7uWA>));{B~egr1D|)T%Hv z`BE);RJNq~zuqHAzewL+Av;?j6G%lP`FJi*2TG;$pm*$ys=zSJbdoVAC#F4guR=`f zCKp<0^mt2yedbd&f$)me9p_Dnmw$2d-&Ftb+EfQ~KxoIr2W&_=1wRU)PzEASS<%HB z2d&QW^K2-nQdB8`J`Ba(ZCGOKEl5M5u*QttowiGk07^Npe=jvS#>(mfTC+$|j#Ag0 z!A|PApdtrDsbNCqdfTV+2=JM#J_bJr4n|GcoyS;s9_KLRSu@Y*%~sF3ufjg86d$L^ z7i-bSc|cdGwv|-*z~kBz$1J#P)96#D=b79-@}EVZ&;UCqextj|x<&rrUB8YgB@4ez zuv!vT{s2zI<%>G&J5Q%wuU#SA0CjW3EqDIW^pE_x*&B$%lrQ1-3F_724@*|iD#Ld{ z39pdRA^+6U9uf?xAlHZ~k--Jz4+S{)KFEh;;R#Vx9M<+Yz$HBID;wR3O+aN7=F>!G zeG&h1`jMqK14cz$%i!xR0p7fp(ZHZ+f-nQ+i3uMihdOu5^Qp|XM-)Th13{sE#SB<< z)Ji$89k1UZ&mxE`jnORr{N?n^`X@mOAL=k(v4#5`BkccbF)PE?r&9b zJEO1MV)pF(M*6ju{ljmjcmK%*@RA0x$*zYNj3JMNr9uk`ou$s)Maj33mllku@ADEf zn4?6Z{i^L@`FNLjwAfpzx(VK8Nfb~LH_U)k&LX@u4+5%JsZaL$waHAGNG`8_!T4(w zGV;tX2Uk2AC-@1FMiA0H&a94XF|%)!y2rhLv0-d4{eMbSvPo}w4%GVkXNW6Q(z24) zRrVT2ahHY~!WH#A9JpGeBeWIlF7uMFj*e6*(e4{?QoQf_Jb^8o zw?!Ol4z1R^6^F?Vw!|aIj`=)qQt?6RBVDX~L>;4|%7NlPJ;IV&G~=UZp;b6Jgn{Mp z0>IYBxgXMtLuBs5?KHT*vb&g}DvV^W21UC{yeOwG#CLu-@uYV+E*+>33{|~<-4vq? 
zyoun!bhmb`KKn1~GCy$~4ndlc1!YL_C^hY|XaiyO+*&V~`mfsM-V|M}Ft`;p zsD~H&q0X7mA>M%>r5^SmOfBkUh>iGB+%)10&xG6Z9@Rzx3$Sw{P*&Jclz|>}%`b0& z@=Z7BKxOD1JQNnS);m|*B+6lj24cjnyyW#60N5-vT|NhByv%wZ4Af?K00)44GqKbipQYLI#CtOx!cdq^i^pQ#oGXeduLb%@o-#| zgK4{-B99_Bsj>u|&!TEUB*S2r5w$*FDhWaB16(Xp9Yf_Z;7nB;uDWirh@Q+=*JrZ# zfE7zgx=SnI?L`I-QFw6Vl4Bu}IvA{eHomS?E*rPLUZ@mj?Ml_KpVuE0P4Tw|oI2B1 z5H3vS56D!fzg3;?3DQMJ<_}=nvP8>YETwej(WyE4^YL`|VJ+)9sg3*y{KO zHs~f2$17&qbJ-bldGTIy7Cjtb&TUj;cX)dzm5?Q+sO3=T!Vi)A& ziw-ttczz{66c5Cg@UEY83y94$AGgVZ8sn_ieXFp?mE<-$6n^geRhehcSH?-n1|4Lu zk?kZSe7N6`|1NE_JKd>&G9cD0F-~sf_9ntlhe}XFFd$8M3fXa!*x%`X5#l!y=2I!= z-}nfKPNJ3}86V-Ofl562Q?-P!p{79U^|>FRflebl;&WEfR{*dMg-Kq7CV^y4^s@3x z&!4lstb!yonZ<*1)9R4@TIEHjj>b}ckssnJCy|2O z!BlK+RAwzm94XUbAPOLt?Gt$kyNySaAm7K<^YsnP7BPi3#6`;2orcEPFgGcrRNLoc z0Fmlo6jkwCr~+fX-rXnDhra4dq1F9RU<$9}-N>u>SW$&=Dqs4{<;o(_dG~ z!ewNlZlgqs5FWV=CpfxVIS|bte#}_8_;ss_goF+t3Ylm)tg`HdH*${Jw%0e30kK|T zmn7 z(7F$gD3S}K{}cfPe_d@v)~DB}^*WGE*|T*dqhob1j#RsmCdQ~IVpH)JctQZcjVegw``^oq}b~_~cn@82$Hb&|UN7v-~4d}62g&e1tG`LM_ z#0g&c>#|EdEaJhGynhU>w71M+Xv20tiJeqdhL}y zU39_uMo3??E7qr>L)5;JWvN{WgWKK!zCq(g_*o8?fTv|4pb9kQk(sy@eZl{3=o@(rzP684NIL z9ixg@m!DZ--Y@&@bDFvD7i!m5WBYnLPIw%24tM2{jEVHu=Ov z$O9v=H?-Etb%=bKzUpgcGCw*S*!@8}e$E#>$Di4%&dMB@ccdCfc9~;vwqZS%kX_{l z8u}=pNin-EX!RkyZqbK7j0Li&`Xc6K@u~or3Gf+o;Vckw(Uj?1h~AJYl^$j%zN|rf zhk@l{7WX`qrAUY$*P-3<6b}hBnOSJTt<)cLAbf_{K4C$eu3irX?!P}Jz2%>Pe1<9` zXm1)&ABPj5lSRUCdK5n+cQA^58u+!aqn9>^hSev})Gd_|zAc{WX;(uHSNC;ooKGfB zO^Pg7AB@z&^LD|d&=H}fD_*-0Wp~=G2P)3=p|`!y)IndwS1*vrsjN^bue~)4ghe;! zUFF;Bbx2b`Xk1|G`A?^hO;FQB7T+dS&IZV*ZbnAGaY-=Q$J3{^D0L0 zZ)p5UJZySn7DAeyD-4y^>O3LGi+G?J#I}%i5kZqZ+}ejn`FY?PMr;fiwjZc#MkSbK z!FVA)_AR}kxTz2LO2RRI+}1HaPAe;8M6A^F@Jj;uvx8rYMDyj{A~Nfru*1Uv+a-s!h>%{N zQ>yJ13PyAuv>|F;y@2;>nQDT9h%4S$O&jyyZkCbAakvQR#u$1F3)X|)Yk~%nCVS`s zED7)gdgZ1!KvAez^~R3sF#KBYI6KFF1JiG1d#hC%d`&J}bpnsco!7|JT#+`Zd%5 z#mb+MqYXFQU7&BSn41A%y%kZj17mEa7v~vy|o?UojF_TX1r4y zQF1XmYA2AFQ5vt6-18CMhR?ztUhPv8K{A5d^!XgC7%Mff09Cpphw%6Xg31YLLHjbJ ztPtl0=x1qsLQ}!Ox2HR`A|Rx4y6Q*pcKp2W9|v&5_bR+kRyOXSK(C@k6bsw)j5svA zCn)|GL13siWn_mlU?L~v5OG-qA=11p3G`uhl-cah?GJpZR4e*Yx44Ci&HB*g-fF+F z293i8IBsu|ueoV45rs}L0!&&tJVPWY1ni0#GI-&B`Tgm(AGw=m%#*2|OCcE1QQk1} zs)%FR`@s>KjBm#ndr%mgAJdhrRRVKVvxa0;<;OUHJ|lF}9W4v;=)uC$FJ zi9SnI!IctZem36edQt31pxZ5Z8}Zln&2f0Y@~7tVXcg-gD(B<9HQzt-BBk(QD@5*S!Qf^A;Jb zN2{ExJ_aCcmoN$=j?lc)_|Uon=Rn3Ufjr~*=*qs?U&a>e&`GhHc8 z6}jH|GDH~7RQy`B_F{zZ9nxU_>kON%BVdIttP?aOv>7Cl;Uaa#lPTlN#NiHQGum*q zWKea`rHN|)`Vt=)_cEJ0+#UJ4A?>`^ZP`M|C$E^oE5Z{#A$8TIScm7=Qb72@Wc~(r zJV8X$!oi8EmzoO|mhvBEYh7IR!m;vg-%&QV2s06-G1q)H{k#?+qwVd@G%qG8~|g5 z@1X8Q^{4?H4aOGs{ra^3tl+9SiJ>&sP?-FnFTDsHcTis^L4)IpBfP8ftM#`TnU{3i zT$_GSC8M`~(BEc+lj_*!0`2U&L7|U!e|gmIBC>d6PaLoKKo%q1mj>gow(6;BQUrsB z%&sQ>EjOs)AaJWn=lT+fC)nyVA{oT`GR*)BhC&`4;L$f0wveFlGV02)IkTa=7^2iv zD-Z3j`~*sOJfjikDd~NlEfU>)Nw z$rvuqPBb~d_bSv#xRoB-nQogeVUr~=SvWFP$(gd`LlC0sGzA4Z8YLJ82P2z84%|Q? 
zYcuo-Z2})4VO@2t<;T-M^wu!eBQk;bLHWdqjfPOCyiiL1keW4`J=&KPy6X z(I<47O@f4&J?kM55Nh|l6Gyh~E+^P`z+}CVRq=kQR^*`lGyDd~$3K5T;PN^|K$X<@ zq8?2Ty`@2=b!EfeD3bo+*YvgE|NW(}={9s^Ave2lqqna=0xNgcnjwDmWZ7V08bkHd z>3?}ME>$nmd-SsL53f$q&X4Lx6_`Tu^&w^w(fYZptu|h66vSJs}mDcGSF4G&_mbqhE z*dBeJ&YDu_{TEtEJ-gvpBu>eMyVH)>b&D5YB|=cC79z#=6TktHJg&duDQ5T{b&$1cqgCt>6h8l(ZjoV(A@mQAHla#^K1~M^ zHd3M#pR|(o_;iTaC_3m1oe`woJ>l@{3H(B>0uz?L>Lyn4>d*XjMUX6IR|NUt-TJOP z$CUUF6;&xp{Azk+fsHjb`f+8_^(OH%q-LGM6JFNZk;lJglWEoA9PD2ZT&&NJ`b%rC z+oS*ABC2*gSLxKMKA%3U?@|kv{nd%>MHX>1Rt*=Kt)&Gae7$WA!cRGqgKmO0l@+VH zju0B&G!Un@h0lpV2I1Wy=w|;?e)$r_uQ+?qCDKuSt7>yZ{_3C5tM3#G+hbxj4EOYj z-M}7>n*tC&Vj+Km+7yg%CO6crt2j6vPLW8mAUnK#Zqsp2$xDM10Bmx<09oDdq-%zP zzNTV?+TjA?@`xif<=14X5Cal1Po3HvgoDNvpL#1m$;LXuh`L3H!KNEJ7cbUdprBdV zmUZ;!LS3q9UdETGKnd%C9urCoBoBhZX0C?s;~d+!VSi%$aBrD%(6UXbsAm767=t2J zKpk2IZwA!xRdjlh2c&w>5fJy`&5h_a`#r3SVbo|mZZik6rXk%=rp8_qpxm#|NIQIF z$#R<{CPrCPBkk7xNgAfr4vNP&2RaZdEG=DdWs^|t@G%>y#Q zShI2ZHWb70{&Bciou$S3>WxHJEuF}9s1O_$@rBoRGt{P^t$19z;(ODZ^SGJ&k_dIH zf;)a(d`$N=GGwiaG?}Ss|Fh{MA3W0|@QjcnUb0Y%SZ(zvp=p?kQkOUqlwqID@%s+D zVlO*bq@3w}?{0t=Q3@R<^9}UKSb*V;LJEbY+s^1n`UQ) zBAVbxSa_2()z&6^DO_3$I@yPtSG5YPNsY&78oOy~(dQweZk73LxgL_`V;E4;Jg5X8 z{nrEW487JOR0tI9!$=i?VM!T+_6zlJiDP9wKS;w*x;?Eeu&&x!^%H9dbOyg z8@_p>P>CI#g;n_o1Nkspx<8%Xth$N>&pqN4Sohd)!9n!ka!(ye>`rQYK7C}pq><*c zFC7m@X0$5?xIuSxD4%%ts11 zKnIe8G8c<&WO`jJ72m?NARXrG z*aeoPs%{?=od=@0Yk+JQ2XXWo6+sdw?)2bvWX;6e%g&vEv`U zOx)Axh0+Gh9oQ%+3%MaPDKjBb=H>*>=Q_Hy$ZSuN}NzZ)#}VG1uqY`{<{X)$vZJ$?Ynt*)T9An_cW}KMl}{F7Kh6%Ykkdu)PXVA8p;Blc!IO%z z<#Y#v52QVJl&;uK#(QBRzSj>OiihvQ^0=|wB2{`59|&T4wK^FgL2rrt%YdVqg5aYO z^L2-TE#lbz#u>($rlr+!QIV@8APK+-5i(TSW9l(5?f%Z3U=Y!CuY zPvi0p{y=u|U?^}py637Npx=-(e|37wvn{~F7wRfVBRDtj)flUibc!U_zX_l;`^i6A z+C^A2i>i9~dK2#08-jp2;ho+97yTAMY5}#{ldSQ{?c;sSc5Yn{+saM`boE0Eylf9d z!K15M^a<2h|K26=8>de=?m!->FPZ=b#?VCZX6@WV1WhXzB-eu{kG;HeRnj01>X7Li6G^r;jbS!j z*MF^6|0U_anjU)NoYd%jz6p#HqG<&FT+q8U-Rh&m%~^o56h{8AHb!0@r@)Jp>7*=c zeOr^4$=BAJ`E&e9vG_1^F!qvgk-8a++!>WbzTrdauYl6jLgZp>Jl0vIhRy8*CKR!) 
z+r|bFqGPg=-P3_YN?hbC60>kn$xpoAg}z2l(8sO*N=G1dU<@zyW|Cnl!+mJg4YT;l z(4cKFu=1;b_nwLeO-MNPk%@!=y=7uI*HJ~aRy`fYm%J+T7L2FH#fh-wg04aesKEw0 z9$Gfh(21GWTapcRkqWKU(Q8CV_AQ|fWOE%38b!OShJFM2S*%CrrzHjnX@LOBOhPN7 zu#tTpep4|iaygq%(q~?5=nL7y=vzfNdK+6h5X-zO)s0**`~jaANpnr`%9IeskmL9& z3PvadsWCk@^w$iz3nFK|?zJK>?^FB9e3(~_vP!Q&Dbk?<;)E1Z45d3@Pr1aOeg66M zfzJSZB8Un9Ed1d?G`yN^EimiuVFR~iPz#;ULA4DCxWPdi)Ud;`q%E8AiIf4uCw!>d9)Y7eK<12%+!?rH-Ld3?dr)#O?ttL^)YlZUnUa9&(4!o>>}Dbfg-Q z5jb}K^bRs}9X}6{@B6; z$xaT2R~504(88X1n_h@cd?XsQrUC!GL`r+_p!COZChW8sYfVY)5GLxtz=c8H< z@vSb^KpU?^5y~<}N=A@Lhre#|{{+)JtWJK-gjBfp57Y{O~yxv5M z6?70yv+s)f>;fqfNS;>_&}V*4A!`NiM`+DJHJMx>Xh&ZmZs8oK6 zU~U4YrTe*fwN`yVF&NU%BsyIAwu3*PepaL`f30~!0=532CKF+c2r~z!rxhQ;^XvtO zn!x{EbjMTlkjp6E6zbJhlj8sW3wBYsFhsbMc5qiF443*e)j3qtS@)^&5mQa-6iJME z*Bgx-4U9ZD8J^J(|91NRFW&vFRT9*q;-p@fSmi7pCu*`h91ft>31K$~QXdgw)eptU zrP-|mEkho35=e=0>e+0B>Ox7a=Kt11T4eGRN^dKWKZp?N?EfBU>5ZV2Dv_TqIncMFrb1v1Zgl zzeN?-sM*M;;2;w6$8L)VdXiDQ~gRi$W=7Ac4{{m?}F@e}gDfN64lg!fg9~EAl*O8(R#BUEb}=EcMXrwmQ_= z#ud|GQ+8A~LC5t|>1oP)r9N-yy57Nz#?uxj!*}on(rX$c8?)6fCSR@usW1KMBJm2E z)j792)1BIB8XPc$lF_dO0vF3KHAMQ`_up4j}kw zPT}M7nRnA>1rDMOB@xEjtrS6e)GlZMjqvpOZ1xB0B|RNAaqRnqh*d%ztCsal61b_C zY|c94&Z9b?5*?rK7IN725A2dunpu(=*e~!G2ormasHEe%T@hR4cydhZZAt@*G{>s3^nCb**Ac8Kl%wCATC5S#{so~8KM zCh*30@k%U5GN**{OpaT}(=)n})aE(vXSs!ED1uPGyCl3{hRyp3?ODqo6)yDyr5AYz zD&$(gxar}E6sS+jR*6*2Tp(Uc8bwpBEePc%PC*^n|IWKn`-jbWBvVMXV0r}o?8|_@ z$hVDnOvl+MllcCBEMZ90v%Nm}vQKIBnCR^0!|atu2=Us~h53U-Q1?(%g(gOH^aideIf|s z-Nfq`>7_lEjlti7<+->evT6q~(|&eKmNr7Mvv(gQX$5|fu_a9?T(u4M?X>dYoohfM z@5pX82#3r+VzZq^WR3=8bBMvRXmo6fHZ%K2&sStu>KGL8*!9JZK$@0iA|vLoVu3nXW_yi+v$~j_1a0{cudv%Xn@Gff1zGJ zct-UF_3b4f5rj8)*ePm)dMN($)-Q3wh%(35#o5R8akVE|#mBi?37lFE7WxZxT|5qd z6t?%{#wfzrp<+gZPwP0?rZhNQ5z26b-n}J%bkW3q4`R>WXJfV&$*9Hb5gejL@=A3xt$F9FHhEf?-s2V) z5VoeWiC!DW;_mP7-ZEAC1Fux&*+&{N)+bN&6zeTm`DSu+8wwiT?ub00Pc8A<(F%@tZ)*5~|jf6qO5ql7U6y z3>Qhm1Ey(lHY&}-w@@;=9*+$sr*34h>rjSb_1Bq?#N8O@FnsU&KAGcV>CH1@XWr84 z5S*y_;Srdo{fP1t+Hi}a5ol=xcCNGbR|zUm2~ny6Bd=4r@)sf<^NMVLBG~SzuJ>7+ zP3rLj&u!XBdNMr;Ii_-^g9SQh9zc-l6jM;SN`Gt4^P@n#+(X&}KI`NtIoK|QC*F^r z8TU+BU!uteDKk#Z{8I~$R|N1-qlIvloFyWX0MU6()u<|@k$km$v3%%7LKu+2z1K~FN{$x^g&_h__Y`LV0a4xu> z%gMi@eMECA4DeujkS-)ZDGqoDuG$AC9)P5Jgvx?(xx%12#K4LE8fTIn`#~@&1wY9I z9rUBTMG@l;3|=fV`V3iG;Hj#Z)OV}q7oey%=i!0UyRIHZue!*3=~P<_P((>EA@n1W zFeI7Z1rU)%g&j#DUV?I|C`NvJDQZswu!RN(B_;1pomavhN9O_V66Aed-N4XBCd$9x|ZcC(|dJNL-T2*SL)$3iNUt&Ok0sV>TY@m`HjiFXUu3+D<6IL&hDT>?{id^XLbRJzX`&`8`$?F*cS>!SkAU>WWc4+9az9TC)xYaqFBQd zU{buCg*$&H1+u8`lzH7bV@)7hy|migqDGI_J?;?%y1Q;4BRnFlO6pz({o~Ayo52By zQ;-_BOuosD)Zd#I{r=h7z_;?l8n>E(Jg zQj1@7b9OXp3BT)SQ)E`;JJerv(-v(j0{)ZfPwK+x zKD2s8Bqjqp$~&U|yi_#9S>zhAOBCNyMXEnTQuZXXNK~PcbNAnYfY^K{hz<{CFCkpk zF_hiuJH(?!&+2eymzay8aFHL61(dN>6mslnbqdX~e{ULYyy6c1eFBJ^h(4%w8w@N# zVe%r}&p9kGM)*OajLZ)b9lt0t=HM{VeKtJz?kqM1N+U{JrugkX6D18us zpvaOFjIe<|DmYI(jF?%g_H$bJR?7t?M6i>{tEz<>K0+;4CQ=lJ%$B{^O4iNo=kN-= zWelDd;iiCkJCr#Z@(FDO-!x0%r?iX3nYQ_&Oyo(sWVa8z>x7kCoFYmDa( zYUCuH{^DF@R+@KYpNt{Y^*F`5aB z^f@pzf*Ey_jEaxLd^JD;QU{l)?gy&~@bfm26=L~^-?g5xs3i(pwjcV(X$ z!)DBQAp63ntOv*fkgc=hs)f(SED(uk|B8uta9Jks0urbm9@e8@gi@@pC0b|a2j z=}5?TlaDJBgwkrY$#-u*pFZ|B=CKT{aWsOe525NjJ^9_+Uy%LE+EY}ADuygZi2lIx zwdS%DyrBEsbTQwPr!ENtkP#vkBox9LBy}m+o#|eEi{@%ks3MGmsNR=f+1d6m_&^r2 z%o>yiRlWjnFY|EjRdNmfUw&--((x;~GUl0a7K0kVTX0kc)DT|+qPYyi6p zYZc3wM_C62X|j$7A;I48yQSa=rrne$c(tHSR1;QjL^nk*gv;w=O8!xTz7TaKu)O3x zLEvRGtN|QO#I|Jb6o5XJsAuO;Zdh6LJ{>-7_@}~+Z3;bgen?2I2ob>G%=BwA8d=3R zr_rV!kObcKQAcpLfL9ZUe%48^-31=_&O~nl?^01#8H6Spd6^15ggOzs4oV0epg>jF znM6pkd<6f0-4u&%^&$Uadh(U;od5^~RDab(shkIQbb}2~ulN~)Bs-8-t&*QP#)wB; 
z_#uw^bqsV?YUbcY5F2aPoB$unXcy0T?vRV4A__u}Qvm^lNH{>=`rS0p;5_}LVc_vC z|ArurAaV`f_&H@U{{E{0T&eRIrbn1!3|z$7=u1$ll>D`y4ap*q7mcq0Q9(cbwylTL zLqGi<>`|PHWXOmm<>F?fLwW!__j}*HRm>o=i^8j;nCekD;8~|m`0yFg(lA9HU0#C8Nt`o1nr3c%56*t_h!+ zBY0)cTpLc6QV{MIdn{41CcI+hqZzpk6P=Mq&Dg&){f?LZijATo+Wzu{m_rkY2On8ZH!0S-IZ4xAL>^}fG?D~(vfa}4Jmd2_|IOIa2^ z9>wWLqTi1m0x|vaW3snMqdH)q6WJ&Ss<;YR17Scw`t?~@DJNTC%)7fcxT@eqR(B2@ zeWA(bstBgB)#C_h8jZul&RV6>UT@tW!+^=75z@cz7_dWxn}@x4-2|YP(mn?|{}T)e zrL7(*tM9O&>m~`NjGOd#!~pBedft;+=;P*WxfrVHie1%pXdUIQl)(v=*P#QtxW|%Y zHKSAO%ewU^kK@*v5U!-nReS}?5zx@&Ix3nR}Od3#oBfs-7@P zD4G<0A!7BFoD+auw5Yg4aKT%+kK~TrhME=$Wnn;qvN_vDJ|wv!IU%m>Sf9F0JA18f zWi8My_Fu#A|8)BAya1H)IvkDlwDX)GVp06NznV%|G9~`a@@a1V+rx zaEjsl++yQ7)40=wMeki0`7M7tOVl0`;DRsx^)cHcxQCwGt^JivN}vrOKOsgVUVj7f z)5XM=Mq4A~kA($lWt6q21BI0%HJFI_Q@n!Z9gllYN@KH4CGT~7)WLk@o89BseDIji zYKQ41-S{Yo7!w^%jA!i8d&>MztI7YS9xbPT)qaQSg8oDOX+V!wK3!m8WJ-(PX;)eQ ze31jd2H4986Go<#+`$DrIWb9z!@1uzHg4>UAOnq#yoATo)^0JgGQ0lh7b2|l{wRU0 z#r8Qd7ZA&ZT8G^ZD~+I6EG=^~I3*{cK(T5^l&C(9^iMmIs1~+h9R`sYQx6ewnJ2zw zm4ZOw_2GXo{nV^d&6R4qTTc_>J_?dTK5P*qY?~aPD@(vwLlQnvkIvA_AU^A8g2tTs zf^UuBi+_GR(bFK*9+mF)n%2uOfT3F=h4;Bvlx4kGkYOB_Q7KsQ21WzWVh$K?a=3UUjIkgJOavt}D=Rnfg z_cRZ6DC(5t_@v&9OBCJjW&xBeNR|N~3C5}AK%jC>0r`jWInk7Ac)}k#zus3fz>`of zjNZcpb42Lf+pnf30+C&WOt_p;TId~<-F0Litg<_o6LqTH+(uf|SAO`-yT6;>{l@a( zMjzKP5IjBTyoLZ_dr3%U!EdvA`##oJeN3T^gdd3)fN5MG z@`20f4ug@{GPlu+RDWGHWwVY{(SkqNrohfcr`8>5E*o~f-^-J5xih+kvnbOzEB_wz z5T@h3@Yi;Eg&IQpH#5M)#bv6wG^}dQ`o_gzUC|T6`Vx}otqA$JOXz14)x!#n5v0xf zs$2X*Xxly*>ADO>pxn#~Ueme?0xZ>66s_5V%P;yQD6BT(e)Cx|9QQXIq$R)v0(n6Pc(LkZ;ir z61f_&A%TGL#TF1qaUgoj??1$UV|qce`fX@-m2+A~*Y1nB;2~7vNGWOJRN1G`fY3wT z_cmATKTJRT4|Rr*Qh8%PL{$A4R8o@UZG^u3&G-Lin*XQ%et-JThXKh1oI`Ej!ct@(VTnDS zZf$v&FI9#F^0)X?6QZzi{3C|s>$bkgEBG%fC@oy?=WnJq;66jNm?Lf)F0nVC#{+I7 z+rrLUnp|xm$LN@uD-tvA0f4(3-jnMI#Ag7{Bl-i6%41af2rFb*% zMaIFE7Fno4QG~VCzn)>h5mFPlfxE63&u)=Vn3VgG=+}EEX1012{7Cdl#2<%#57O&5Ho7{Yn)CMu7BIN-`BN z)g}Fd>38c(ga*=9xG&3KQJ1Gg>K*`;B58#g712=SyLGY8ML31?K-zIS^UTsSPuqX`k zqIIvVIB7Zi2h*SUfMj$g(t*wr^8{40zL0A=WsCj+SZLKmh-6w>R9~N+ ztY0XqzLtSa|P^wi?eo z<(X;I?JF^7G&t}QpDqGO3Q4os;yeu>`ElI_Tm_b_$QL`ulnGg%1-@p83_s(GDME3i z)Cqt2PS2H|doRA84bX(~>e2_g3?%Xp%OTrUZ(}XH0TWf5ZuX8&4J~vI<}bq5BcwdQ zv|iTS@+`UGfnW`b=J!RYID(`_vnX!gI;dOla;F*F6?cSM=5?UJZNN`Y!>9I+Ju z*U*qEy1d#i;o=E?uQP{=4mtn^jT4jbJBD8t50qa|Vhtkg-VY%MI_iXU9ifigy-R}-85GzrrVuP>#d_R#(Q8^=@5bYiwxeq{j=%M zEX-($CFfyL>a5;6EXCBZGhMzh?P#*mMf7=BV&oY)wC{qONabHN;z|iTzaHD7mVW3C z0OLL_A$JE}uN}HQz>}s3W>X>YNvc|`4~zoNb_flmFdR_t-BSX|Qi~gm5Qh3f0cWVs zDt8u*OZKLxUYJA=;iy;)-X#imL0yuc-K=7Zm{f?xbE29--BorFi6=EWd=+qA_BJXu z7ixM0Qf)R1s)Tv#E4w9Rf18Nfky{H&#c@zRJyN28~ulIchhQ7 zAR|9aLL9=Psv_GdK#r}e1v=@=k-Y^E`w(xVAy!Ezs^Pj0VZNGcVBEbKD15XC5Zsfri?h9jR zV$&m($3O`e(eop1*@s;2qVrg=aza*C{OPif!RLX=+;SV(PkTYzF{yT?v7s8 za|fIVg12~45LnZ2PX!InDNwRzwNwf*Cye2_EJgqk@VLI{K-$ZMmZ>)_ zHlz;{Gw`CR58-w2-0zYrkIcb)G?bX~!69?uWcQtZgi+ImU&+TlC>12WNE5x0`;j#5g!X^>j+nye5 z74<1c7UE$BNBCfRHb5v^NKh3Jd@m?pb9Pyok6Tj`j_WO>{=nN-dE()-j@Qv=D7~Lu zj4#g7j{=D24`WvP2b`8{*)pm)DHrvd&|yjh_&`;3zK+UI+DMEJtrlZrWV=IRx2LB? 
zv3EV^UZsGVZ58~n3-c^S(GqN=O4F+7HW?vC1dVAVK4Ah?${a;`E`d(bMQ9YVXE@lG zaqn%j)y`C=26MGLhpG(Q8%c}@k!S<8!qAAL`ab0jm@FyZX(&NKJs2G0MD-t<+&MYaok_m-ouT_a*_vlVDhpa4c)G^O4?vyoH}>5DKxu*kA1 z%0~rlA`g0XM0#IGsvWA%%)O z_(lY*?_Nvth3h*5;Om2a2>Qi~+)=DB?L{aSaQJWq>j6-4Mr?5By9ny5X*bfTnE!pk z!XB3T{+jQ{-Kp3Zzc~G51qL$*hh#_-l{u+OF8Uc|H#_e@W_Ndbuv=?>_x6M7bKfTj zZtP6|tdZ_+fi}{U0o0@S5i(Bdp&qAB1N{SrZR$^q6>9wA;zz1;q=GZDt-s|Dg%C%yr>1!@P_o(XW{jFdRJKW%yUw}nCIkOU?G;f~tTZ}Z=z|5w{r5&AkVkch<@Pwva-P9mNG zn;$lF3C9xN))1s9GF+9|AOe?qGxcHhvEY?nN5TUkA5Qsg#A`a~(j$S}gtkAd%_xVf zhK*ssWAtEKl2-6>@Bm28-x=5DB7SkhS=3po;k$xCvl;GVx;5R}_D@?o(*wVP_Ey1C zQgqpy2jTc$ZypGTDJyj#gq2nNRqOasb`kvTjr`P8+kI83S z1qnEy-CF2rP83&OG{%vi!Qn1R<)xS!ew0FZQZ%g#cGyXWhp3Mu3Ezkd+1IQ6I5f!L z8&aQ;9C3?MV@LTc*gYiE>SSPsAV5EZ+6eT9e}}BadHG%pioY(vwI5HPdqJvD zi*Nz146Z){#Ik~{Kg|9zZisJ8=W5Dojd6z}rkR{gV`I1gKHK`B=};a{Oe0X&)U{6w z5*5_fi{s`!KI*5_@70b6@;4+Ed9wZV3|G_Z`Y42o=}|)LPo^K)u>A9D=n7_5gw*6W zJjG=*wgG?*^9Vew22C&EMqn9pLc2# z-#Uj3QZ_>*FA|eNuZWX;c;uhbnUPTt7S-IQaiLi6s2DrFa7=R~^<_AGSdG{(x|KR# z(Spr3MayE$1EfSVf?>JW4Qx-lJ`kQ1R=~Tre=z-lsX+sl(FNWRq45L-Fr8G|+UXS) z>V4*ikNGswNtIU}JsT3wkHmDyQYPp@L5A?p9;^963eLH_4LCxgt8StTjz5ttF|JEZ zWiP_#XMmcaxAPQT?;0=#>kRp_tw>sgM(%J5myDGwv8*m?aj%E)<#093z={Z{Ii!z< zE2BYyj{*KZ=o>}660h*rcZCN=B-aZyU+xos7w~sh`o5wP2%f^cD+(Fa6^uo3ORkjWQ;X9JOCsSX*#5;9eM z8uz9>Q>$EX%m_z+wr|AHXn|H=8s=6lNi~$f;QS1^hgcRqVrlJFC?GWKXgX`nQj>;A zi`6@%$HM1)JS(Hn5yx0WFWS$4LIBSz;=q%K)M*e;AWw#UQpjMEzJ<{9PlQYMed{DT z6kc+Y8!P?8>4W;XY&DlxU~9;EvhB*HD8udZnLAW2yk#0rqFpjCzAtHi6r~E}H8n06 zOS2AkL&I!crMJzdu3Lo=)4Wk?^##Bzgg+8ey(Nz*9}JqoZh5 z*B|~j)AzqI{SwB29I1O=p43CdM{1QLgR+SP{W&VJ_aOSdJ_sXpTbbX08qjs@`WZH3 ztoLh+!5yq<6S@_(LYdZ6}HDxjUIq|?v-}a4PXx_%x#nqK`@O=aKg$Vrf z?r@BJlq9cEkU7kGm;`fAUq;Aq>sUR*r3Bmv^(GSURZ!ph%G_`hv@?;=C#O@OU_36`^_f$i zMWa+Gye?XTC)2IOGr`#RT|Gw2HdOw$-tNEgw#%@v1)3W^P?=4CJpFNf80m=mZ|Xtz z7{2=nU_M`$ZVMBv(E)N?O#nwHtbKG%wyaSA1OvjqAPz?eBn+Z^5Y@gj`a3j~LdMv+ zmC2{BLvzQG;EEHxe{bcFjpV!F7W9j{PzJhq|(H1aMs5&4{<;|X3F)05GAMJ+glixGOj4uC+m zg3zI+nR%&{F*=|YNVqftHe(J2OTu-i4=9-FYgPD{xy3CS)iS1|mRr+R*b+4Hnl7)? 
zx**m^DrL&p&7!<2n-}pp7(;L)L4&i4@lklqV!G=O2eXYhpP@OyULB zV}&F(kBE{)&DnZlCI`0CZ@h_Jn|9&mH>+s*OhRo#yW){`^=`41z=t1%gt&$7?3Dr` z{!i1pzb@e-Rol0QY(VPOpVUos?V}c;=31U-?P+U!`qCTTBa95#&A;G|f#HNvy7m6Z zXii@ChBrvt11jO``FIS$sEk9&&g#G{4_35F^`Y*lD+VHa(}zxOcNHT2j3XWl&@z{E zK`9sS|JL;5dNr)mz5B6%{1SD_pZoCo!K*9v>KfQ|Z-v-3`nir$T~+#&Wk|ScjY6~d z)OmkOPQAk~r0j-{G`#`fbR!R?aypx;^xl?j4=Aeof;)3cs6M3TF?dP$^U-jLfAjrs ziUGwR*;ekR6`8M^7bv)zb2c$uWRD^ayU#CkJI!P58rZe_^Mgwmn;<|;$Hs@gbNX$ZzkAik*jvAV5culd67yq1Ad z-&JsJeO{FHqv;P#LI%e8!@%9*t4OC8wJl8JaB2jy0G;hM4d__{6!x*m(k7CD9Qh_L z!G4>t54FUfPJjAe{}=OrDl&NYck}-r6$kZ$51-(jy!ik(3f^(CIlT5~B;e$+c(IpM zm4tyxfZp*2$@~wCSP&gRUc)lJudcjx@aH)3QR#U=UEg_l4+9Ak=sd!h0u6a#z;Wpt zn>Rb>ZyK0FFu|W5Rwdx4`i-&zOi0AQt)sg&J@M@}Fe;*$!%Ml)Or`n?ZJKL0d#bLe z+tzSTpBL@R_-eqEZ+!Urs#^U;UV3LW=YUL;09DDxcySP@5K7Du@@K9z0Xzaj0A$I2 z*G-IKv$IgRr?Vc+z5(<0!r~`_xDc!q26WJc>lyPl(9>=A0+}+94hoH0ztsU=(a~u}?a9tz z2Klg*Hv6Q`mIfzB%ha}_31N#42k*L;f`>=UpW0IYLPfgKMdKTCAw`BD1Wv%z0{pMn zaf(^>-Gr z>OHL#wxOGTCt;N2zGzvY^Y}EB98TC!Nj`D+i%w`Jxo;=NfV3^t+9?_XWK0ki!8QpV zTR6%nAos4}B)|P0c@CH>?T9CgdbAEy_&Q^h5NJkJ@t71)Md20&V^b?6+wm|Wsw~6g zN(izKW>2*1{c?uJHTrV5#%R#R-7AJvy}nnA<0euuk2} zKbw9h4ggbxIIn*}J&^DXbpb#6gXv%XsP?PK!m4xMx@~~&Aq8ZPowvECFDg+G4idLfl#<=*u10FGoeLCgwLmh-UD@7FHql!ue~J+i1m3y zu`Yx-$_k%HVwxC`Fw_^fX=dVCcAS&wVNOB09UwDD@2<4&DEp(+kMg-61QO%1m zFoacE`ZFLxanRb+82{;uQ1GzUK#24R9eHvbudD98^>sobM=?A{iUBc>{e&Wjo;!ns zKsox+!oZ^V{fko!Iu6h8f-Ldg4lHks>$qJrCIYJ zh5C-VC0eEm<@v;)-%-d0gatyl!m~fQ`ERO!6t^Oq!|xKY=)q>vF;54gFOhOSb$`uw zZ~r;TkoAHHNb-XgsmDoV!_BpeIiKN&ja2wKhi^%;MU?97P&eyyyAPTOJFj-f z9ljS_yp8TTsF%EZOJ9y23ML#5d3(QLPyHK4m4EFYLo5K*9ULd9nSV8}140Nkha$@H^&p|<1asVJ}mecxub4Ik14{R*t-J*kHKe=4Fy z=$N93pCeJ%jkqF5ot<6OGlMJj@m}H{a8I>5;qPkiLP}qSRd9WVp-uz9x(Yl%?l(K| z4?nGzbu@kFgRd!`izZOR{^sEPKs;v}#F|FP_QE!-qSp6dd%EMN5;U6!8-%4oF#(s+ z?Oqkv?}FDAW0*RkJ4U%+{I{KlobttRgC6TmeIxOv0762c)AL11>c}UA8!ser;8$;> zT`xB4aA}T}fSN<+MJEEEc0P0qONPcGO-xX#sloaSPh)3=c$Yhd9`Ie<7ZjUHlTEWKlcf&jVEH|m zg`FG%6xW6nYhPbl!pl~@z8ZSVq(LYNK#+gN7kdx6a2)Y^4a0 z5@I;YCgQV+S0pl=k9v;c?AQBVsOp8XzZ5hV`P~6mk0X+Ykrg`ocs3z-kvP}qS^h<4 zx2?s|W&o+zcPkUZnY%UCeGuO6EpNDqbtv8u6)`!eE!K^~uVZ-k_ET32 zs*&Ssz3oBnv76PI4GUETFyvRe< zy-`K7iDVWQ)2IT>()rnPKT?M-Gds-)$UOLGSplgx)P*{|#hbDrB{NMGP_&99N~^V@R# zgrcq2kDUP7sPiZ7&KY?D7e}Kj%TX&(8K1RQWPc+G@K;{fNo`NJ9lGRXAZGxjk%E(# zXF;MGM_k6*(7U()YWi2T9;_jRzE^u6p?2}cEIeXwBia}wNrIWUQ1J8(%yrSXNZ=(g ztlvcc%Qo=Po$fb1+SP~h7N1}lbSMKd$@WxG-KmKKd>BWnEKwbsrcM`8(oI7`$_IXW z1O*8B;62Y-68|i+PpPlqL?c~xBdCJE=Le%Hz?A?)sdJ{Ei(|ChyNvh{YJ;fPQeRnn ziw!aG#7!^*Jky+vn6F(jA!e^$r2zNmb#z)@6{4nkZ&CiBUXs04NE|s``-2IiG22bA z=ekx+YQ!EfosO`G+NoqWPzF_<9D8$glEcL2d>e|PN*HrKcu9=#gjWHjz_W{_DOtYn zj-tT-fR0g%&=r@C+vj_EGIcilY6`T?Xof~I)oWPRX0i-P3& z;zwIuupUViui#EZ{sFgcB zKU|-z^1TW2rX#q+q`Oea1i6T=Xr`<$>x};V+)gySz=?n3@TOek-X-^Vxkx%~UY%5w zMF7(pOZZ+;Nr|xWt30i*EoFD=5Rj4s3)QB-w)~lS|6rIW)Dae+5v*tRm+|=zrk@w_ zVgMnp+d0%v7U$%uk*yFnQmek8^dRM9exIyulDj$F=C0DgxK8yQu2w3uSXTW(7Oh>l z`HVc`yeKIcup=qXA+#?y5Au3^b z!bN6*Zy2C!5|<-SX1LQFOnKvS>HyTUG0?%7VR%-uTfB)Kc%(Zwg7T1@jx8fHT)l?P z!-@Ol6yXdLu3mRfs5?Izhi4T^?9mi`PY=|y6L$5d8MBaZ|GU#u^Q1iH({oJIVt!nf zpPBi$Q8&lfMTFD5xOoEg#iIWah~OKh@_RSai&aw$QuH)D^{3M0)+W3Ft3NU zZX*qays$E)D_!`_a$D2G;x%APcfsFJI*sCv-oLZfR zku%Ku&^GwC3iW4(oP*51htJ;-cntnR6)iputF%GEDGDf&+rC)K#IDT|T&6QRBvZdo z=S!j+$!sz?H(ja;F{=Wr_9Lx+7UW9}e_f$uCnJRGlWLQ(iKNcU>l6H@1=NIfU)-ph#;ZthG2$XIIqH0HZ8(*-+;;Imz_&m8!>^~m z|Fw?>wSGxStqE}V07FT=9Ba)W>N#^YRst zAq`tS+O2+ZiJO1eGIua@ufOgVDzl$<=A=-n-*^D1O7ZLM;l?4|OitzE)ZZ^w6 zJac?r%;6ChPT0wS2|JerlhQle*DRQwbX7^{T`c61CBij3WT??Y=RQKH0R&-K#P@m2 zMF$lgO%Liwu~bT6L-2ooUBCZ&x?OKY)Svsp4xTehbRwu{8Al!>N1!sA0RijgJlwwh 
z4T%8ivfxuAfbL8?^)BVrYlqV1PUQewp%U5W2yf6(wLS|GMJ!N$x4^1yAvP=wub8U5 zoRg-O)sSEvggE%;^^Z{uKN&9X@Ez)HeEyJ6*ha7DXi#G7T_>yO#=ifUZ|6y*;|EP0VkES)T5?DG7M%UhtYK#DFbDja7M<7jn%h=gNM0MDLpOB$_8#3754pnxk`@U^zljh&{Wb=?Cc(5I&i zy{+S2U1r!0$72Gv72Jqy;nFYjpCY;gA_!hIt!}v9G)al{WQXIMVH}nMpkw{@5w1)e zqFweNvv2g;PO)RRf&j$G^}g{3Pfo(~%_OJU9;J5l(m^NMR>(V^Y1+S5Oki$xxJWZo zbH{OnU_FJ_g#oZV~VZHL;ITkNEQ;%{_=(%BtUA ze|zrI6+sA0lJoEgSRzIYL{cgV+17F%Mdz>|?88*~`WOWJ#?ts)x>H zP>>ki?kK4S{e0F&--EsB7v9piLJ#30--mJA`9cVphadan57-0((7(8kD~nxZzTU@x zr)aOYhDcw4e1S(?$MbbP9R`5gkr7sik`YQUIjaJ`xKp9%l^>b%1j0>0eYK)8&yPa2 z%x%jeGK$Px#Y|eD%$J~e>);<@vwNS9z$+YNH>ofAb2u5>a^-Sz(t-LkAlB=a2!eA> z{^>sTA11^RFY{)wqKPJ!?MypEQ^19~+;eD>2bN-byBeQ$5yj>dRtz5n+@GA@;sf-> zy6IRZX_OPFye3VVF}03iiN0f|2FksS?H^zjE;?A@H3{VB{bIGZHTEK?eU!Sil0r6; zEcHRCHfj|KZ-JB$-?mn;%h%ydK7d-{iuSf6zaWLKrhZ=c&+xC4m;M`WyU8Eig@2Hb911iwZ$sx>8dgl=r1 zBAj(-Mu}uXICbKLOx=WVSe2J?V#IM_yMtzb=@AnXd9`KP16qs2^x2^P69)g;^qCKr zRo;XN&BSQ30AbAICd1G_pcy^K=>-Vv0r1hkxN+M3M8PF$Jq*iIUax*6Ztz#r_y2Q! z_N#%0^vy{DTEIZEgnldrMztJ3f=q(U<9df#yx8V2=Mt??rw5YK5sV-sauNFkc>xY0 zXzY>9^jf$4p@l*Q2^unaAXL-B0~5Sagw<<;F>s04Qr{~EOzU(-j4lG?JzV5^IayO# z68$4Qw@{-`F*oZ(hcx)M2u&ROdHn#0JE5qGtrJA7FYc>vL4(uD1+AlWLGdo%5_3>JX3dE0bqy*%n=s$|Vo&~Pkjyl+9S zn&UkRF${0SJabQBy4L~16Abs_dQ`P~F^Zb>3uV4KfG`q9;A|K3tw8mlu@jlNFMq%N zBrZIRwM)|5VVrI8{LPuA=73vag-M~;l4kb(cizVgmP!}yS{@%^4PXD3LLFoueLsZy zo9;7#z6=W-yX~az=p%p(vd(#~+`XOxDZulq`>6RKuQj~`;_-MwC$v&^|tN+gQ?tiO=skue} zZ6K#Eqr0@?<)enXzKw=tCoSXeJe}^;lSc88KX)I>3MGFaRIxh_YamVd`-<^%rD2=g zVL0ZRoRXptj@Uhr7(XF_t0ZH+m^7Pf{09`UQPlGNBNs)CQ~ibd@Yo%56D@vFS^MiV zzm9;f$cW~DNEegFG2Ed0GHYWrOGjH|0Hwv@Q*p9CM4y8L5*f8@i0kUo`TCLd8J5y} z_SlLBdc+aPxfP+_=jFa*xNS@+4HkVuYi#cdVj`PdiS!?S{qC>$zrU>i#lKko!dLjE zI^TNRW3XrFGh%lfSor08(70c_O+d(2yXf@3pS5)*2jVKSM90)cO5d2%1=|NP=F*P+ zY@|;OLmUGA8`TvAuOigxja!>RJH

M**3jJJ=@tCkzSPUReRww^$#6f}$D6xnk6P z8487ZU(aq_0Dm2TbCaWb>P6pAxJzIX;gcgMa)}8r;634l5r0O$L3MG-=)8>$4*tMN z3FCJaaztGy<#~C5H{q4m33Sa>7QIMIVPX>ihx013XUr><$>7lIXl!3H>cRBHmrpe; zS;jgAf*(*|{X|=e1w0O=Zr2jx2Th*?T33gM!i6Apys!r>tTj1lTKXXc-JpKf^kD7C zh1k4auDr%cY>|~^Co8UGMl!ehc>|qwc>!teVqXn7{Ln8Y5ICo>YLSSkB{`4V=9(I@ zuz;n);s;0P;fZ3C=NJi!zt>C&>W^uA)K!|C7J_yW z!r7f3)i*Uwu&Zm)Uy!IpJ@g3dqJ2Q_3+5~y!#7?C%}wYCb%bGg=GSf2w~*Pfyq{f* zfll=c)r2@4^!G=v$tSkq+^*p1Hs;0h1B3{##17=UWt>5siSI-ym`MGL}j z`Jrz1!APssPb$M(l z)c3#=vR$xT{sqsK$q}*!{%Vjjy4m$M8fYdHq_$2;76j(a;kwjgsBBrL9nYV0?3r(n zsC9&FE=h~=@&t`B!N{})_o_LE92mc!b}dcqa^fFYMs776*S{mtd^NcJ(!Pp@8ECGe zOw1s_S>+%1ey}0Q{hL?#JXi<+Jd;6vT;)Qm1fuMz&;%E_mW~*pFMIt&5Ce7YgWQ^K z`GPc4_hptNJx3}lztwXJQK*qEAC2oXkuhQ&$r^XijOz|gl2t}f1V(w@y0FX;m=6yQ z6DDC1WkY(dVZFE`M3_DmmJRKB5{QDjVAN(E6mGP1W1ZuOi*ii$^Kg|ej>4G|06n=; ziB#ywCkfalOc%%qV?;tR^d5;3xS+)sA!-{LnCW3wTq+MVL$p~^*yW^9)w=6bH`ml!l&lMcOQz)k5^5J>agoO7_G#CFB+DQ z)7HkeT#Xac_dlKfY3&F?rgGTNAfI*NdYc=tX6*nW8BzZuLSsbXqZ$%WY>o~e(ZigS!uo{~AX*3k`6;vVG=FIRm{3YM%q7RQtUC?mJGjep15_ z+cXG=MJiT4OP(vemm%u0mRL3GoY~bNk@Kz6} z(-!lY>v5y6iEX_{s&rDM*}_RW8SXJvfRY;urEXNhK=x0)U^G|=V8jl4r&Rd0-$HW_ z5OWLG5K-`p_{`|PxHu%fa+ncVL^`^A0K4klM4LA5P&nIVUO8$2^2ljByEAwB?2dR~ zo6986j=F*LqbvndqWhyd0KxZVAN$M*bP~1<#KYEzf%`S6WOtx@_m;_@bs_xa=-f-D z4U{(Gjm&{}Ga7LPest@C zSFR%<1wX32ewz?7pCU1{I!5=cv!nGh_$-d+f#};`m&n`l5-GjE2y*)t*H=OI+99p- zV#b*&V^+No{GsgUvO62GcFWGW(Hod*M_|+qI8z}&cP}Geu3$H-M`QLk*zKaNv1Y({I8@qR#Z1id}DG)Ve<5O``n+v`Byg^d4LB z2SA=|OroIo3D2@cQ`9=U1nnZhLfK)@Zbzm^3Am9MR7Am<`v|(pJ+m{`%d2jE^YY8yW1QhVyxUm7lt>bQmb@n}#KR*y@Zfmq*Tr zVlS?vdzLg#WE8eU(ZT6VC?12<+Wti!)95V$BVWt3bhd+VqD1=DaQCnkkcbCZF(z9+$(XOkA!Ab#sSnEK+UpEXH z|C*wH;)!i!Z7(9N&pd({7Ven2w=mJ zIY&VOY=u7>AV#m${`Z~jX|H77Kb(I2ht`Im?vP482W*|-VadeH_W8Ck)V`J+|NdX8Wx|w;9O}%Z`~CYsR)5 z!G``X|JHYMe`{qi#_pIA3qa;M-(}zT+IwfkH`)YUSCpX1t)JEpKmYmg^Pkt+kXK;= z;=v-GZZcZXjIFp^8L>Us0M~#=4Y7HiC74^K2O&wtibw-4DDLzkC8sJaQ$#wTS}Ar z^c$cG1CADOV^6hyP?CFrzGOq=TZFb%)HH~-;2JrX+i@DCO z&GHG!r5ZuHniOGS=FQLQ2NCGfn^qUmI8mGrs-d#j^b%X_EOhA=BBo_W{S2->=P!(FfpSpW$$1(st@fAoAu8ZZUhXaD8%A@sMebRSMd#Q zX@@`~lmgX96oSa6?LL^U=`p*Mvs+^E4fX2#e@T^+f5QC;_Yx`I9sGTTHvRC%KI#b5 zArK;u-pl?+Yf}|hGZI6ZA2ZN7aY@&wl-LddW8EP_fR*WEpckr6CRVTn5lWuif`A+= z%wggOxh&u^8N0qGp#|m_C7Ii=>e*J772j2jNYo2NG87g(H(LhTd^%$xG zhJl|#Y*M(qhM#SH+C%@S*=a0At=|@F1-(zCpzG2ojVnq**n}DP6uJ!_RW$0CT@?Ub zOwWOmrM2y2eWSFpe-!cTEXz@^NFqYDh?614KU0)*J5eyAuIt6K;d!?b#j<8Q-VA1BF*rmfw|2YP%6rGT285{;(8wz55bBejh7n5>X&2p z0KKbqE0wcmnc_SXB(tKIYqA8$1Er>-Xqk<*?yRlbe-zoZnhr8F6I)^uyimZ$CKtk^ zMY6EFYRyQC_YrKr!OsyoGqpSz57LlfmmSgKwb=9(Gr8(CBvuW+ZYc7O?E>I5bVm!? 
z?tTcY#eJIe5cCvQcM8EA!XB+ZKLHIhC>ZTi-r`)d_ZTUNY%=|<@ubD;#vQqXtK)e{ zG4QciLIMzXzw1Tz+uxWY#9*N0PcFW-jjh{d>k6rRS;E#N*lujt2_4r~q{$^dx;3Wk z;4+jFCKEmFyz=%+%$2wZ*{F4q7)x(EC8}y8D6vgzj#%U&2Jvaayl8xC2*Cw1deS3s zmqRT1IjRQeaBUN-rYT^b!YkKbfrQWNiuR$|R}ol&tv z@Lv1o!Rluagq)3B7%vN@lah+00ZUOKoR1gw!vhAX?KQ#g6tVciRvoO(X07<{mlW}d zc8VjgM!TgTTytFz3Eo);Pqk@Wgr_2{9aVFC>mz!=bdO7`M-ijBxhTZyPe1x^_~_@K znmsj8+&kVP6GBkhoc(bnpS;z6h*a`n{D;`#z*=ZC0-nojI7OCk7{1(0cI!tSXfxe@ zlJ?5kG*G#c^zqPsSvJabx$*|XWNY}-WR0t$6<+WNM; z{uOLO^tBUCXAxCb-Zz8z`*5JLNnQ;v ze2qlbI^^=12~6SsA9*>eM1HvTfGX6hfK!U&<-mzT&4{JM!jET2O<=fk&5Z%lraV3|!Fe~AjxXx*%|NZACBAC&Q&iL0F3dfy}Wgo7>q%ff}*YH%5++oEAE| z@rrM3oXs*1EwbAn29`joezO1UF@0t#gfgo137!4zS%JKDGPoPsiqKk^fZ!nEXV zPPidYLzv486RWF$tjLwZ5jSz$7`~|At9ey0L<@2wwCm4;_=OgFJ((-9QXI=mZ@kkM zPZ`1t*g-fK3n*^1=%xj-z_n%w16DDn2e;>4DUepQQH~{JY?x{b8PsfE*%f!Sz`Ek< z{Y-IH`T34OvK%(EBy5Pnb&_%U+i#Um15Hw*J7Ag4g=K&+kM;g(3Qq# zl`7Z4IXBuwhsK)Mq8izHd=Rp%}sMOK8!un|(6is0l=$9}NlvYd~~#5d7ryu<{eDw3579ouw zCkqmyr_&6lQlZgqLxFT(6MKWMMiLAHKAz)+hMDzzcv9~{+Ckch^dXon0q7GvmcI4r zKf$n8D!M2K`SLvIqGJQhNpFQU;Vek<6qPdt!AOtAkt7)-KRL}%ei7rel(#& z1?a3p!rtP8rXWa_loMAR(A@jQ5@n?#6mN_MT^ab^Qj8(%`d0mK@XTsE`@??ylh8_S z=9&C4KURt4nDGomVjdnoC`yW|e4GlF0Yd2-uw%U1 zX|Mvr*BuKaz~OdDq!Qt4UnNlsOlgf#TUYpp96-_{>GAB9`*3=CxmLfYEA>-Rx1%+v zhXzIObB;YEz(4@s#*6C{nS){HehxXuxU1PU+|e{`|es2{I%5xv1`flx0xs(wO6tWhi@dKN`5m=@W%<3JEJ`%@b*A4i`_ ze174<>B>wOiIS%^w{PKrlQ39gW%>|F-uYf-g`6;@#oIVhryS2%_=+BJxY+nM@_NzX+^Gt8!WSY*mqL1h_TK5K&#RJ>cFZwbuR zYi!xT`f+SC74f(`gfQwLZn&Ldy&t6y%FTyzw(7yps!VsAbay0JODNbf4q3lUZ%xN5 z1ceE)H3?eIx{#)pV0hOJRauBpSS#GT3ocNTXtv^kZw$Xv+iMHSP+!Y|MXCD<@+k+4 z9^!Da&f0E6`^NoZWu}Hwzm4_wt1lsR0#^0@1~5G71{zlh=tCA0}x#UWz~ z0-iT3A+o39q_G&!Z)jBI1p|->jY}jZdkk?<68mO}UFXYD5Nr52j2|p3>4#brqkEM1 zy^EAM`ER6CSF)sK`gU+Tf+nW3H%dxV)L2X9@OXuv&DctXpG-2F2)0GE&loq)Aq35k zmtjMp`sGKiMyB`Fr#iMmYL=+&Lu@z7+mT~aQ}Ff*;rb@twbz?cXkI*#J^FDT*-I;rV#=c=@s*d5i}Mn{#?~9 zrzia!z}%j!MP^j{iJY*Nj44DhjWX99v$?zlU8%e0jdeZE?V%k*Z4&6k=@8x(R|g); zRJwh*KfLq?Xx`UYCpI9qz@k{w{%RFr*(Ot{`ZyH{bCp9c>ryHT_VQ8lSK3b9IZUkp z?4sjku$v=yg0`?CvCx3_@v!tCJ3nr_6ACQ=q@K)c;U{N7v|MFM=iyym(VK}(} z*jIPgOv%u6jFeJhc=kT48USGsE3B`ybL?!7Qi`TebR}sKQdySuc@m3sqcpRhAX}K! 
z;zb=U&h7`b`0!`8nf1o;nG8qXrjxmYiHF=^z2p3p3k=Z>*0!gdK~mAm5y&ern*Na^ zuID;hjZ&eD_2Z z7&M0y4!zdX2(T@0(;fdYiJkhRGAhW_(t3wOh)QmkBn0a!HX*om+9SivT(Eh?Jx@DV z)Amg9z#i8>8uok~i(4}AeA?ooA|K5)DQSic7F{TMCbyOitw^W%<_RHPW!0H5Hi9!=5DTsg)ys56)nQvdZh(yv(Y9-)q}w&-nCWplXPM)keR zgcXpD!2hm|VztLhh0b{!v^tqHg-+ZVz79?KxiPQgknc<)mc(?QiK25NvH<=Z^B{`f z>LxZg!r5F$RJW@pX8i}btl?xMweDgJ2uy%UK>D+|Ml$oo7xirN+be${R>^rzFD-_* zB9syQ`qt`=`HHzRn zF|YKHUSfj9O-aBVuT*`A-@Tn1_aXdvQj`o-*o+taWdi_wERhOhp*wyaEL zulhq!c=zZ#Fiy3+u|2x~s%WR(PGw3CH(b`?m~gvyFK$on8a38M|MZA|UMIR#b#jjR zRvj3GlvOpe`)qMdkWo$^0`iAeD*HHAA_Cr|RD8Q-k zBx}-o#IJkJU}cn-EKERK)lq`C0+4#uctvZ#Za+LGi1wgBxxaxu((Gm3;BOVP- z^YJ3Vkg~-GcmutdPbHxlDcGj9qCBdiw#N^@7raH`pdqvK1Fbc55%u?0l@fgc)ZH3$YVC!$D- zPxv`d(7D*Wj$`|!(-1JZEb+d5c9?U4$ZNEeeVuGmJtu~Al6e)XC}8ch5tD~$X2%Ok zdc;D;t!2$#pL#yXE=h(`H)`$7azSzy7IG6M2_TYPbvg|t8px>EyaYkrS1uqu=tkc& z^V`ft8jQERj{yy>Qy`U+>wrBKD-(7Lj0<-b+^4*N53^>i&JfG;viMGm_`f1kqm8cv|NB~6&T+YvkJ34DdvIN zyA9dqKzm7=U+_03;%$!heZ$cEcP+BQQ~?{vNM@vOJsp5dERuqd7S1aYQov=_SrrAF zZ8Sm6wPD;%qniTcILrbesaVhKsusNk00OLm_0TbW?N@T%rwOuMD!cO%;cX;S9v*g1 z67Mr)cfA6Si1MEBuSf!~Pg&Z-EzV*yF4n^h)##*nR?cIDgpQ!)ddsT&LhXAl6^%Vv z-#nC%Go8r*Ui0}FY)sywyAhZe)Qj3D(PZ`3&j{x7u@J%GJ|?}b5)&Q|YdvlB`mEpU z6LMGnA{v19ss^JA;(fJ@K@dsL1tBFgNh`pq zzKM~6g z^B!s0JGQhSU8>tEhOS#pHIq&c)zBiOYe90b59#TjHJdkVS%lcF zJ>zEa;Wc5VC?aCL+w%^w=tja3s3KI?uiy(Q-l+WhZ0xA#9iKIXh{0?K*bW5ppWh+_Z*Bc8VS*+Q*^%h89zR+_uKnkN=$I&UOm0(3V{Vt@ft@tdqRp>?(*3ai23=pI}R}kU8NC4qcJ9+V9 zcwtV#8dX2HM3)}nFyO&^w+Fa_q^cUW^=xHYK1wpfM>YVKOS4Wm(VbG0MUK zf_82~0bP;UQFN)ECcAFdO=~RR{$zm!WGmGt3CM!XH_;qWd>BdTLj}z0uk2AHz3Zz$ z4!FQNfW6`H$kZqwoAzD7-qelDIyJwy=!gxO7N_;2i2#Pqe2Rg`LW)#m(#Ml#G-1Ml ztG^NrH!MuX7JDYC;x`|%682HsB3wY=Cg_qR85Wo?OAUOmgi@~5Oc8&GN+xU<3f679 z5L%OLQ8uVZY9sS+EdnJo%1cvzja+_x{ONFD_5v`48&Inz8K^F&^`l~ZBuwjzzdyY4 zWgIL3pdw7_fF3ZIdZZc4`#KVzl$nr?m3i$KYufBXSH#N0O?sbq#A9&X2+T@k#t7<3 z_kjd7pbAsu;|HRO;Cj3hgwrCxi(jxYE|K%~IZ4t)3IMz~E`(RCIOS!h`PP|)TIsl% zGn5HtEjQ!WK7->z9@8QkiQ`jLLi!xCv1tI2qfeOYAp76X5zPMFDzP_1=eyp3hMdFj z#XU3*2}e$LF%mFPF>31u73QyGfwX=pe(2cnwTFu^OtIaaM_3SVs%73`=S1LEeo!>| z5xgh5jq>6(JnJDG^~R)6r5Y86(W^mNYi_u>nQ#I)Kz|;!jSY0Wyz)^B*ha{KU&H1; zbCu`bf;%h9RpXxyjW}xe=f{$3Qh8)1GQc>$@l8OTvV+=v1ufhYt&~lcGXel^W!x3h zlEa1q{kEp;P=EajkDd?Pra;n6Xxv&iQoxohA_?c%n{319aS{9*YSMKSk<3u%fRWDX zP&EZ@5$f+bfvI{Ob(b%Xywv7jN`Tenlf@9$!Y_O(tnK7(?QuC84g3)DyB?1_dVimO z1NPv3D_j*L6iFW-ll_JZ%}9m=N8HD3 zzGoM1zfdKK?_3^i8yJ9}X-46<@KWeGMVCV%Fu-{T_0z(D*J1r|`Aqv0Kh`Gp(Z~@1 zM>McVooZIOV+Sk#BC%s5gk}{ACb7P{T7W}uGF+gpJ3Sco#qc6v#FLXZWAZoT4|42% z4-xb19-)4c$>^6U+YYq^KtOeO)ZQQZ?$q@Xb=;#)U>y2!!;fh`2+;KRbtX7<#@!={ zmOXL`3(#4o3kcyH96qKp<9pw$m8@&#Gc@&S*nmww^m99C>1EUX-TV|Tgy`yEXJ6{4 zTAu8~&-RsnKyUU3Cv=V;udi~R!!EN}E z*dM4HDg7J=y{&xIpunKAac1zAYJ&OJK^#Q#F5_m7fxU@PEuWQnnjL#Pr);MyK;<2f zLP1G_6Ryv;-h~NF8UpuCPe0b#W0W}bF^d4G>C-2Qg1?a%+G`@t?Pw1xjBQKVNpS3RJA^(9CrCYE8bp`DQ)&N8&d*G!|>d^)pkvQ2iag^LJisQE?V@-CnN9# z<*-&C3zi;{GSrushmC{PgT=E9XB%>uMMtn> zB9drH6UeG6i>7?O9R)7c1>IDMw3=RqDF5^)^riC3AWA2q@y3ESI)=NrPCaBS>x2aF z!%T3W{7kOTP6Z+cBQa)n5t$QdH}3K>A2|8T;MlRGtwr$jLX~F~w!?SvwEW^aML|8{ zLF~@Ksg^7>fRB>tEof1G4hNS#qv$YkSP#D$ywrwkSgbXI zlMP^#M1^O>AiV%Q;*O2_y8N?Q z$EVVq__PZG7&@#Z6?uW@_HB~@Qe48jXrL%3qX_9@Ms-jD;jj5+;AW_f9xXwBA7pjD z#N3l|u=h~?6Ix`$N3lY$)={XV{<4SzL{S?mRkuOfv?p;FU>lMVs7;y;Z|?& za2Jahce3Ft5-=QvSFPYC)ZGj+?m9uhXSz#<@jqfGX7L+^sd4oJ^iTK!mDk#_*M>%9 z|k5~d~EXGlYYjJ&KNC;p~IXhXktVcJGh`?NhvORsP#sg+jxo{RPq{ILv zqnYZA=2!=jb*BG6Rj{=`_3-gZN9&>ukFF0eo$8G*75|Kl*WVT~<^{4iL0|XO)F;F5 z6wUt0(7P+Y#u7$2hCy=&!3qKIBZotto416`!o4YRO2Yk)9%yZe^*f}btoOB=%<16B z5z^H=I6|&WdX7{6c!J07iS-JoW)6MH-}~t)I4w!<$06%a-C#|T%1$~eW%52q&S*%J 
zW%;oTMGmvBNhpOYZ|NK>5{1YI{0zY43Ln(DQVdr&0bVEM;b+Z>r%{H#dN@%~I;^SV zN!E8tIz%9nM$E(|c(>Oh*{#z`8WylE44vG>1H){=?5XA(M@%60}wqYR%^<1qf|@kmniCbneu0A z4TLZq)eydiU9j^OJ*vRUBf9}XQOpY2_+jk(wsX6Z*vyFIVQJiLb9*Xa=nLL~O`-)u zlg7lkEZ6t~hYhkL9lF+-85R>jv=|x}gf_Rf+124RTH~OzP9tA^)N1SQ03p@)$%o)r zrGVII^-FcDdt1ZjrVwm{bHb3;BMP8_1I>|-%x}Th@X>EYOoV&B?;-q{1hoYhLVUr{ z^1ivncU|j=on_>ktS|?_ZA#$yccdrq6zFCV0{bb3JXX=#*jl(ZyY;H$KQtO*8-omX zXl`m3c&&=qhe1J)Jc&A~B~cSEq=}Z+8OSHCpx5l!pTTNhfUJPZ{m?CCgE9y5PfTVU ze$Y!Nz_@_6udg+bOU{7e964WwCrmo$UE41nRuHQO2{!i{33Ct zYK;_OpqCree)+(guNjm;*^5v?P)xmoP77h&?#Mx}O{{3W#4Ok1rd|kRTjwN0Z4_$g zWR@zw9uBNQH4Pkj5}`FZo3whX#chOK3HfDntHa@CUA?j@Yfc0Cq0m==Z{D8vRHsB* zx>(YuwK=6h*O-awNx?!W2AG*wS5!ROb_jr9{gmy=!x$+^F*KVkof*sLB~n_{Jv z@GM-uOq_rT1Ex`?s8Ych*|cZF{wveV9hP64C*bo$BV0mvy_N)(O2PJhxXl~at%+_x zZWg%B?7Y^h?#$6scP(>Hv=KHb3giSpND2r>YQuYjqL$PJD+xQevx#O@##@x@Ba~Q= zQtDO1y+5i`$G^Cxp177q^zoRUX_gmQp0hhVOS={%W9Gf=ac}tCq|Xa4vKEV^C@RO# z!hRCwj4SnVio~yoy7@N*Iohq&lGIsXk@LNEhnSPB!XIJ0=xG>Pc>6HR+^_C)ZTiuI zH{*2aY_GDfq9_`oMQdhdR-h?Uiy{(?7bHXc^E2z8(n3z9Vvg)V%Ko_r4WTdzcUsbt}9Z zY|snxZ$9XqVQ0%fedjv~2BhY4=*%hjq2OF~DC9Z;Eiuj2OH*A;taOV!ZqN;G7YYLj z(M5E#!n4oR3Xq&B)(JT-2YpZ}i#}!yNGFu(%|Fa2)CU9YFc;?2(oFlyrLuX*{v@qU zDB|rb^xAf4Ov!P_-051e??E%X~FF-KtH<$_=A&2NCZb0>(@{ z;ob6BxZ+uqf(ka0RKweXm&Z{Mov36dEuytgtB$c^PzoeF5^iBHO~Q%WN8UvKXVubP zl|a7AbJ`;7HB@)hejB(W9>Y_zdC_6oMX-(Jsh`&-h6pc`nLN4LJqjcUT$pe_7(CTl z>6_45cQOkv9h%u34r-0c3}h+x27!o4NXb?@mS>~9-Qw2qz%hiT=7bzLg;)n7c)X6Y|Yi7MX58Y&UKGn{tgnKGMCKB+&DitcBD8;sXl z&(JMk72I}b4EDMYad%8!2y}>7MSDS{4C&5bL4`y()LGB_(A_uY_~_wDRdX1+Q6XWy!mXGh%&m4z3U(cR<> zc^f_UniDGMX{2HI=I=;e%(ChPx10cF*bzFNtV2;8g1BidVRvoLmJAi@p|9hPpGozv zr*PE|U0u=pZ9L&Nyy+VS%z0&~N)b|PIOzLCFdye$s8G1Ypze}b@Dp6hI(rqC|Ja1a zX|U02#EEq+#3_@$r$ulp1IGINXxQEH^)lL1?L<(&J*qWd!MnZ2!v=S>O56}Ut?ZC* zWW#E-A4yxyyoWM2aXa~m|E$dI;jvrLbqu(nLoLxj1anle-CRf7a(tLG`_bk*T z_9;C+>Zg4cbrNcmyM;dihB#h)JGnuAPP@dXtZ8`HF%W1&$UF#`q|232HIoyJAVM&# zSUDtf;hHI4t3mFt)4Ee(DI+&X4o;z>1dr_szanytovTP^88sW&Yi(*vMjiyGk#m`) z9u1UXYB}J3K$nF`B?C*aiOx5X3EWi~7H(Yx6B?!u&%ha9g>1sT=L5w3B56;?C=nc2 z6vAOz4@mE#rTyW^M}&3V1;6?q;%5hk(=SG*Xiy78%-)Jhbrk^n>c`dOZe!z1ZoXq;bD)SF$*4?@Nws< zLuC>AJ-`8wVTK@d;@zb~wXuxQkY(MeMDarF?tPsg?8wM3_LkmdwE+15^pM_<|ZPKmb zB{aePVZBUA)c`PT5~N4t)bY zIL1ZqSq!LZJD=U+DD_Fcb|1#!k&N9Nz!!Ccs2Xjw*j0(&bak*#86Rq5TiLk%^U_1v zXh^h5@f3|`uY>%#R&JHNZ9>B^=KyQ^NKqgP*c+#d=%hr+2Rx;ua)_`AMyR?nD7Ia@ z72G{QESSRQJ)?sV&xv5DtQ(xq8RoZNp^$`%;O|ED4ff z9vMB&f2NbJMZyWlbW9kTELP$K&N1CqgZup1El5cnviY#DqBRcZm}@zpd+Qd#eukZ; z*aNefLSr_-VYcTYpGbXgZ1qce__B8kCN)uS5&f_-2`*fti`+_0-Od;DSYvCzif`p} zdZ{d*MUAjK{-Zz$1o(U(ceA&|#fphGu|Vr@D1Gcb2h8>eX4l^|m_!{CipEh%?`R6_ z^SnBRf6%tkN!M&cfjYXzZrh@4}SOo64qZs8)pL&)K*4JH-@bv%Xx_)CGOYx`ksV=)RW(^ zH#|hNaPB(91k&!oJM{{Q01Ksz z7}`Mu$0!HgVUbg-Ytk|LY~iR@)`725SM%NBlllS$Y}{xJsBWk-6Fvf*Ft^vfqofW~ z0T$tC3JMtJ=H{@Bq#7{Aj|X;*0~b0s2}Bd_H;;$JA|JJdXbe`Q=>j^iwz8WEH^d=+ zo(Y&yeUuAic;ZDv@uIJMw;D|8pPtYe+VpXUDJYEB`iUyXj?z;Jfe%W>wK*4Q9Fl;P zn?=M#^oIE5!hE9g<`5VBuB7ZC$t>RwrZu)n1kn@;;2HO{R&cEAw}$V>oYXP>OBBi^ zDZqM{P%6?Qa_^VyN+~Qt?jK>{zYDzS*o{ge+n5MI6v{HyvgKP9KY&ABoZF=1WlBCH zIPjShngBqRR5-utU!-?iWL<$jshnKc&$=TO!KLOT+&j=fK6c6?J3v8o1M8y#S2K#d zjj=OeADI03ABM+&<)=MIw1+Md@9OStHE|E99Z1TjxrI)}TQ(s!yM-IJoxvqGxcx#y z(M)hkx3^FeM^dyUVWKck9e_;!jb>VfF?oTFcag+s2x$~(^Gg{R#;fLFi6w}>Y8`E_Aq-KmdxCh$ek!LF-b!hw4hCf%ec zS-H_smxs{hn_AEqoBs0>QzXG+>4M0FfZIfGb_NiTGcsCe=xCjbK9}am2{aNZta_c^ zWYj4KRIjHXI|PmliPo697K;O}*o;?r1P3`Nt}NXNhGw9@@t@)nS-%k{(;6U*S^*I_a$`MSphPce@osm%00vYPDt9`AjFb5}?1yg=DVxRHcWQFx<%|-w6{4i!HI|$4h*e^xpSj_iR8&euX|VC6G6srCUk_OU{$&K#j7={)OTiDa55wc%dG)}<)QKP%c{RYROKuGC>F{|-2oTOlFNiw4 
z{09mcOo+IHi$#=V5;+;}C<-a*M-M>X2Xezd2xhnE^GP`vWW>}j)rKo1ODJae% zkb2|_ZX>+(-QhQiqxQ2e!=iy)u}e&F$JqHOE$@T~>FG(}xeuoVUJ>1aXi|`~p8qGq zpZH}kVINHt{|`>oWC0ybP|fGOB@^W#i}q0l(Wlhu8!p6 z_)RO_Cm`nSN#ZQ}O@Dp(vv2RaMK-P1m~tfA>6E(TD>lQe!rsKbyRn42KOu zT}xEmQa?hlY?}1#b*ZRz%>aMO#h;D?D12T=Y;7KfP7~wg8iN~!B8gxn`8xVwi{SmP zJ%T@UC8#rCETYQg>Mf{WZ11(vr6|@k`|cZIMF|D!`K}eTqv&e8@|(BKNt_Wv3KH?J zUa@7U&a*{??49+ykkMEVYxz@e0GJ7`RHqxY8nHfJ4gB_dgg#N`CCqMl7f$BN4O<3E z8o#Yx{P^JyhW$Djo`Zof3-zOR;hpghDPO}nD>A&^jwaNzxI@8UPa6a5+QF(aK|7t2(6FXO50QJ!d1A@OfwGkAj40d zp6?epB6cpz^4kxA>6l^F*A?EXi&0Q5r8g?;HahL&r^%O8=*SLVgF^L+J7%q0N@(D z@@H;!4w4a9Zd&?lKbubxi6sIe0qw(WT~Ph}hOCu2WMy`Jidsjbg_4V#I%v1>?IpIC~f^+j%zxuxh{f|kIPsj=JqFXEc8y;G#YN)L2 zj-(zaR0DKrhq;x-;a|%2M?lm?slm;CG^9}Ega#fdE^ri40I{eWAXXW8e@53<0JOC& zLNBHhaG<)jMQ@<5?oM(&)#o8Roy zwBMB}IGfMj)?S6$CX!d?DR;$GH7kK(ow5|h!Y=u)FV+irAmXrLSHVQ=%LLCX>C2S5ScdiV41DOBw}T32JH{EoM_hJBR^Th?X5_AKOXMp0V$iA&TZQ(&U1x@W zR0mB9&?nw2id!>jYq7kH1$aGe1UA_<^V0TkY(I3&`6+&sqC}k|ZHrQG!b*N@o99Hh zw|2YYiHU=!~f$&b{F z&8eo0K`~9`dx58KFX-g!)EKYhgLN$vpr?V3lI&GxA;dY>+&v&sKM;&IM%3w=n>2!> zytYPp8V#g0C?pW)N_bZMxyLO_&>#R zLZLpBaUOC+y-dOb#IKS2Ng+T_p#du>(#LP|yN{Q+64=3JfL-pHT)uyCsWyvdUrZRx z4sY#=j#Go&1{|K z6dwGoCC`-ZBH4W7Y|iexQu&1065i+82oCa>pCMJUwdW=z?zKWd#}00jopFz7AI`4a z2*2X)p4P_>G_(z0h7*3Q5PHsr;56qw2sfkIzaOo(N7FjS1+@?LZ!+ttgir_%b@iVN z->gqf=xySKk?DL#ZL6Q&upj{;5_M>eiu{mW6b6N>_7rY3XQL$x z{iMxX>GS!!Zi22Fncnx86M_dV*`t%2a$hfl4tv8wp(ux6{Tpvp1q8RKx}qz&N(K!4 zZ1}zJnpE`B9%pHixo7T3glKayjS#A%H54i!VKI=y%G&T#F=P^_PKa)TOu`^3miqbd zwOSosFofgNYF;O^g_hw3X!N<@&TAGLc|Z&n8w9tf0$J%ZsF!Z12+`@IAznwZhyxg} zQFwXeVx?dtlF<)wL#sL9ew><5axiNexQsy zENXvx3o?sj>Z-ms+4ES~ZFW+ELuoM_?~X~r3WoapiXF1Q^y&7&0`yj8U>qU}?IiM= zv^QF1aq1~Jb#MesQZEJK@}QhR{j3aH=6qi^?O%Tt7+$6A-sXn1cd^dK@Y*!iWbq2l z;Uj$pyZI-gFKNNGl7mjMOE>o5i!l^s!|8LhU1!&-V^+UAoS`8BW5{!GLbDA}>Vf*~lIcJRmaK;()%> zH-IT*9Qrpq3^Ej%)Ecqo?z-=bt(HXhR0fV;cnHPzn6Y9WJ6puLEHG$Wi<3H+Z7fckR$B@QM{O2hQ<6E+`sb^u0 z7A@LGSBU zfPVHPHM~QKH5(kdK6QnM?dJWo2wJ}Nmlk|N!InA^0eKdoXt3ppXw;3v8?)}`zji`j zG1eU`SS-vHcFw+%sy*YLd6I(^NJ&Rm0Jxw+>z=`9ADN==7B?IaMMb?FQwf58Kyq>k zq19@sFZzw&@R#XRpvzs0@V#r@{Qa5k@JSkl0uMeSL%GcnETb0A6$6(LH3Rb%9Vxp)Z@N zAvMZITiGR3t`w(Rbb)>~5zr%Q(tW{A_m`x5&V)u#UIq6~hRM6+CwaxQuGGeuY4*Qh z-vE?s@pG0+A*9gg-T;Kq`WJg;;*d)pv#>Yd2#Q2e>O8rAI%Xl6w``pTvXeurYU&=m zME5MB-cN`~RA<0?q{!flCnYhI$nsfGb;q4qBw}*3EQ0KrezprFCrk7WcAE*-B^~e- zpC(-kS`l%aCY%k<&_Y$cgq8X50>QBLP#`?hOX)Ypp0cJfnCk9Stzpu6Li1$4Z|ry- zhB!-7Bq9Lc<*PQDF{+~w_d8-`GarY@;Df(8C3AWoF8 zh;i>A$NTO=XIbk*yM6f1F()m24H$#u%MgPAY>>iTBL3t13p$6Z{CBkfNC zjz?40@zO_l4S0PIBmsvkmZ-=pv4_}54cj)6lXZ(``(XE`F_vKiVRhAo+H+)#CEhAz zX4W2KzfQ-sl;y zc^(8z(-;AJ)eh3p1%X9;+M2t7GhGj8Jq`S}-4T@4d5Ufli2kgX+n}+kYG(b!y)g~q zhY0DXh;hk4wWP>FA4IT4;byS0-8xG)eLYtdTaPVbo=O?}8|w8ZJ2L!us-15mUGo$w zft9>Wu@Q7wx1_nxU_QL+UoJKGG`n0$RZyC;Mqi4wcMc)sxsECLoJlYIRxQVwM_<<%6L{P>p)MZRK_2 zprzXFWRR$X+q$4@y<)n2fBc}lVZasy=twhN2~@FT2$|KOI4d0W&iYZE#Qvam4z(ZB zaoGZCqnR;jxMGo^J3|z-=(w-2HAq5(_|lOTczEHpo(U=hKh-A*AzRWj9X z&Ugk;97{NP3&&yp??bA@AL`!6aBGoiD=hYRTl>@qvl58n!WxhQT(XY{Y&k~2SqGmz zOzAU5f+5^i{(elMGhefGWqUYi7}tK1Yz}>B?cT?FB%GL9Ch@!+JF`WJJ$U}ii8d3g z?(+q42A?>&>Dx##K|pBU7Rs*ABi{nB#h~a+g%iYg^ZV&su>Xu^HyA{k5lHeJIn(AE zVdLkXIaD#9&s{%frJ!Ogt-jaytj>;WaLcU=l68ogo9OJa$pM$ZcscySFZc}hT6CY3 z{!rb}LG}^usDqY~vRB8VK#wo4k2mzmlI?K7_VL5lhwm1Zy=r`J_8`!4Pm{e61&U3g zQ0faxoy@v^fTlsn`fmh9-g}7$7AULcoc5g}%eDAw z8#OrTg)XR_#mS;WCwFmFh#_^GQ4Ea<=>^D$BJCIkjVLy7tGrPVe#{=<^{yb646Et{bmOJ#$bRS(x>~C4jU5)d{RB#^?-EAk-YgUm>JZ+DX!8>>bL@9g&4J%c zm5KijRCDyvMfhiHlHnZg*+1a^JILd;*VdOjf2q*sW z-`C9v*%xgpA0#jsjqZCp81|p}X0e^Glv~ec4q(E`>Ud>mSS9Z>9*6-YG-Xa(+c+o6 
zh8sQ3_|M0DXRC?~A44M~~HUS)80hHK^n14kls2mPkJXu|x&C*zD? z#9a|P4R9anV@&bO5TzY96QJoYY3O>#88=s>ZWj3O5Kfw`xCEL#SBSK(Sy}iD2DN~y zg5c&1a{FAq53|uL*0RKLo!zfpGwK%;$)Q+{VypF0(>V#s+iFK^HAH=_1}a)8$Vq@^ zmN*tZ%2AGl96S)m*S54P4_nemnYz}^;Z;)gtvDAKDlB?g{S5=Pxb~*mPg|yBdF?~j z-~CVVWf)RK=8)Tq^ROBAhbK>K{epmVbw)1#$?z?o_-8EYsyjWZUU^wxR0oxrW?t2P zjtCCbt{YOebGt?qk1I-@0pV<=M27R?_oivFW-z@>$OSP`5-5nz5B7CNCwIqnWk0)c-oLUqf zBatwRN^BZ`Z0Dyr&kf zh9+M(^~?Mq#sf$){5CoBZkH^lw4W!vCLK)CC?m|1GCIB(4*XyYGVT%1a%_!wkh}mN zYJma$n=#Qjo2GtLR0uxfUX$5YdGfAVcHymmT7>aDH7hU#kwB5tsrD2(xO$f0NxXkvYJc`FMB&)u6j78Hm+ zIR4F8*>AfaLhY(TDkoAj1c0!RN;+Y7bfreD6)0%LRxpkfdB}n#DbtK2)28<>npMn~ zjJGmM=@5#us36LX4dhFt0ZMwVICp|c`SVAu8l*@=!N@&-S+;rH=4;l)fY|ywCd14h_=2QNX9Wcva+nEr!bvm( zA;SGGmVHR5_JW)2bPdT5UZ`ccTagul_q)YnhbJH2Z+c z*X(Vn!RcZQ%Z?KqB8*}s1WaFyo&uws{SsSgPX?`IArZdwgJ##)ixp@tD4vrp+S5=M z-}bT0=r!WQ4%E`>E_h5~^KOr0hImfR~uW@~3C)l!TS<6u>hf1K$A2tvM zaWJPMxOI{!pJCyxi8020o8iJy%bt_gH8$bFM6xYi;bklt4BxV+1Vp?ApwbFmx-S6I zR|za%#NLlbMQm{8U;;;!fy$i$XlhiAiPJquPD z2eEr+G~J(vBckU{8%U=vjwoCJ7_Lug;efhcbA`VDy-$aq6qTsU-lAvyZqfdcT$@q4 z9^Qgd0qH$_sU|C-P1Vi$vSWVshnGcg*frB^c`esGRrlNAlLKiC(r*}J(z zIAmHK)6Jv388 zNC(AY-a%D9uUl0KtlRQ30q-Ijz_1$>j$EL+NjYg-wH|yy1Z5ns&}@Vcn1xXXtYX2T z>jeeLHhrL2c#FFRKlG{y3u60XtaxYbnbFT0f~5VCztVbjDpXX9Y<*qo&|j$AFo4%u zGbs*>BDMU@yBQAB>rm6@#`L?$km-58wDp5Tsk z(B!v>L{L3@Upz|PPZ`Y0KMIZdL((p(!Ccv80?Smi^(1#sk_oR z820Mvs{C2>AR8+Cv}a4&8P$PNg@zH9K));5<~d@JoWo5lNc|QFvb2L|+Op8Q|VMy>OW=igiE8_Bw2( z>TXfm{+Ub?b-#Wg5`@P~-xR$|7Y5N$&4g(i*TDg->DtyQ-4DbNRtv_pG%UZf&4vw| z2?YIZapuDV;MIyHWT$uUnFvo;w~9d$$*wzWagVDorEwExcs{KSRS~C>8t&lvbUk~w zL+;Xf{_}ByM?Jm)iQhsS?cZ=)Y}R$)_Z$=!KR2h?KLH>J>!Gp5|uS!7KuDD#V8oo1gv4r&h0w`GHi zNF=fGM$$8f=R;z?9$p+hx7^vkz<^sdCaPy}6mVuX>><=k^Zo`}tToFwMlIEDWdlE{ z9~N9gAH}CrcQAvohyfu6Qj6i?BUWh+3djW6D10qLL8GB`XNIr-RV^FSi95B6EyP#v z6Go2(AMX@{MaZODuWYDjqQev@1WDIS09M=lPh?jx@)OY9i~CMa)MKMdy>OZ|sEriX zp?^mNR^pSY6aQ{!u~hDyq9&6DQJrA=bE1kKlZJcnz2S!?m)?V$txce5=b?pGIiyA$ zzHVXF6@p50O%JSxXq@YQT`^=yU@8_%0I?a487gSlr>ihCp=z3ZitW7C%ShPF5LptK@*z?TiyOA%ABkD=>cpzKZZC#q24{EKq7P}9Ej?cuQgz)-Xw7DqMg z554#5gP41XM;qsxgu20WCJ!7Y2t2GwF-Umd;GF8I{ z^ejw^uXbhk#8;lD4R*+_$jK>kB!o#>U0pY@k>1DUkB0A>Q>cR^S|zW;)$Lzk64fbD zEmLHnnOfn~H@rhDV)Q1E2v>nLQR|Kgsgj6d@oegODChB)cIkW*Vv(B;yL(j)-3k5} zc9{0j3#{B2lt2FC;gA0~$c9)EW}j>nUqfr0-R(U{AjtYlY_{1V$QoKCiV&_`rUd#q zV|?27#4HB>Yr5K?ImuN-?gc9Qie@VpjXd`>N4?^9V6ANHp zwPk$P5Cpu`VVP!KOry~keuSz%%8BrV% zVG2165n$q~5D)cY)e%y6Q9nNU7$8F{PuMwO$EaO`Sdw49xWg$Uh-_;oP}DG+42$|V zwsRf=dpf-;X~XHdRtQAfyc^Yc;%h-JI)1c#h%fR%z@C8{N`$pK8 zg2!23mU_d~;VX;4&g;jSf)@@Nk<-5l(M2-8e^LWB$dc9J3NR!t^x6F}YA%311r9_ zE<&)4NQ5Dqs~}Sn(0PD4<%c(y*h|ITxYr4IEgR5@KaN>*8}v#5G;K{)uR;O!anh-x z5IswTdxM@~0V5oI2o7hClULQEDEyLP)^L101<-Dhn3Atd|1=!X?q< zhq_!!kvD3a4@BwiL)p^*>y`zq^<78QtCM=?2l5Q>Zt_xz-2#4Gc9o|{WW682oj5+k4;@?69bpJX@EYRpjd84U`k{k-om++Of(`H)qA)_I zsn!dA^PgpDCE9o&UmX9do9{GJ0SryONxe%h#bj!RWagYqLpEQBPjWCewl(9lm$P`F z+BIOg6eSUbCOR5H3284JH3XzTf~Pi*JOZ(7EEbHN4DI7M2p#BC5V0NAJjM;gVcAZ1Pw4CD&vEMrB5#6jl|%G^1Jj|1AN<-U_lE_BdYQn?^=m5gx;QhOt4T$Tl*1&8jVmXqIteE1IWqX9(9GC12i~I$ND} zjt7*1(1A?r6wkQ0Sr1_1KS$1~S?ISNg+O<9uVF{KGhKZt<58}6eL{D)Zhk6s6c1OH zl^gd$H`|N_AL-a`C&7;D-pe!V6|$Pn@1u^9BH!S)2Wk}n%J~Hcm}wp#h9$#6%;w=5 z06$>*8_Ab$d=RsphTVC1f64?kXYA!*2}WwN9Y-7v59DHMncNp_83inXgN@alx4t=+ z4c<`;3)VCi&$5$K)yFsgRr`B=04u2b?c8&?-GuhHP)@qgfT!tZcHsavI zkRz0LRzu!kEdX$+*2Mj}81Ddo>n$KW@e1mSi1c6= zbx*c6KZvO1j_fl#sKkGr=wCqu>x{0LD7faldTt*y*bn~k177{{BFc*}9~kfv!B^GE z&)#!@W;!*Mx<(&|i`A6UP2b1}wd!85qOG+@Y&$9sqfg>to6x%6DnV$%uF933fN}l) zJyPPxShwpHKHYSC#|BX0!DR|Nsjpz2&U38ft~z7~aJd;5L(B6!5Lm?*yTeg^_wmCQ 
z!-h8lx?vu2avUPo^qRl0o4W^OKJDf?3i*2n4^%7MtRMGg6!3qcg-&M|>!<>M_y?jewcB9w_|T$#y< z+fZ;9d{RkHOeWYVgQHV$V%$+ledfF#)DhzQ3~#)H*1tHeQ(Y{(f)=2JUOsiGz)oqGX^!8XNG){YY-K2JGk~zZii4%UEKy0shT_29 zzz#=^CmZlFCIr=mwNC9>SGR#ddXje2ftOonUNGRMlmPJUsh;4v5ND5$XlpF+>)+39b})FUKNcaY1^~ zqe{0a2Nct{Z}_YKX?Xl=Z{ZO6^pX2^pssr09DGQ09(1gcBK7kS71cM&!UJ!eZa#Nm zlHaKqR>~#i{l!G(C*glz@g=Ti9SlRyFTxF($Qmob5DDGblf+D#R${P+$%r1X8;gUR zIYw6IdXSOWXOR=3^@wB|K2;Hk`rme67MMs#BaLDUSL;B1?L>}UK}aH#P`3bXSd>*Q zMZJJXo2^P^ds{X;zH>`X9baBTrM@FWtb<6>eYdj_EK_9( zeRoSZ73~)Ckb2=xJ74?S@U^d5&|wFteUB{STI#DtOw~xLeoL5XLd06|k2!n zj?3bEK5YAI&*dD@#=_B1v?7*i2EX z;_A8Ee=r<*ORMe*5SNVs0;y)xCv>LocMBw4dNYpcVGPX&E0fW_jiBh9nqkmQTv5q0 z!7=H9UL*%}H>YS4_tQ>hH?>~nh)NToIayl$Bre?8^-o6t&rEfS^uYNMQ&J9;@V(hq zWAzw?=hIka#u~HvxpbSz>sFn?T31LFI$-10*&k+qFEU9B8H>L6Lu&N|3(DECQwUWK z(3+*Z-O4gReAh|+>aT`>{I9%065|A&tchp$@1=(W)hDBTe&2g;;OaS-(SXKx{cqU% zS!qoE6Ye)xA|iOU#(Z=mSmnGg4(n1!i!dyjGwc`OQSlqOwPZ)T5CiI!%_9_1KJ(o` z@bp&=T!H*;SS=^?LRqXjUV&IgeP;#14A7aZ;-V%sE3ALxwDzRaD$!=9q0KR_lGoF^ zUM|8$TzM`21UW<5V}1lRCTo)(0k)r`xS`4Vi~@i1A$`!&u2O3}sOagJLuB7M!kNBN z)F~=Yj`-&At>3LXksEP)8X9eP*g32(B5TMynVK-bFmjFK`TFql_#<%RWMK^=;4X_W z#fVWJQ~h~s9dh;|pxU(Z1Hu`2(+sAPU6L9Ob(&#`7PY9QgGGiTeei$**V_-b;AwrD zwbE?B#>pFsFB_-%0g#$1!2OG+g0jKICiX&iu+e5txvEO1BUipnZnk!U=y$5JCiT?%ojx7P8$heo#(4TWnzOyNcmAe{=ZF-z<3mcgCzrP`Rq4 zFqM9Z{fd@kg=<+#?45}J;&v2Q5S&=VIjTWF`P||Hl(ztTDZCU=M8ehXUN94inhKxK zmOik?NW{z7n?FHK8rxrlwT>5#(C0(khUxqFm;B1G31vwhI7lnPRtt<7FP=2jJVCnF z`mv$mqeUOrFdqWNwz4WRq*K6W9$4r7>TuYo$Eid)Z;3dh^It++$i1sC>c#c%y~APK z8&khnc*DN{n#_F%vKG%otzhbq8~EeUfu_oO#QHSDxT`$6Vh%lDQ_wMQNKR10) zHlhobIc@SIL)Z|FVGhq%ZPV&p(P@}{{)%vzn*SNP58qN1W*H%xVx>i_bWc5+9%Ay> z_W3*2u()rN_|KY$Aqujy&b<7KcQ<$>1Yq4h|96J(`tOBOUn8VBpibPRQdQ4~eILED zzL-AV7!0pgemUa)msGNg#-v;<2=6B(peOIvB0!%Fi-<6*{w3Q+J?E6)#NUB@+-8xu zeERFtwQ_gKs5$76fL75=WMsW6>H>4HyVN$-Q0To@E(a}E^0%3 zPP!+`7B9Q2BY*IQ71U`#ZwC2|RSN%ySH{RNie4h-rM54;TeNMuzFPt@mOSdGOmJvz z!7~L8Z1I!$%MQRavL=0`Zp4NpR~<$@p3klCu@&KF8!0+4p^e-rk7Wx2T_?{)WM9=6 z@NLJ<{nnD!Nxqko9NjGfg*od!0?(l& zKL*ItO0`{A))nmI{9{JnM`T{Z9gy8U$by(%bji{hA>Cfp;`Uw-FTHNslrAE$+~R71 zLj`w3W8|zs&=?w=3KJ&E2xZbq9PQ|2yZ8&s&aWeQlBKCwYx2!&X>=L*+{D$y6Q*XG z6{rYl0}GA2$$IJOK3nIlej}FpO-~db3`hP8pyFlJIs>i3qGXzV#*k_9dDB5ik_MBg zghlhCkIDWQ9W}9B5_~KuM#6PLiIIWFYA!h9?CTscI9jiv{w#^>5KQW_NmCN9e$Fof zYox#tPY7eON~Yk$Ixgh6n6;ZnpRS?USJ}_{FpLkMxdnv0=h`SFY~)WHkimIi)nqSxR6*cUAng9#wkiB*AsLsVaez+WZ98Zbiv5&5B zk989S_Q&2Ook77jbP{O@@pR2V1(9qCvsqUml$Q1BRP!95WLo71J#4e9wTeMl*HLr` zv+32^qnzO`1%CH;hu`g9wV*h}|B5R>>h;sbfE_}w6I6yh+he<0QG-aGSJ9aoNz4M< zv3EOk$+_xBS#MHQ-OFLdbqK=Vn@<@&37e#)fW;IsX<_Ygk25qt z;k%Sf4`B+t^m*|ZB`-GS-sxkVEgnCp-q3=Ix${`Aaa2z=doMM~8E+1&<;$h4XXg+I zI<+F2kB7+~HDlS?_3KljmY}}bDrgaNLo>Z6y`EmCX2^fVZi(#FQCVdA0ScJGV?jb{ zdepyyKh_#SI3-5*ej2Un&}J0kWV}GPtqG|`4$4VKw6|r0>ix=|HQ{HTIX=CZylE3d zisb~oOAH_=fO~g5CJJupLkk61Ydp;Qx`tO?#)dYDu0LR}*7^_m>l~p!ZdXMh7fDiHj;_d`>sN@-dw!nGax~G6lHgK?JYI< z80L4V%1sFEnmTk#Wr@^g7_D!m3A(y{%6kN+zX(0^5^D5yj=ok)WX1kf{^v(9WtuE% zk4`E66V4XXx_cMI>vO2bo3N`ji7qJW@`wx_}Yv~}3ku2!V!FMu~KJBa8}RM8A`kZ`qzm z;X^SbV%?9NV!p}X^foATzOYN*nE`x@1lh_r@$jb`g}=IuqmK^ctoX z{Pnw@OJat?WcN;7s|aoZlDOW5E~6`szs$X@xmGWLn{y@gaUh34>dh~hOPo%mO50p9 z;A+=lWC1B9^AHhSKsZfK*AZaqTr@`LcZOq>WWEXZB!@S_*rDa;YFW(jnAKaVk-)Ol zduF06IDTqT=O0kiV zHF@nSJ#i8A29y|oFw$bm@vL!%W<7?j$;YQrNrX6r55wd+z(FEf=S7XiTsSYXdlwN~ z*FlpaHnBAu9FGlCEwXD0v4A}+G9m;|DYU=zIm=R8aXEFqkXM9syD71EtyEHHJz+Hu z4*gl65tADqmEu=x*AIdn>}SfgCrDD?Dp{4acM~x>iKA~Q({y0b(TeE@uPa=kR=e-| z9Rnd%WGF--iRxy~7nOl+SYFkkAn4d&(Gmyr6YrV88+`3rm$ffdzydru1ASO@irppHMGQ8^ee1X=rfuBiH(euux$yb`+6N;$m$EY6 
z=#*9KX7JqCM++@1_8|pyuXAB>@ByHx5f%ej7Lz5Gpry%+>0X9J45OONPg|LxnCk7D zG2I{h^=R|sqcf3DNfkIl8$w0SUm|g>-oiN z2s-vaVsSd3t_8r|b5VFNhpo^0LfH}3scRQUtP9r0WdSMC9p@N8fy~i+Tz-D6w)gNL)$2i0Qc))AYp%PZhu^WDWX1 z0DnedcV;&oxiFikH;}>@?f0=p^891QqAO<^Fd)A4VLT19;6HV@e`Rr0=$b36(t7#_ zC!#l-{y@yG-WiiQRXiTqB)oJbq};LpL@Uvq)S` zDjKc8TKZQj=mC!Kw1Dg~YcI;BBEo4Yyo@U}!cqVpiA6)uD^217sg5D2kH9bevQ1TC zqCs>CF{?i;@Jt8@OHLGUl5{Zv9GgDikHs-}`zjcjpZf9ysi({-P!RKpuk@*%E6ltG z@&Qp>%2#{_y@6;z?eQ-}3&c8(!$;J4xm)rWS4G3fSZtQ?^lquGcfqyQ&W;4Qkn-C@t)tdCcKsaPwq2$@24gD#hruw!Scpt6RbFQ^*j&(#ZEv86scm5UELq2 zxJ^D5M1}z2Y-$(Qi*kPblGRw1zPh&(CeHnYSc40W`Qr46+c$sI{h_^ONF&7d@bzc4 zPBv35g1IYp8MZ*V$CX}HT(9-+$(9A6b<)d)#HS%tTA@9zD(e&uogaLNH1fB2lq7Kx z1KGjbDd8bb=XoKhVtokEYxjQ<9W*fX{4}htG2NN4m-kS$MVn)183kLu7{u0*7?#?F z5hhM0OhzGU?O*8B)2=knpk-8|Chvao_UBG@WK|Sh59vbf1FglpVOw$S7%uCxsVqe4 zqopAO)v~$NsVhfQ!v^AxvpPOFKn?+}EVslca5B;BF}Z{8ycC#ZLfnT3!&b&G9s@oF zk(ffCULW>hQXn#jRE$vZR)2?z)D;a>{5ypLZtKEvBsuCUJe8%cxBN5~WEWFF&6vub z4fi4EP^7Y83iGVpoY;ef*yEP$#1gS5Hl3zC#*6H*@wKlF4ySSx;nk-rOVI8_hqazU zY~tY|2?H*FLMx1gxVaof<4(&u3n40`0K_HHhi8<#rbrk9P)P|pIQ4Rhy#dR}bh(CB zv>$ct8}HE{f59-)z*TM_^r|8in4MVr3oA_BJtT$Gm^zXwN8C^VRUx&FE}2;=Kkj~< zc6v{e>5mb&&HM$RqvdXM$3ufJ}7`O$&*xus}P(V`;aO7E&{8SHtYsZ_L$g{6vb zNDj(VG3Ro$)Ktfts^ZJ^gIz$}G2q5E^@cuoDJs1~C3!F8NDJIWR4QkvM?i^>kqaIN zZ%9GB@q7kB+FTIhRO&@cpO^z`3GWJ=%(scuZAKp9Pa6@TAa&{j zWS$3ccC3_D>uGQ?PBVcc*xZq<4y?MyP$0cNJ*-8_1}w>YF?(ILsK45gMzUcbi2T>Az@?ge*awqbM*Q7{T z2_pBq-lo@qwi+y3EjHUUFe0r~4v!JiI<{%-1ZXW*>4kR4kD|dIH43N2A|f}EiBL}< zM2-YG=fSipkBTS8Q%OY;>cfRnZhA^6cBwcp^|+I3hXaQ7GVe*!=!)ZYaD)E_DZ(S{ z&mEW)a8xce>EhIGb`y&wS2;CzhsRB9Vj0vckNwZ-ObEJ^;NLn0k;=$*r@!y4iSLYwVT2# zrn(DmtQ z0$1e;#kS)Qv?xq`6zlX;&kfgEA$9^gJRVGx;DCFy#I?z5 zfdP>w#0F4oEFAWI4S3UQuU{-+s^1jR<5biiUucQEXRCA=SiIf^NG&qtWeG31^~6Nc z0O{L+Xt+zBIFbh?yL3Xe7QE2HYLKT$GqhO2bBaxtT_mYnH(=TiD4#io7Bb201<52t z7HpO)17Y`dq4qn}{aQG!?FvMfc&qjZ<(=C+D<3k*H!c31uQ=SU@C@H{O@ys4Im$~fcM zx)c5?*JlQ+rtI+KOz3Q-9&iOM++4_iL(6J$dybeQKnwGwLQ8|dy!LzAS}&lQ8Z1yz zsQ~byKaF3ZVoz3siX^CX;X*w}8Y~sy0RKG07pt4y((0~nVPW(j5L{B;{NXDN8^wTW z5KH{^_q+FF6Xgg{=^++tlkjTSk3Vu$FtUq{DQp(M*k_cGDm^ME4U~re93) z+b?xFP@=Y6-+dL~D5g&L?%%xqvv>dYwj_78YF5=5QL{pP%6NweHy-QFMe$1KQ6seR z8RWSF1Y>VaUxxnuM)$s#qggqN44g|S-R%%qz*_G8s$)PrWR2Ww?`Cr{7O03_)Ll-`i+gE%G7*LH?9aFmhk*jR6?<` zskCukTjpX1@5LKbswLA#7C4k>ln0J4#I^Hx70r9KZZ#~lB zM=XnV8G&ImfRcRUMe@6Sl$CT*YkuzG#qwcqGokE~ZU#b9>?~DADJi&X^HRmtg22?6 zl<3g%pqWG_&5;tfP==vNAvMfm(bTK}`4Ax?#;=IM{j8O#@*m}_AF5mdA1)6d#KJAT zaNz)q63(y#qxw^LZn=@?cexL-3S|1qZH*u0PjfwajA~g2kWitY@5zQP`*6YcWq0`~ z1F}yutBGvbT0QgWkcLqx(6V%D0S8pLVAh|>2VVG9@2Ej_?hW(sV^^UNVWdEqDn)&8 z8mFn@?yB3QmGIPX(sW(Br%6V!EvkD;jB6lfb%d4%#9<11cxTRp$}3WArR`^4(|vs7 z;vnoXl8}4qyX_|p5uE2rTeZ&|JD(o( z$-{U+gV@llXvn=-C3VRCQ|}ykl?0+oIDxZ7*lfR|!k}U5s)r7VH)~hygELM= zfuW)ls(|9(?6_#oz;j;RV{QZ;R7;d97xF2vr1CMoKI`Z-;#C$Z1VTmh!=r%4_+d

r15%>T_NS)U)Ji-wk#{lMSfdUt+-nA zV78yd1e#2i0gby!qQR((o4@_B1v1qN$S8M@gmp+QQ z5^zHy)%%&CK#K&v9u{WW&))u$=<-+ow^e-mAN|26V_9t;X2nUEA62=|zCQ_b(t$cg zM^?_NN2eT82N1+4IVjq2hn7_0ZH)`NK1#Tvned!hL?hgfOLy@?xWb3DiJ`aK)A&SO%`EzG?~u6L1(R_nKUvT1WGV&iPeQ zOXTW;$Y_4vU+#~_U6RpYF(T&9nfBYc0+i1UQlVRy-I|L^EYR7mNvQpt?3O26|6TQ}c57c6qE=_)#7cmzQY})a9_)%BtUr|Q{96K?SW5Irfe&=Yh3URqbcWF!(K`5h=1(bU-5&c>eGYUKz&W zj&D_(W!+X=Zy8|F#}rRcy+%VCfoz<4tNs_tHL!$f?I|X4d6UP4$_XgYf;l{br2HF^@517v zf=cXUIpxvVNUo*n2P$42$B+v*%SFl` zxhs|8UD!n3vCux?l0RmS0J%2f3ZxL@ofopw_vh5A;wSgIT32I_qIlJ6-p85*l(bA8 z6H=fGWcpI<}xMWdS{cbbWyF%N>H#q7yb*En7{G$y+s^N;5s)*CeEcZ*W1idmm%!9|@ zPtbkkXYG!4&L;SqrHJ!0Ixg3smc$k6A(=_rmy8@%@WDgxB35^_GPga2d9+_(gduGz z*#n&XI22eQlzdGJ^*-UQVNpInT_9Zuk9jyk6f3nL>D#$PRE5sE9lE)vUCH1~E@&?4 zV?_FKe3{#$g0C~I6%_mM@PEo;hNYf;Q;IPYOy2)Y1*~|`+2GnWST@;~B!-L*RsJe< zU&9{VGHh2T#R~{+a=jnVL==vST{7=xm&yoR?kJ=aQ}ko+EaHbHYwgPMOEuS6CG{M& z3emuV@u-hKc5Q#`=PTd$M5=9xb0?J>^gG>m_!mpVWubmEKC5iWSamky1GjGkI2GB$ z>*C1#lU#JM+M#C6oVsb@~V^2(#Ipn>==3~e)Wkt}g&U_+8U__l9e1&W`h2%no=p=~?lwXfE#5Nd} z*!7B{o){|e3JtkC^S-ps!lQ{$OZA=ba;v$p&Eh`BoK>9CPIy{1`&Z;w}`9Q<@gRLTf;%2 zj87~U_u@t2lOVFnjy5>la#Rpf%kcL(c=9%4g|#^*bi$Ns%oUp{cU94Ke;4yfNTitX zWzQ9sIym*B?UE_Ri>O_7khiibiabkbkkV^nuf<4&-VGM&K5-blICHuiYzeCk?xdH6 zvA97bt=4qf=X%7gDq_ibu%)B`04l{xL)W(kHuyL5IQ_VUQmL!Z+%J)_ydHf7zJXDt zhVU}H8n!cb!#gif%V#uQzDU^k;Q~|q5KSPC+y!V>S)WVsTNaj# zF!22T@~0?SL--tIJ6=rv_hJ_*VW2E<0}{P{*l|Em{&`!TQsJbnt2_d@oVWo!U18JxutTyF%OyI*>)@B6+GXcbVQQgl2(Z<_RdX)p;zV*XW|OYd#ZeZc?h1k<7tyXf{)9 zGvR$Wa`^dPANgRC&XAEFLE!R96)=8x#?x; z?U31BPGb+3^JH)yo5CeDE|t@;ibO9Fs^@W#TBfL?<+ZQ&xjxT8(XH^}>%yBLQZ`*k zVt5J&RpvHB!fEmsFWI@Pg;gw|U3SYi5cnR<7PK3RCcXq(-g>Jg?{BGl>_HNJuhPgF zfjnNPbw-6r7AjVP18b8`34B;^IX+?mNIb6(RVc=6Z!-YijqYV=#Z}rB)eCgRPrzUQtg5zUP(-)!vb!Qj z;*TTpV3oFaCR_zlH(nPk1LVvSj=(EgwBYCLz<)~}Jd%BYcvzoIRg_YM5DyKB&MLeL zyv}KA-mg{lS5uNr<$JLqP;(n4rbNTZnQeup3r|D05fPkzc(i{3>d$-$IiMQOo z`Fi&Q|M3}K?Q!N5t#t_@+`jpz-PhwKbgtx)e#r!B01QZtwb~qj__TmB7yF_6H#Sea zo)X5j$J{0kEizp&SoNe#GwU00xZ<0w{)A-=1qg;hIK|rqDVk9h*2y|h=Ghfo1AlEt zMUaM=m$4z3^12)2E#xe`EV=0lI$V@KAT(iazO-N3K(Sh(QT?)@n${gyfvImxmD55T z#*0a_I227SDTH+>A<~(P7XjTjQ7Gpd&;Xi!3E{)sy|es_zyQO-v%#zNeZsM7oxpU{ zd1a6(0vxa`yfT{%wG=Pf1$)d$JaE===}x*kXLazz5N${rqxy8UuUN>Y862ub8FrYZ zvB&%B&r7W3K|$PUZiHTOgb9rJ#$D^kjA?PIg@sclfs|eui#)v6Q88vZhULQsdDQ?2 zh#8+8i z2^&ytpety*haz%WOES|IZbxRA(HZ67*-?i&_OL5A=jK=qKV9yj<>wF1&pnhq1-Hv$ zgQa-&xUm;IiekQSf0a~vsRinSxIGn}{m8@^mnR;NnOg|%5h~w@g{=PWa70@7!qO2T zy)IYMj7YCa7!+kh-L6HyaHV?@m(16?_q+?Iiu8s0k>_6wb6$*?aU{H}&}^l_4r8_gT_kaO~$Df-GWWnTfF&PbH`B49qEBh!IkZX40gP zWy1AU{E4fP9qsx8eJt0@@=|Ep6EPg^F(PluuAtL7)yCo;xqU;g++=~sHsY#^m68+n z)cR#}tmx7CAlHRFm_nnl#`bfD$VRul?8iPA?B^U;@S;av$`vlb17OrdE8qRByZu${C%FkS_ZKcO z;T9>!n4QXCh-FZfR6=VZPwJnL-W3U`*cPm(=QO&2t5&X}C2kcGb<6R>NyEuPr&b9% zy&-r145u`92CHRuTHSpmytj~_-^DLTyohfwYtklSrHQG3%b$F#x)HD9Z|kHx_-|ZT zB-~r7md+&2G{&6Y?VO(Om2C5DMD5ue83@5*5qDX){6 zRGf;2g(LANFwBjTtY7EqSh|v`ccng9d{l3eQK1AuNd_g>44e$^? 
zFG}w<1ISF`F99b*erSXTE+#s9EIidEE=Fnah`loL*g zM`x6#t-y2+ulvRW>S40t+g1$XsbJ>Q?5yyj+w!tev4X84S2_;DgHO!qCwvUbZzW9u z8s4w#JoXul40$lb_}zc#ZhsbI?Q;Z_ne4Aq0+MA@@YnObP-oDy8y_`-1&7xNC%1{W zM!DK^NCN-OUq)q#6=pHqr}Y8$XsHoS9P#o*#ZG{{V$l2u4jM~LgFF#=V%UlVbw>`4 zz58`{`*+2rk*_S<{&jVY0M+Dc*Nx;S;f&sNxOdbD!MYWuyjX2kwf`LA;X=Q2&^%L- zvkK+2wH*Vo%S+^n=8s__{h<3%Z01LwbdO?Uc%4_oq)NDe-Vy8&66OWm)n$mZ#!_vz zzIWaTR+n-mjjmR$W5SMBOahiEViWI`5MkpKP1D0>CF!L~2D>!({9@!}Ox=$i!eKz> z^MaO@+Uv)d(eR|3sqUJEdc5B#c10cGNtqns!&t+3L?l7~c z-u<%s?N2=2wZZjZB&RDLw*(h(T8@QQS9!c1W$!hr9DN@r7fR+RDH9qpI~D`&k-q8k z*(wsx9Xy|ki{>}#SX>s497`z+u&C@U@B`0Mo#jj>5NE7ZpPcKkf;ilgJi>UQL|+fgTVSwTY%9 zPn(cF+NJG8Icz)fzUf8cWj`SfK0fgGWHQgA$`$Egb1+3mh@}a4gVQU@mc?SJD*Gse zNI#aAG?$mT=L^)0r ztS2<#sx15tZ+bbOhF?!mb0JrnY9-JU@`)7huGc-Xq%I*|F$j3b0;~^UpjZub-Xy(A zIV!AbIWQSnco@DuWve2>0d{WuxoqIM@(GjR^#PZaA= zARrYYyi(8uldXOG=JN`yUE||te5BoDT`6L>+&%GAHo*%jIhVKx@ab1=iG3tjXo%-0 z?W+tAPJuNUa>c4$4)bDM?*4zu-t9TE>^k%N8yw+qgx`)@bdVgA?CR?38)Ha0kw5|H z!i~tRDmF!hlt?5lT>ugdAPZeJAw@_tejWR5=CMrbYK5(lWZgAVL>2gd`S8 zEd$KIV)MN<5(i4ekaM`IBL#5@xZkk_4V7FOIU7+*^cMXTR%Lunrur-=a{qewEg!?; z0-nHLSi&T^$~5lw%D|i{d&V*MWmZ*0xkY2xm%Jjc9G_EWw$Dzfam;izDyrl;L&K4w5xqq^k1nYYrNJr)3@w=2KO48-WcpdCN$r^+S0yve@s`?!; zD9_+Z>*zi&{;v8=*nK>~Aw5D zcr{O#Y&U_fn-pLn;hury;_LROZDpHNB>%*u!Rq;ue` z$0C$#YV=9jl-qlz5|={(F*%p<1$maUM1%JLKfIuQ*&5~Qjp6TUX$Q84OeFO_9;Fh? zMae}Z*hXVzc|ax#W0FuCIWPoUg*O)G1anknyu}V4wI=jr2NKdU3lZXM9R+;6q%q-( z19sUUTqB{Rmrmw2kz{}9uN@HDmusCyqTEF2<0ZEShu>(zSudq>KT;ec+N4gs(1Dr} zUEdUZR6Si(N_=tbeBHqzwxM!$G>gJ{Tnd{WmJ*XA8FnXjpcOJ$8O0^VG-B<^P_1BS z2|~fwqHltC20JMCHoiX>s^GaLMDlZFmm*FROWkH4R$gzCu8#41MhK{InXsKYEsy90 z6W_2NCTBh_(j;XSUNV+7O#|`s?o)Fx`UmVtwKxR;LtVFF?-eBl?p_p25&xg-J1wpu zWj_j1`&H;6vcW?x{;uQn659#sAX75DGH1{X3kg}Z*F7Ek@<42AR)Y~_4>UcQN>DCD z0RygP+yth|m>h!jCPbZdram)%RF0bO9AMFvRktQO%sE%)ZBGjksTA5b&qFT`njuAb z@;Eg3jeh8pB@gIQmGA@)Vly&B)#9_3qa96f>rsFBC-GFNZtOZgbr%5{^u6p#Xz_BP zw4kD1cDa=F_sb*`*Z}%v2`&!`u}X`J)0~3#w0mkZ2bR@*Y332cXW#vY?(JWim-GBnZ@In3`I`nsD;npb6MArG;z>tQ8+SlM?V!W0$|_!iP3o(A4J$j15ep@3_=JxXSGK`M06K>W zU#voRv;I2uqcDM=ID;z4+oC?m5a1=w_`r%+-MR>C1ix|A~?1@DR4xW3Z76blj|4YuF^vB z(0Bi9_x7LsP>i5jP)_Q;0y)!^^jtK;|Hkj({J!gS?E7ck&w@RAV8SzB*1&jblXjZU z)JX}dBs9q$!&u9eEf|JH}-dn9le@{(HWVQjJ=6!9-UY#JBp zXycnWh~0~lopS{1;^-i4yfF@EsWuu}h_BF+jLK`sC-^^n zp-(jnlW$B6L>snCwKrG*QEo|So5ISjDkhXP_(c(Gga@yySJ4Jq6$v^*MC2X98l%~` z3=Qb_^|O8~ zMhBb(8d=Ibo=c)(YlI!Ar~IJFZE)kH`zl`0s`zToD$?$1aw`)mKGK3AM&C3bKpL@_ zc|zYV5sPP?6@MEZl~HCz<)W%g1nDHZLD)gNPR8R@5J@SzumD?2+b z@)&eax%y|_cy4==3CR5bQ+HV~4YQHL-U9+E}^*kD5GRxRI`6bOsEmN1=Cj6eLjg*3%-JRo+8RlSdRN z2WussE!P9lQjqutu!9CCE{?71W&{Mow6@Yxq%Wt3lS{y+&%uuM0u#0pw}RX4c2D=? 
z=%tF-ovC>!s|iN7aYp_b72r)&JD*nCR1NYhlZLQA%x3uHYN95SNl4io*oMWWfCGdr zlVTBz%~6-8kVKDViBgs^6qY2KD$1y~Nn012EZaBWwGr&CRU353ZNU{58gC^;)ruJN z7nmkOz&v9}se#%}5pgVQIklfDA&||46c&frDWoM(0J>L$m$ZO`@hTppmP2Qn?rV_1JZ|OmLY*OtQEvS`b z&M?}GrEP7^nAXi!h2aIs86hgISX`S&^p}Z;B91!_N|GI(N7;^_TNf4*gd$wQ9!(8`PyFO7l>evSd&@rhPX3HE1>RocHQ0{s$9q_R#7 zIi5-L8I!M#GMSY$d`x)-A{&?)B%d#*w$YyO3Zn2tdz0b+4}?oaM^*SB6J z2^33RLsoIf^DT-4?>^r#vt7-IHn2-SO$A-CO5|W4lZ#bi2oTe_^xxn8mtXyUclVF+ zchvWs+~!#KK-~`F2ZsN4Ww23W>v=hzfHvw4#>!{sui*@H*J|VydlfA+xh5cNDVOou zgUVE4^uGtAz}AmqQQt^HQ~$Af3Z=1f6D&Df@KE|`X*slxK{@cNpaJgT;bB$b3eI<= zuoVE9BC<}F|AJi7x~0ay%$@tFuY$(@pJyl|si9Pr{M16ng-Lb}B3 z6QyoSD=JczP88V|24|+-L-J#;CPw{v%QQlg#t~@X76;{T(53{#dXWMN7Wlw#-EBae zKfSFZnL9h;kTTU;g(s?>C)T`Px(pA_6iY1{{$A$fVM#Y;;bN^j1gSwR&D-hIpHc9zF>gnd%$eR@cdUO>0uVkFl5Ur29^`PLWBl*Mf zn8f}%sDnmq$0uF!{D}gzoRdTSr~~(G(rB5YJ!Jr@~`qeTF!cIK?yHtKJ|YJI`5!ngG?tB%NHkV zj3iqql`zkBvgppzds5v$*D&4=f<(x~CqU$rkrTdJiWN+iLxPNRL~96uy3~!4I@MW=8v=M4+*MM` zTAf#!V=x?d&tiY73zYX#ke>UZj6}kyXpzMoY>uD%huzzMjvs|`frQ5)=zea-DXOvZ zhaXxEVYX;Md!_h*TiBuj1q_9JD8+>=U(+qDXsJ5CY}kEa8ALl5R#U6tK*o+sI7g#A z7iUL)D6+DlZ+L!%*-#$jqIeT9)@{5}7D*BLBTQSz@lI#7?nMN|S3x9;$i+KQCDy!P z-3+ipb&P;r9K6z3~-&S1atCGv7Brt_*m9-bgt;TljFdni!?S}E+TL7v)i~}}*j1uNC^r zKIE9z0XG>Y%1*NHckhMjx#>zxP+!7wdNQ9Em=e?k`C&kEf>bU1uz_Y1h|DHNXRG7Ry{S6oT%nfU-~5{apkNChXTm1S6`@kzV;Fa;VKu z@vpYIUkoXlq+&W#V6w)Dp3i1*LhWNqP*o|%73G#RNf#18jxe;}fr$Wh@FXVp@aH?4 zDBu0<|M&lIZM6RXd?EP$z3$tl4+Wd;*t{^wePv^3p$7qFmopD!u=>N?Ng?;OCYWDI z7O+1vI$j7!Sji1tSwJcYF@I(s&;cJY4w67O!(YuiKkYV?8^P*ypT3NnA6+8k4MTw0cA7W>V{%mmix5d7xmZsw;~Y8o70Se zs{(ltVu#HO)`F{Caw0!(#y8VB{8qIr5ESL#>|+(m;r3=ntx6Tv_0c>>E{H&Qjn?e< zVQDf6d|U{kl5)v2)NI->V~ADL7eN)$--Ob|4h4k>U5ziegRL)_W^IyR7*9}a1m)}% ze9Fx5_z9>QR5+fl%LFo#HI#IU&$%XKQPg-XeK&f@Gq30^;=VamVR;j2OM=k4kF0G!QOkYFA0T%9cKQFikkp{fNNU^)-VTsHAK$coAZ#9h3HqUwDCbzyArG*rhZ*A2{jhV$v15j>{-^>NZm*%$8in`M&QWM5Q3Aiu7sH6PsYvDxSR=?otIdO3U?` zBV60WO)rBy0Mu_PW^#q=7&0<^==D`S`x8uJ=LFzxgB3)u{}$H&x++bXv}nl?%w=N7*3ot>D;o!s(5SB z(jvQI8c0_2ag-dwiTJ8to>35oH1=;8m)7c@>N($I@3l1z{Az6Z?#+h}x(`F5GxzW) z$cMbu)8ZVqy2Xp)%-y#Xi6fPd+%cIX9z?sXk}*LspU{VzaG~f9k~E$!Zq8wdyL;Wp zhd`dKigwTl-q+@Rze}`ptN=rleN9V0DLN11gj^KR4uytbYVqWhw;z7gedeRXGR49x zNX^|FVsRH4#vYZT2n->?p%%}A3NklRK~k8>k0ud@{7?=o*g#_^C@*!gD?9;k8}tVu zRhEBH^cjDPKOs)l*2+?ul%O_br{=s0PDZxdb|{V9x1~5r*2WQ&+BgmjK`*W)4Az`` z4ed0Z&XxT%bAhn52|U;8S0b&BjNa#&m=*&{WvHz=DlRr(2PX6DSHQsV(aZ<4K83y% zxx?-)6Qm5}(ttCxx{82(FK*_C>Ze`+HdTcT2m5{-ZIEU(;xUJ9C_K^}w!Hg9PRZP< zS?g+^jf>(pT?*$1Kr~ab#*0}Zu*62_)*ehh#)VAq2Tp<;e8MV#E!lbW>^w|fM~Anp zeZQCsCWxX~cKuXnYljV#l=s^gpp;)6DEX(3HfuTGm#|>9kXnn4>Rz-Yyv2F(WoSIC zQ>ktyH@Ht-sFn(odC2gY7m!nPGg^?7(`jZl8>yDzR##k-D7vE$+{U zA9tU{gNMBn6z2f$HYrJPXr^9r6fbMN8xKrvP@De;{riS_W>?Mf{N|^ccePAVjUuzm z#o$yGcd*Yn3?e-eKkq8K!8P$)nlnxGxEeZrE>&#qaksmls2L&)-E^HfAktI`SZzuq z*9a_0N>?fQexv)f_hKC_>UrfpVJZ}*f4?PJbHBzx`foG1CP=S>u=37()HN1MJ$siB zP#Y+zs7r{j)taldig8rzvZ?4@mGCt~)44)bKkln19`eJNVg(%=cYgDJ@>}C2Qy3kq ztPc3g=YOAQlkN!TjH0Pn#l3nISVF>y@7)MX@bpF2U1JBGwMbwrLfI0^%3PUUdBB!} z1xI;iq%|7HqDRb0iiPHxmo_E}Bd_h6^6NTYuVug;A#W!Bt~3C6GtO9v+sn8Gsq`>N ztuT1j@;Krfa=Dl6g%&F(5<2#@6QZ+hT;aLA>Ow^A0yJWZ4|AoZ_ zAxNNX?YT<7l!G(c6Ds~+WSAhq*xM!68gQsrwn~ecd4WTOwcs5gZT*%J8=|#9TvmR# z%B&U4hl?VYF6hNHIV0X#+*R>Do>1*OGHw0uYh|zONX&A-l+IcpGq2)QzwQoV4@&JO z*6ckTBU#RJ4razazS_*8kKQ&yh;)Eh| zg_=t%36)l9{1Z`(AQD;iR|P#ln#>q<+2*;$)5$XLl%H}&@F#|A^1JvImE^4Ln@bBK zU?eTerd85~;Oqoxu!l0}x4P5>S9gN;*XpihUR%>t-lb!sP*|+x@!kaDy6B~%7~0ot}WzfTUg@<8%QeJZB#)76-pPy zkMVVbxG>4&Xhu^3RGH-jS<7jY5skpYH^Z$0iP6YB*h*f-a`%k&`_k27hp3UIRB_7z 
z@vB=#AQx%*7A`Vi_I>{8z8`PmS_5ZR`STpz{UlDhiPzdw^fA_xw?r5GZEvCb{5vwWgS9zcvFH{s?Z~lN%o&6^1iEh(9SkXk_6hgnNt{BQ~c3)#^;*uB+OmY#wQrhT7em9pHuQ6tAG6q`&@ym!cX)DrUSCe04Q7 zt-A64;`%d>VSFTiF&}nXQ^6M@&V-avZtkdz4}&%W@dJ=wt=S^sPyKKX3$>AJ9PDw( z*sboFc~W^=r$vkGcaKX?$!%P71cuL@ZaUm2EHB@J+cqKL;`r>9dz~SBgo`06ISxo$ zJg+%m|mC3bKvkoNFId`(b?0G~Fz>iAcSuwiE+E1jCg()KKx-pMIAQxJ(v( z%5{$?Qr^|4o|(WSj;_W^q5lUVR6hKyOGHmZ{_L~(-j`H*$@=gSu(u34fCQmd>#@No zq9;aTmwW0!jw=}5hq?q)jc8xXfc1i3A-IQ{?nIjTG(&d<98K9Hj`P zG%VXwG>K9*v*FEz@r`-4=nD(GEb(qHcuB|ngRiZ1DM z3w%c8@hGj&em-9EoS2Owvrdf^qA^Qn=9|Gn1+iPKCYM3l?e)xR`1q=oBh#CrO;|EL z#$4r0a_FssU;3~6V8prAcGniIEvhsWg4r&l-m+wPSa@eV%UMvdS zSlU-sP;dk0jzi4yXS-rlA9Lpg;g~#7|LiE%{+Q4;=UJTmYtrImOcp72y_mL4fbuJ$ zJ}pzz>~pO;6yT_MGX{8p0g+Ys`SsdT_yr?qv8 zEEUP^m`S-w!QnL1PKL%@#6dwWU8MW+tr|V81-zH1P zk69Hj&b|I^LL9P5(;w`|4EUO$awcg`D7WpeVkeQpt;;$_pi-9O%gvGL+d#OL zE03^_MF0kI@v;$6LyBJ24m?~B>6MgLPrIM`vIzlqAG219^}1Lmw^#D}-6pLSrq%96 z%1{Cmxp{S2z6ISiWwhv*NwcJa!ios2jlF?M?Z>A?tdf#<)Exyk(>AVmtCzAHHURQ~ zMdH~CocP#VmR+fp7*qC0;8S2gTtj3Zd1o3<5Z6dysAhcmBPw{qzAo_!vPdc%&1?H^ z3{JHHBv2h)ENsBSYG-?uKYdSOK;=$BxX4tL$@%Mx`ijA$@xRyk_Pzws>K4G?bl6fJ2%^?0SxS#8;EuL(mZK(5 zy5b5dKhiCkm@m2!5{3e38ly@)CJBK$_Jcg|Ddb69>!gqMIaOstCTkcE{7_0$p?#lR zZHzTjiWqm3Jo@8xSGuErHeZ0BfjmB|zTeh#=Hu>Ul)|#^{CP7r2 z8ZwLUL{Q;g0&v9Q{31L2pe;#vESA(ObRNYaSkFc46MYw^9{beP424d8)GGF5>*6O9 z9@J-A7I;R$t_{PD?SRAc7(ZkWu_;))z`F>c*QaH5nAk!!U$=AcoAwaUQ=2j))i<`` zJj~{att*#~O;t=zKo_#dUlfMgmrIfwQeF!kF~!?U>*#~+*2^F3+kyb=P`C;BnZxAA zimzNqEVDNs7$%Rny_R3CD$dUmgU|#>5cAbJh68yBO=N?JIxaU39wY79K;nR(GWk5r zkpukY!g|=^#1o4QQ~TafPp)UT^8cEgs#`6hngo-$j zzn3xy#~pT}Fee_xyuwGZ5lo5N#`-xjAcu0^z4^K;O>pvg!Q1Z*AkGEy1Q%IQB(KZGlH$R@q$-`g_rH=hzqcVpjeu51UH}PRXGB9(+dzio_ z<7i1f59trP<0X(ld5gR`&I}UJ26SUu@hezLrQ(Ms&KK9Mj3I4QstH?L*H{(lc))Ld z!{_%^(rNtPcS`psu4Y&zT&oiMZOM?SEc;O$Y%ShFG8|j=$*o`?q}KhI^qV;HO?B=G zPH7ZcTH9dC0nN^9SN^f;)yk>vH^d^31nz}adoG-ILIKdJW ziIh0=VX)Pu!ci*JZWf!2J6wZu@@GUaypw_4pNwC$%QGQ>v+{N*W9Ou{vmGC1QbC>e||?8|_N*Ke{E^Cu;~ zXtuP`C>{^PZ*Rn6>pt}c1LXbn<+4p<`RJvgv)z|_` z@%%Dx9~4m_D~LOlnC4=bM6E|mBe?0xg;Fdyh(3nH{NJw%W(#j;mISl5yh_x{NlHF- zwW%#L5AY?B=_Lk7Pw68lT+HGljdn^`TskQ|QR%%)FP2HEEH`nQf7ksyNcHXwb0*>k z0plk&6$FD`wGs_B8mw^l<|jxzUoM~udd2dAc*q_O*3iVqt^|6oxhv!XTDPPA2{!uU z3R|fggV_PmQ0<5N?(&6+a2*x8T8_-`-&gR&p^7@HWRGpg3OLRy^T~tmvp7A}<0tVc zENj*ibR?nTs-+dgR=E|&sYVE-!pwObxUO#2^c)+0XLDyg-tp+OF0&E9$x!t*74)V0 z6IOJ30!(I#KNAAGAQ0@m!F?CC5y&+U_>Dypn?%ab~>96x2{z0hHqu9BLRd*o&9L8iSnq{@w1^$_?wo-LO$O&=S9$(usPzm zTLo~4ugxzm(2@j_kbIVgiYzG~YU##Op_A8hxX}&9@z}dJG?7G=V#%4A@KNxNZ0{om z$jJkn;YI%9=mMjY8&Ui&YUF4YRh~p}^!&Uer%ELp7d5XyF)0!8YO0B0gVGSA)F17y zb?dT(1S`KZv!gF~-aXC#4HlC{pEL5+8-|7^Ajb zUcatUIG_N|+V`Tzn{Zar{2BC|Go*`ZyqInogZOWA-QBxbr#RAgP0yyr?t+H zGKW6$>qpEFNJr$pRN#vQYNgOyo6)RpHd&|(74MHzYvgNkBo=)?q#-%{W`5 za|;-x5k|$MXhBm3Q?)8eAlAI}>a=)_nNx(%7}(E(mB{;k5K;c!W-Ne7N`0+>KTGGr zeP2&9Ov%>8@h^!;2=+t}uH9r09hZFYhx5e& z)hUZJGe}C3iik3{h_lt<#(wrNhpr{Mq)4;Hw4^Evh6(k5OJWkU!A?~U`M)nOCk!6&jY$&&Jq zik4;+x*_>))veI6Zg!mEMD1SETs;IvO)~;UQK#@A7iP8aqG=}j4AG&ZQM{J20V??l znp?;?)E83|w|s--dm`QlUe3PcTkV0CaPqN6?E59b!jDTybTan~MfbaGJ%$=RFB>40 zpFI@=pX|Z;k*I3oU@i!%s$L1;u4Z!Yf|D25Reic2*3D$B?z7Uc_G! 
zB5@f%zWc+sge-sI%hZyC@XE>_m_v!!t1ioi#wirdr0@f-T*`(6L9 zP@GR|`tH7lUlw*5A)(GEmX~y~z`B#2Tsj(4P$JhbnWPm7KZ>2HDvd%I@2poEc&>2r~Aebf8p~5LWIehH1;FyQfD_&km+#@E&Gt|#q*?Zuk+YcSE6#ApfAo#&O-v(0LV-`2v5dWi)2b}w}p)A zkKrD3wczHPSv6*Ysy;8BE~_D#ENwov&M#^Nm>6b*s&WIEM1X7Y6lK1>{pYUz>#u|4 zW0jc20Rc0q^2EZqul9`3iXNrk!{JOt?0rIXf>Vz^afPQw^MPK*fdK9i=%LG=0UF>V z4w6C=?C*FBHpirdjFGGw>zYDawczAunx5Noqt3Dm2_~w}iSe9QHIz#$GGWWtlR!pj5+JPjDb+2Qs4OI6E z1a#tjr>U@g*(v4UScfpw&*RH7ebN+Y)p79`49hy>ZWl2en~*O)e^H{rBV}@wgMRp- z%RBinQ_V_LsZ~YF4JqI?9Xo);bIqDX(cJ={nKhwO-=qZVzFb}K*5#q8tK?2tT-*{p zCGvJ$q`?8r^VK229|E43bz`7pbK7fH!}SVQ1{`Ya8B``+0&ZH~!Vt54TGLkPgIR!F zRzahAzjV~31@G`ni5$q9kC$NeQ7*VF=!YtO#x)A2c*MxcI53c5V_5Q|$tEbK7v>QX z%T5k#puCSdzhhMwmL9xxG^MnrTc|7I4ckc zz!upiHdb`4Yp@RiT64W_6vN;FW{5T1VE?p(rZ#^!(XYTy@W@CGnU7dzlM~40ujxX? z)3{n-Kk*YcfrcINAc{3=*cdLB`UtYA;;=w*t=R{T0sf@dM*jw7!Pnw8+Qtbq!#eVj zj#?^0ErQ+GZ(v@ZEEuE4k4jL-3Wm(P00j3bG{|e3%>qaYk~3#RqUW){@{1L)!EQ`D zl)MD!LF!-m4n_0b-QNX;cE{bu+I~=%1Sa|M#YJXvB%{`2g?DdW!}6u<;EK9eN~XQ! z$D|hVLho~H!E7F`4F}~6s?FzCIC&oz9ooi z$MXdEvloHD& zxtrqVa>$r`>QKjMR%dX9rA= z%S!2t50^-#L|HspsJJ47J~)hKj;$f0acPw9QwMkrBy`f=T3hSZ%$mb1O66I4`jQlz zd=_sbh|qAq;0SPCj8(j%BFxu0tJFBcD7t(TwrD!#$bw&XcYo=P?Gs0wcmxq^Ce&D^ zIKcGeVlm4*LSzexSZuWs;|wywxyLZB>4_Jr;tUu*{XIqa>Wqq(G@k1|09CH@n3+vT z-0_%|M| zE2WkbA%TcK^f6%#-~{+qj#+ulH6!x@Cl5D5^4#xx%u%9KtQ%ETUi{4?h?rg2@n|VB z_B*$yPnKAIi}1EPBnU5bT?rFHYVsg$DO5>;)etPp7+&scSbx;!mVzf$?Pi=3=%+Q1 z^d^Y?^}Td_CqTg^Z8h*B);pDV+D$9;$XT%ef`D+~*x;t(bD8e3LV$ZDju~uEJ1jnd!wGR^a@@S(%lZAB ziz-a|6m2Je9b9w=CJ3n?oIlB7H7)MR>=lY{DT2EK3C@!H+F0gTEJ9Utkn3~}#TPI( z(nLY4(P<-b1g@uC-#ApH!6$y$UduKu#QxyZ?z0EpCF3MK zCZ$-a)5&JMDlYN}pLk%nO7rn7*j+!Yw(w62o49vyBX;14l4L z+JDv?5nsVP4Tw+0imX#6$Ttq2yN@rxP4%EK)rL-5{lo=HpS9j*I;WMt8o$OH);p!8 z#!omM;6g#EcYg}d8^=JF;|udBd!o}h%?voGSUtTtxcB_Rr-4FlX&7aPXf*W3Wjq$t z{?z=29s_T~6r5yr34rJPns-h8DbN5C>6mR^ds-eP+8Y<~-5XC;+c3C@&^0XtPA8t)VtpE;WQ-Mq;E;T#tLtyK^ckzIZIMWi?&z-IculCWq} z>SJ6^(axX3s#l8%A0zpCd~i7C?8wcEM8yP}2u*r&9O|{I+QehyA4FWDvS#HpB z^2;6)lZJdL0PtV4(>AfzBm17Df%okp+2+da#q2>hyhrZU; zH^OtT?I@-~f5r_Z2>+2aII03+XSAO_x?NV0)0I3^fyk(${v-A$bO8|=KlXh;J~AIu zJi(#ZqL;rVxW<<87GjSqp5r{E4?pTYHV5?&D3*A@4zX2Kq^oT)7Xe6|i;+WJ;s6M< zBX6Xwv)T?9_*RAyzDdd}U8Ta?h>0WR#iMGxHS&OY)J+B?aaKXnLK^LKkGH&wZR)Oz zQN1eX2wT78oukQwzPG}I>df!$l15gNG*?!-pZEla-QLKw2l|=Z#*d1E;b#UYeBN!v z*HoHwnoVrE2VB|86(4t*M=o?RvH!a_zuSG+gj8e%5q(}Hviw+nW8qFEOq=;7+M05B z?akmX464|@2rNNmna3?h#7n{ZwcM6hbepjjRu|rc$@g)$3y#Bqus`(@bLHJC#S1_U zJ{(GSC3}KWKkU98yft`CQ%ipkjZ24>!U993D-n~4&mY0tWrXmhz>s_UEBz7}jxo3F`O+a5 z+9Py&)ZKf&m}rQ2F>F>j6z(P3PuFY3K+XCTEeXLY1ZnvPFPnz;l14X_BPW%C zKxw#?z6oY$#_31nZZ&pA%=4jXPfxsdb&5S8(>|uYUKQOj0uejOT31cNE`Ow{6zHq< zJfe|4Op2fssZVIY3{1vn%~40bqBBm%`c_ba9j)3s~j&*7b`q+IFW zMdVB=lqi1N7VY5UA}KS4O1iw961nCx#T?1=STd18>5GZO^`*6a;jt&k(~AlR;E*oz z-nIaIg)}j-8A5Ll6ZT*PL#&Q#MCaZT<0KeJIt(3#(lk>-Wvm|a_bL-LKJPvcX?4=Q z4(?)r09NxiOg*GuvOSvhkqNL)uM#cdEKh!4MuIu*D zmig>O@ioZKmhB@m+MgkM-eqk=$I4!U#I}i{hp=@26&CGC~47xS3DBfuuWMni7wRQ4v44B$uXDcb6p* z_^jJpjWw!D7-lmm`sTz?$ClfQ2J$gbU5!!;(eCpdtAjhDpT56G0s$e3Ca{2i%m}_X6(x!@GfCPPY*uP~ zy+$RarkeWZKasPP`%w0yq4ER%y9jmCdeqi2<+!?9%nnNo4_6oX!fPM_MpZF>+2`mo z=M;hfU(f>!S7pgO01F_>m6B@{=OIpDqbsv}_}t>CC`u;#4E=5{BtU%uk2D!6MTF&`2`zVULl7(~A1PVs3c}54I1M+~&N1T0;8oSoxSmqlRo&SA z*GB|BzBQn=NCS3f6(?@0pp z!={s~gZW9z--c8seJgOR1KQy3Zf<68C9aXb5?kwoGL|-~GUE_ZW?>SY)4H>WUrY^+ z?yc2?PxrShR#l`p82Lz`J(^nWV2ZT?;w$;=>JI?ay{GQcF{OEjceWzTq&n|pAgbF| z39+DUp?C=MQ{q=?`jGi8Eo`3IegzlD(|6ddI*Jr41P%C2R}-2bAcF%Q<0Zx8df+rO zIWP%IFoSx+a|IL&pRLheCy1{|__MWGG)eNl%55|mawV3Dr!yx(#!62`6Q{+ZZl}v{ z>a4>#vv-vk=!d{asF+OH5OP1y(7}c}2J(rsf^rZ%(y; 
zODBEVUCq-r#{iR^9YL^o7+9-<{IT!5H~+T#pW=sa1k%;XLeUOCj4`dRY0O*72kvVw z=UzX^2=Iv#Lj1ipEfu@bi*1qt{isy(pd6ZlhRFy;e@vZk!D5^bhGj4>vf%CC#dCnEjNNQtDdBy2mxt`K0J^iU zV`OC&KqC@&Fxqd#>9J+nahG#Dq$FMlp6{B#C}41tkjgcpLso^FJ*oUVS>weO;;PT* ziJ~AJSzs|Pz!8Y}wBTWEkS$DWZW4~t3uLfqHG(&}0^T$dd0uFsx}Eal8a`)2zW8xY zIS`~e8U#-eSwl2DUspt4N4(w2Nm=&D({BY7kBI-Q%T-1dmPR}pkXW(Q!j5@AM2^U#^em+tad}^#)HuXBkp&Do#J}z ztnb!*5jcx&J1gS7AO2?d@80u$nh<+I8m4hGr!>H=e^EwcD|xw;OTLyVZmUa=Z)!94 zjp6il1p(G&KnIz(z?aD+h^3Px@%EP{;8-?Yad=l$@^|L9llk25ru=Tyhmu%xJd{=$ zS6N7iQhZgrXLHRCYf9pV5`f7=jn(6hVU&AG1Y=KcUI$-KRcm}Tqt{>M15j_$2Z9yx z$;xe4`<8n!-#e-om#u!`s3~O>9*p4=tU-gS&mi zgPUCA_)>zlsXol++cqON^^6qZ7viXJk1Lp%7f@ z7PT(t!&nzi4M$a^uABs$;UuEZ(;1`}2@-k9t)!Pj&LVjDY4?V1X9LK zav(ZwR@^MCf)s?*PD&>Rd(x<-3@iQ_6^&qIcx@$o^;ll-b>X?u6#Z6Pc8z4%%nQ`E zSqJ6Z1VmW`II_Q4L(e5}D>DOOb;Pvz zCF+o*`XHVUxWaz?6YooLggZd%-n=6F-R7Qe0k$EgT_Q2fFsPYceY4|qiIx6ZHcz7q zVhGt(_WkZ#O92D*Z^J}-KTzp53e_JJI3GS%1zONdelAT7{2fxMFadqB-V~!5fRXgO+Qh-3Ip}f2XRsF-VnbG$;Y`A?@!aUN)8Lw zcVA?S%V>>tfa^mPBaMVs|7QEV?h_ zC*dk}u!X1W6>84$nhPGYdnxQ!`qRLUuv+qg3RE&|hK>s+$Xrhxk29qeZ>*uO$c)3& zig51ZkJFAOvws!}2^l!num1>*VNBn|)mkw0t zt#zur_*e+ww03Bza88POYadnY6LsigUb7iva&U-Gv7BH!o+w&VS1{r}-^!ajUdXXx;~DS4w?kJ_|Cpr;EQNZtxDX_*r0`ak6uxZcZfH)q3O6b- z;$d;Mnnh%xN0@~CNm$V5N$44$_f?V*qf=Q7@%A+k4}W;pwc^`!8TQc0{+>v8 zLb!|cP%w;MltHcPY;x_c2jYGn5Da;&f{7yUIKE_01(X!X^LEtDflfFV<+f8(g|sn& zhyei$maiatxPe#YQi4pPIiw?N9^$6txy-VJV`&M$jvMS5U0& zDJJ*q*kSflb&-qjQ!MB8^wp&=y#*em^ky9V`1~H6Jq}gMiS>;d{0&lkxv0`4v8&T7 zl0d1ok1v6kAtt$j@c|8DAql%=$?t)%TolWM4us-?Si`&jxqJJ+`ISohv~&@9>%KvVp_bywFHN2) zb)Mj#Al3ezBrK69?9yOG${dvp8aI=cJ_WB(YT3tef8oE7BK=MBBVgY}*_eh%6CChs z?@LYrd*TTVkNGT!g{j}Vx5cZ#pyPur!yqr9=v+;$5%=aQIjqesRdak3QFH86C>U>I z=CE+&WRl4-8VIA#v|PFGTt|4;UmB=yPMTRtPjTPE3?a%AC@;A*Jj+71lLwT9*O(OMTw1m0=J=VU1WN7HA!!W(xxpWdwY~eZ?(UD` z2iXaagXm;AH{ZKqQ1MVOMnbWg*Is(&QbS3?5|XKM(ic?X#@YZa+EWUDt8HL$sz{ew z7JXQ2?mCYlNN9b2SnjP{V2ZBdgd6NPFLIVlb1H0#zp}&N#o)j>R{Ltyq@YQU`~VG{1UND(!L4s8-{{ktQGzLVbz_V6{FM0L ziAoGKJI_i{0op4ikrh$Ca^GIjFKvB*j!L>5Q4|l&(djM{3zK6@BZfG7wJ9sU_ny+p zYY1nBk@rw6HcJz%PlB2lvjQW)BH}dE3xx1O2#$?zCH~UZP$An*!t0pIm9txI3H!99 zQjD_!vPEdBQ>Fau%c8jy0j%9Xff0#K>9?^>)g$B;9?5O-$}Z)`#D!~8i@>%w+58;R ztNfco6|Uf<$K@c)h4|AIqbxEEI#4X60>2Er5@`yjGBWH;?XpUviSsseyl6mO03tz! 
zADI+KR~WBlS7C;wZ=6W2lyB@IzpOF^>&?q68SzR-MkcnJ9;AJq7okm_hfs^cU0-x- zwZRJ8$SnEI?l_aU>G~kWXg1wOOab=zO+(zeq9bv|Tbt7AqdcN`+)wEHU=;DW4SA>( zx931A9GxH(1v49|9RU70u0gQzqC3kAp@{dANIp`c$}KQ~7pmNWM1I;#gY;Z(2@xS~ z;{p8*p0Z{& z%sp*=6;VC$(;JAfWGY`KC^BX#MLw-^=LKT;u*g&8#zGNKg2VJi1o_xEqzFk+FJgZP zdWB_Dd{Dwz+1x^K*F1ZmfCMDZ!jlRV$9b*NYBSEYX}Y}R7@ip7g{kQA_<{xx2<>9q z!?S>5O|HeQ5Z80};tiO{+4~0%OHM;F0(LgDfUVu7w-L^*BW^rS0~VJ;xoh{i7QrgO zB%b1fyMu3=d1O?zj|-x-jclX@E%oyV<7O%EX^6Lx1wq^L38yKVL^+*8Y1x9y>O^qd zEpWWd(o#SumZ)04gj~tRkn9B*^(7|1hW5?!K!fG3A3S)_J;)FgtvARP_A2D8l!*4u?DiTFpkc|u zX^%dt)k0_$xXE`2fJJT9YBPdc;1P5Y-Lh(9Pt>Bm%0xsjO*+5UfFjlO^kThe7|1CD z35&3%NK_=RVx?);%xa_Q&8I%RtJYl4*=aKsOP(%Dz8rBInyktIzc}_ea)>ynh!s-i z<_T?m*M-OXoJ7Suxqv1s7nqYO2wt0j#tVFwN~q{Hoe4rtaY>{@CeWMN0cCv2EGivP zRvstBeoIdjvJ@MLSx`>GHO;wPl8&G(bOV{Ev4T5au%$y(qPm$EZP7{u%FoSbDIs(Q zYbtxQdFy6R65kIF^2UPf7N=+vSUEn~WY~+eY*ndAcBsj9Hao@wtewaB2+Toa!~{~6 zc0m42K4jeZsql{a4Yjil((SH#z_d(9U{e(i3t(hL3LtbEwAI%#wpv**BK< zRk>-{v(1lQ^5Tw_O>tGsgb^dh@l!&Sy!#K`uYNyvCM2r5j$EnpbFUdoiWpRZ_{W?D zT_S@2DJaAg4PT~>=98Mr&*xW6oVW#Y_p|S_-Tfh=1B&kzInLPwcFj^*kh+4f zja$*$Orw- z?{vTOJMpK+p=SZAJn2rnrAy3srT;Cm#D%(NSd}|h{O)N}`z6_LgcSg!S$IVi<|Wrq zlL3Ls0Pm(g3eH_Ah;u>}X(q#kSa;2;st7H$?uf0R)AYp!AT2JiccjQ+(E08S=}o{R z1Q+~Z1SBU#hG8w*REHY0_w}U=8DE!G#D=O$v!Sf4+~h~P2zB3!H*@}UC-+xL>S#?@ zlky0li>&Eas{cR4$y1}S=P$O3ku#QPrbOoyYr1tzeEUlSf$GqErOzOj3guPeQ$oC) zW(z`AgJcX|*EqL|J7ww5gDI#8iF?n5RdCBX<%rpq+I5@;0-H6ws<|ssUGIVE>)rEs zAcz>fY6`hm+7LTP9G{R6!mToKMz0h}0%S9BR#mKD`peUBOZII%f?Nr(gd$X6VvU^k z0}#rkCmPpwaug@CKj_xufRSM!7T#;0XIo6J&&j}IoKg;OqH^Wqc+2z6oz-sldGVmO z^fU~I5N?{=keNUroOZHhvWdOIRnKE3>eKWM08k}@3SOk5!9(eopIZ%R0)s5T z7yf)fcj9)OzCCTQAa|2eJd+badX^WkWw=X`29?GdJyaqF1I;<&BIFF1A9e4?mgNf- zEM%V&HNTtt-|{sJ*nn1=lrS0A3!Dlh-hv9r_@DQwazS}v$pmCd^l@E+CY#CYCjP4# zY^+*WOrkUZH|6UjCa2MtsIs^N`&NvJcTNRmMPmm6#Kn~YGxw*-QY8Fg>H;iP zd>wc&F9Sio+7f|AvlQ#Q@Q8G*(?m`N3YZqO|ByX#x-C@T&Gvn`NIQrVc@F6GB-Z)E z?l)tfK>ICa77A1%&B-XKIc-FEhYs~T6J6WCrHROKhlQ) zGOFssk9;nbnye<%+|v&MlCaZg@ZjwMG+xdHdc<{RzGXF_Pzn++How4AGb(yNq{Rzb zC5xar63J;|22__1Z`iHvWbmBwOxo+9#Ld&!uTXLqug*D!xGx&bVJdM=4~n$Y_>>mr z@+($Zamn+A8I$ z&+wzxNaQ9wSt^L{^D|tHCj=h=Hi$W(91_9cN1V|MQjU&)gPBTX-5sY-F)o&eRD4WV zWFPQFD>j_z3&sScHORrx%53l^KnZRZA{TUJ1VP#71hwE>XL2 zDxYKMnrO7DE$;R+JVkFuMS1#Wyj+Rx7-C+3pL5V!CB_rg8rIDrG`<6qNLW|V0c;l` zp9>lzdi>IL_3;9pisgPtd&Br|WMAqix!*{*>q~Pa&O8|pJ;7tma-LyH|A;tsGhSf` zr$r|ps9qgcA;>>%aCc4o*aP+W(#5|@HbTJHfa*G4Wy&!U&T5l#s(pwx$_R=#C{cy9 z>7o$M5FGPjVA03{Q5j-ygUD2RN+jqYrIFM{h^b446tPbUa$kxukzYZ`&8_YWe+Ry% zOoQ*lA!5`Z=KKL!;Ru2natFR5oZL3=vuojBobY(fdbjt~lz%2T%nwxnozB4)r(-P1 z*e0UYnZqM)eJP#mJKjKbQla8aSjz>~ZEYyj?;**oaUaN=B1c(GrhCw)rx}4EiMVxb zP*-{vcW(%4zza=0`BQsDV{uQsMInbJk=f&dJn@nU$aWk)2nF_o!GY~=ch!Fz%S2C5 z99)HqRm6|l(N1Wk9LwQ+giu!W4BM*=F(9Lu0AZrGG}kB*bku@?)KV8WYT5j3xulV9 zE2AAZ)H}vawNW-g-W`>XgCjR%%ZJ`>7xVIt;-J?$TKNRuDfx0<08r98pt%%9e1bQS z2^?GAR0|T7I++EBEuvFfC?A3maV|kuE|4&*B0}MAa0E=FcnRq;tKDNibFM&t5vUv+ z+sZXD>;}49^J6_@)Z+c7ghPfs43YtCaJvef_knQfW>n${vX%O#NM6H=%p3ZunUtU^ zWRaZBeV}Az#V%z{K$%j=^NhSA;v})hJ}tmcBWwluv~**B0zcFq7c2qu#Wow>Bm1s7 zM69oGI1tzJ$3O0V?02j+Yho!=i-6$T{#wvR5iB|i_#3zqk}dH;W<3JS5b>SD@_oOB1dEpFtAbZBUQ;e5bA(y_G zfcxrIf(w=rS`-8pqe=SrcZ{d|qqo0&`{$-|3`9=b`tHrs?z#W2p32mdSlWK}S@%(F zgEQl{Y?}R5(aM>zuGkt1ZE`pdo{-n*T>+S(a)U@pJJh?1N*>evQ5@id&`3k< zJRk4PXFh<3|6|`r)enP5;0+Ou{FizGdE}jNWvtcJIsq>JHQSiAt9P@Y;mgN zF`A-MP~$PYjEspjDEV4mt&dxwULFQU=VZ*$X7{tTc%U%Be;eP23EC&)=X=~YH9Tg? 
z8ojfI&=B z-a+J*9#upK#HO1>5r!+Ite!|gSd{Syo-?mT(o?!`dpnPnw01Q4+Fv=mlBr3iYmiJJ z)+|#i1S@FPN&FQX7^XH0lqh{UNFpx;QxmHY=)Rny_Lw`4q2=AK0m*p|DPxS&Z~Mhg?I6)VDxRGwVK{9SBh|3W@T;I7>Ug zmxeX$`W$gzOZjXoen?mj_?udbVr8tNZ0{Ro#tGAi$DEq~ z8pPts1#e+pl%!!q#-Fhwo|!=8H>n*b=u(_{O9e~56M%=&ng;Dlc;_aQfS5=$H(#w| zN20c_)4)-^Y3hd7%*QX5hpwK{H6xfXH`>H`79@~7iv3az@kuIc4Z*T|Gyp-OKXn|~ zeY!Bu300=xQhx=@!v{rSk_}NjIC-f{y_GSAwE7-G@O~L?+$V>}8t>lxv@65blmU^U zeYpxVRrNF+B8ZKN1?%~y67IS@@f+~V`Z-m;O-hMH?#C|{$`F#_W>Lsrl}L?iQ5}-hKO~+2jza0ZvAa%V7>;Q_-$n51K5Gy|M zSc6ljXb5W3H34c8h=|LAh5H~rEEu6XI4u#t7sv@GG)Ww8u5YAmL^htVk=S8BQth97 z11Me;s01qIjgh)ccLW0eHFqB1Q4V3nULvqFp=fuktO@;;ACuriO_3ayIiZ*OUv=^F%{2FBca~DC?!5V%M3yl z?WS%&#tF9|6m6O5sHpVIe-CKZ4TK?H>w3Xo3MfRcZP(xt)&KUs~RP&g3Yct z5lXr4`yo61K`42_H>F5;$;-SvL>dk(%l0rDmj+kS9HZ6X6(nkSf>Vm5c8M6fhX>73 zM0Zx*zh=MN-42F^a7;V}Bo1yk1i9qQ<42tct%SXxVT*ooLwz3E-qii76_02GE@lqx zPo@I#`rbilz)ytwWlBOjCcjIGFYN5JOgS6tl#}|{w?dSwK|kUvi8_6>R2Q)#(%K=p zZqOKs4yFYN-z|YZ^Xs`JFi42kbS~j1Ibzi@onOA{qy3ftw$^B28W->bISlc`yFX?= zUhtm+Z=30Za(_zrUkPlbL61I48=l~__3qRUAv%{bbD(zpo;EXdvk6j@UpWz9jsHj= zZyZYC)R*pvWLGX-N$A-kuv=(w%A^uth=PPP$x7eixMJ9<@}lL|1elYRf#i-{N0OehV?dzHO2>!n{Mo=2C*- z2-W<`ZnJJH2}`BYN6q)uJjBZ#sEZ5Q#(X5398eQIg)CFKHjjAaq~OM+Gt^{D;`as=KfrO$(7mvB z-fZq#=~hSaSV}wC?QvpAn0T4RPM#m2zfxBG=@D|tLq?txFkA}`{vW&V1c@*<6^$}M z8V9;XF(xl~WN?=hn9rKfGWDcV5{WFTStKx7e10FTW%edY)_*148>HZ@`zrQJ5j5I6 z7CAN}-@W;`d-T9`YqA^F9vA!W&2M>_7U;H<4N(0hx2)ZHT{WI z1`fKYVVf^dXk&`Tq#KtF^>*7|_#=`tFc~5}cB=Uda?cUTvBXDUw{Z&1i_ZKiYJJqM zGWNPqxhTGS^1p&$Eh288l|>SSh_|V2G(IbGE`>5=tr^=KkJ~az*49K)j#|<4ph&A2 zK|;oQ`hYJl@P-$cXPR4qri>3gR^J*pFDcFWE8@BY4m|v%E9fw8&Y{vey$cp1aq7D; zM25#3r1v4zL2;RPyS3qJa0Kmz5ECh+K^GGTC$2;_x8JjR-|;)8xWd=&eg5&M-N$ii z-|oKqJ+A<-pD>p}eR?>v2f)b$W<^_4s)LDt^n2ZpelJd4^&i)TiYkwTK1*{)S8#bS z%41V7in2E>54Lzc>PszF;;~x#b>35q*jlPKp!nGjV+E$r+FJL-TccsbvQ;%2bsOc+ z^nybMn}(pp$U-sM8NDz(cyzffE8WV_>jq7{Dw)sM z2wf~`3o8iOMrxT(>LS3HLUkYTW7@T{jld-pOe`=&xT|HC5;gnw%2w@&QMm82m<4N8 zOP(-kCU%K#I8VTuK>ya77kJvySs3pi&I~h`!6ESzA!g$7{t4Yy(k;|-22;k%Q)mrj zuPH>EpqMSUx=#c;9JWIu(7GUQwu&%xh5Yl{ez(87?>CRTE!%4-Lep-OyIe#9=d0XT ziv(JVa<0tn$>2VyISCatNG9{*n7$;1jfl#Snn%~LP)R%8h9BBTt=Uf!j%W}`1M%U_ z?k6S-V@HIUQI=I(88at~8v#BWoBm?H=*EkK!hYDNMa4jitb$^C?nL%1JmC zjxzGIG)bPf0C@Es4rWZ)=oH&6q8#WDui$0n-Yxi#OP-DzEsC3L^|3r!Qq80u9br3P z#^1rMsA7$uXeq=X=D3y5_PU***1CJ=$}=gm*bpZKGza<%giH2w%RR1+y8Z7r7bL0i?5MacPgQCy6M9jBq zd8``8)!L*dh%YY=kR+!RV9aqNUwYOX!M|U0zxo4z8XLX1sQnCYXycm+Kwk!kbq^nz zP;?*{^Ji-K-J37f$;46|6uTD~aXX~5WwOQ3^io78dq5ICGm$QV-<5+G7wT+s`ZBog zHYnRm$YiR>p*tc1Fh^hc-ndbEb|5XdR6Y^VX6oPD?QP2o)0^HAMHtoCG z714-vZw;DmC>V-w37#3O7Fwyb8%J$^Cp(+#U)RccQ?!*ARVcFhJ_J2A+;cuv21)UL zR-~RQ%`p=>GW1<24oZI>Jn(Df%aMJ7lj7fvnCS#yP2?&_%%K$g0ga`RHL?tx_V}syGv0O#+FkQ64AOqfydc2?xgTLPWr@MdZ-ufdOJnfQ!Nk`13@ez`NeHMK)38n4%cVsd^+2u~pemwCcCb7KcG4kcb25mV3pI?a8n$XBD{o=XYNNx%=x-(RwlSzAQvn^?$`DfQM;3 zS8o9|@F+E|emvw>0;xpe*$uXxjZ2OSj{&2h-Zi90g%vP;(jyfU#?CbGJM|}KAclOJ z5UR?$q4>=kXRrj9=*rfMOej?scrgkPuy$qF`NDAV7bQU#$Lff%c9RY!;rs7*@7o$) zVDxHwCk~0`hWJ_MfX&r_nXBtJIFea+mkS8z5xuhz)-(tndQy?4LOBxaPnxqM&m@72ws<+h*}$N?w-j+g_7fo8qDD|th+alx{n{2(UgqW!H-;2 zRD>Tvyut9>V+AvUMwo<>y!qKN=|C5?Z6XPeA+ul7&UQCJvHylgwS)q8#)&3*{a^n*-+) z#7~NKACzo=07O+TXE=p(L%o0inlvaF;XTt51R(iSF3vJpRV9Vwhlooz;A4+2(U?`d z;``nWHGn{E)7rLA=rSt65>NjKmiONe(SfaA#$fiRU2!ux6=Z^UB~ODr%!>$M7?#jk zR_l{daxb+Vg^f3b|3W+yd>}EOrvp`hQ2m5`0VM^HQN#6d{2Aua1U$5NJPteu)Xi4t zh*b(_h;LBj2UvvjSwtc>`|QZ-e^s$J4UzP~G2#uuTbB$e2KbM&CtW*pMYg0|a6^5m z&-%1LzkHX#vSyye*BP1cN&NR5AKE-3XS>bWj-6qW0O`0z{K^a~*yU6(E~aKpE26WG zBe;T7(zHH8#a(Aa27;GUoN5S1>-1vgc&uto{BvWU@BLQyz4t;8E66zV$~T09DpNN8 
zE_|i1IyPt@v^7?xLq#43jn#>a1XTaHwb?x>Q0o}$PN8s%rIwO>0#wIQdnLQ7dVI;9 z@`+rF5N;x%Myr`+F+@B$SBB~~ymTCHpWaYN7!71i8D-M7zJrCGXp(i5YhOXQlmI_E z0v(cpI{_b;#jvB83h2&s7 zW{+Xd8~$Ob%J4v$)m0ibpDJwZud5}POdcXZ#vY2OBo_3o3^ZFWUWctXDakHGJ*X_@ z9ggZy5K%I;HWdU4b)(t|8%?ckYT+`D6D&+{l-X)sP(0$=#*}=<8-@2{?mxf%lkVMrj<;}yC{v7|S{VM;-=|uKzlvQGCLlZQ<-2Jdb!f*gdQIfV{spP6natw`@ZG|^@NL8X|mLG z9_IGY$@mtD^}51#PB~%B6t%3r$wQ*$MB(8*XSq8N_(_5D42E zhSWgPh$KP1IMUCGTdGMMMF6+KiM55+K8Bj_3O3PcOI5Xu-b4K=z*{`XYjXJzB&l5m z%V3Pz4roE$W=Y$Xk~i>*yq+L6id{@vwD@yHCmY~n_89YQ{BVB>YYQLW1P95R6}N#$ zV(A98h6)?{vZ0TqewOTWxT`dd++RS>+L~A8+X#G2sq)T&+G==>aIiM)Q&HeX8*xu; zO7d%g(FT7lGj@a*^d|22Hhuzoa3|i)+Z?Ty( zBqb?Ua9WzyJ`C>N-Rrj2eExJD!oehesCKt!Z5VjvZt{YrPfUOVAz4dr-0#-o9iUQ| z1qDC7&>&glR1gD$6|9evp!h|r;UV;Y8Nc8VXNU;*t><)#Eq!y}o5SlF&y?0^KI%0- z@*^mA(3N~EtU+Esui1f5FC^>_3xGA_`e52Xnb_Ceo1b^z_7Zom(R62Ncgm?y=(1!) zjbPa~+jwg=^>IRnGX;QfbCkYk}sy;5umxXw(_grYUAD$G^Scm_ErX;%GHa zbRpsUI$4%#M!9>}lVS}sV1x7m@x{yUQY(F<<9K~yr;$rRs|0VNH`Da0rR*``4 z&$N)UE|X7yUCv9eZeKX`d4rDqL`@VyE0(sDGZZSwOUezdLbb$TT@`tmyEnjHPbr@D zjbMpI<@Md(iV)dR7$Y~8bI&-Ppv3FfuD;%+VOGI#G_exZWga0FwWSJ~AbSh-a4P9x zn>XCqErDlVwVtDU;fsQ-k)Or5MwC%(W*`?j^`M*{tQG^TsJIjpDie1F8MG(G@*wyh zf8f%_Ap!{JID|VrJS@A0=cK7`G7-TuVJQzC?-6Bhrggh6eSi@z7ve2q zEpg`v2>j9AKPoQpM;>SiZZ~;7c|ki<`ZC4q?k7lM*n35cIy)vV7^fxM%NG?pLS{@! zhf$6U`pe%RV*Zj}5vQamKnD4gBH_WiOrH5DK1Q0*X<~CM6dv&-S5dpzAHm9d+;~ek zdWUzwm9={xCGQ{jI4-YW5gVdPkahbQ4^NK;C=+TmZC6m&izE)p5;uU^#Ra}r_EMeg zI_9bL+VoNf*d?TV#8EKM4^DZ#Xa;8NRSEy=-T>g)Brr5KGnPZi4QT ztiCv7dSe!BY{)^k$Z2hw@B+u?V6JKiziHS_idWNWac)ZZ@r)z5%A=;-=I1^iV!zy= zSX+})u^u(@CRP-Tjls>?&Pep|i)pEv;)337SM+8Kaq(q$X`yv9SLUlKCo$LKfo$*o znwgBWGg1~>1*%4-3oMzwo2yrN%mm=sHG2*|RND~3 zf8OO&uFy64z3~v_a8qg}C-oNLb}>CM&oIy7iirTqHp{e+%@y10VGUM%LuI+R?UcHEmW#h$a1Rw{0%c-~u=t6bVK@%)v zY}q1bfqSWD3BC|yGu{s_AeDmZx!TBe?P~>%Nn!E^R+-s?YgQOtCZ|~7Go_e@%#+I^PX`ni#ETl3^MOgZ4=Pb700SyZ zI@LaJM6l%m6vD|ky#}52i<^)>qtFU2ioBbj-~ZFae*}$M#l5+Y)8*PYhWhjFz3=({ zY!W^YDpDj3F2vUaS`61<9a9}qj9tn=$N=y|p0F$^SZzx6;3Mj3nMUM-1D+o*W<_Hj zP_M0&Y9jVad0R~V)-`s&;UDl%r~mtbi9%do5`v1s(&>yIKt-tnvwRk0*~alctWt_0 z-y{KKPYdNDT&c}TnsIOxLRa?Msa@r}H<+Uzeh?f;{zj(AXm;?rIUHC4^;EcS1g2RO z?SL_XwvtuCsFYjZpXn>rvAK2;bF%Nd_N#w?_qXve5Gy_;+%jJLYy8vc|ICF40&;U9 zOc#*^kIxflQYziWu@$i5tn^}q*ZFm@Hfj(q_2Fs$1(KHkSBh;=8C&p_dJ>-!Z7JIF zlmzcrlHU$>%1NnE(aeGY-!cO15h*mPea3D79FEQ{D2CdJ{>3&A(J^ySqLf#kmWSgK6z3mHD`xFJ`-eaIet>;HijFrbxcuFtu*?Ah%Wo3 zy_8wzXo0_)J_M-Rb^32G##P}Ste0Eb4|)3}IHxwoV7#JcC%6N|gw&;CJoHnTmja7a zxUZLrC2VSparvjQEl@pl4Z0&%yPz8=;}BBX-@ReTbrDZYsZ&?3N_0u#GfC69$KVCU z07Lz;$r`VdJ$w2x-bfP@)P1eE6XHuAzwVLOIl@;dXQtf1iY7Hor&#bYOKYjbd|Qyo zJZJ1unY6tY%fJt7ee8U5VVZ{IaT_l4G^MaMS?(IJZ6JTwFDfszoZ2`puTUDCnyx_-El{VuB$9X*NwoAJD zDk-_7@Oca_u{o7A*@7ZjkQDmhGj0dsG_lY3e#lsm@4e^0l3n#OVLU9z5;7)F%by!a z2*$mMj~ZR^?Vz;-?Tb#Qf8Up|C;AL0r%IVI3``Hj9n}M5#M)?A3wgn?(QD}%3qYL)F>s2eHwni8A%iBm%ESRIRF=sC)09tis2bzD+>WkAIj9<*hh}LW`*Nq z<$2*ymEz|{ZpG_7 z6eCVY*(3J=YiXz+6LI!}_N2s7w5NE3s)D6gdiH9e`BR@KYAk^4crC|`*o?bRC4(I=v*kkx&lEgqOs%eC`2bQUGMR1X-~d_~2R zU95U=0Fs7#6X(Hr@5f$2tH>D-&XPLBEZ+Zx*NjC;tX3V&_}!61bS=7QAN7%&>XP<)Y6?)~^9g^4ZMQY6PFjDDGs3*~nwX`Kq_ zSi&fTK2dJ0$%-2D6NSeESGKeSiL@?bj>G921Z#Z@=0aCTq-185KH?*{rQd0XzSu+y28@!6|q1Jaw5C&xI?w0 zWFC8IH9>shqy#Zqzm5A56F&R?@wtW^FiY0ZijpEg%G(H24Q&H*w4i z!o@UE58+$~9`LnEA130P)921zrvXHBZWR&Z@yokLX8|? 
zB*ju2&8&8NE30ve8Qj2%?~4{JY8Keu)(6oK z?8D?)SP#OCreg8O0sK7hY;+lo$_*y$7IoQ;!&>=49qA`meso=!zfV6%h>j%|0z&wl zlV$hfn9!M7LBM3Lq&a0x8oPurxA)uwfI}r91sO~?`kG~M5cg(6XJnHCwK=|G$`rik zZy;dH^47+3SV~_gAe6-8>nLivCt)u6G-xx@CYMV~`)(-lj&xyx7GYE+BOXsry`n3c zY$5ZnPh675>I=biWa&1MEw~j{VF-Af7bSNoV5j-B;~ync}qJr>lQ)`e0(_ zwK^udO9;6>5~D_0^%M&&m@q3DR$wRiqex$*r&mIDg%F8bDU<5p%3-k;e**kpIc(Lg zlTNjypembVi`1} z;;p4O0fEmM+)7&(qNbs31~0m7J$r9OP>1gOf`Llkw%GVHnep*Qp=ng46@R>Y_j}#% zdDW}jd&IeK22s^4XGLR5ul?uwQziusj`u!7P&o)o4~ zHJb~#2z+>tJJju45@RbcC$mx=<8~nqiiMAtO~k7Ff?EL&BO`p@n=?VATpH1xoDSk3 zc!2aS_J;K|$`~ailb<0u6{Fud%GZ| znx*KwqZZ0b?kfXJSpy=-Oe0qgBP|uJ5?Pone#rg;tQTia0ZC~qc))cM+|SQj5hUwk zQp_ZXW!ydug-ZwO8D$f&xWr=c4dl3)#oLct@jLoD4)lA{tB63mGCZLvjMdIb=kM>SHJ11{j#m-?xy}cT$q0eU41&I8*NcAX@oRzHNa->|$V#OO+0AlT%+V zx^D;u8Ol;94>@ zX0$vM19}@9RuxO5bG1%iq}F5SRH>hn=`f#Q$p}*$>tr1cwoFv|Pm7fbPN6>@F;55H zH*~QDL60{;eKn6TEw1QR3i9x_B-9{)_TPY+@n+iA)FVXX4u=_fdQUR!XMRH;R_S>U zEBJ2RJmHle{;%%t$H6m_s8&A<@MdGGI1Jif5lKGIS5XKsP4Kcp2FO33eUrb;VHc*> z3+RFW8(tT2*Tb-b-y+YA4qgIQl~csXgPV%0gL9{`iHlx-rXrYJP$m?|n+h~XuB9Fu zyfU+qJ19q|p5YDYzopU38(dr4$!ciu@MG@?A_z|6Uz}YI@Lo!IwzYvr$;BO%!8=I5 ziy$-!-STyUiAY#WL*g~$N?zw3{O}*UyFa@7b5puy0})IDwOYIg2&_lfAYs#s%ffRV zkc|TH!CXhOKI4bGcatu|I)u}(!bx#?AB-;Tn>*wd&q^c-U;({n-HnObGVq(aO#*VVTjSC^_2SWN%u6_V`4HXy}JcT=hrdRk84@f z#gV=lyaQ#VFTEH-L#Q8V2j3M;qXs4bfNI*1G)ZF(hgb7W5*+iGu(!X?aAe^4N`1!E_=(6erK;oVQ#$#n&v8eQ z$|6x0O0Jk&L`g90{4~TE9GekeLY;}~QtuF<)N}FVb6JYNOPRFJDOLh(x?tA&=W4f8 zGjQA8+ZuWnSoop$P2@wv9^6Rj!|%WU8|H1j{~Mo+8o!%%z;R%%3eCjd`NYdZbqp?R zFrmjJt;7d3fNg1*-@|02!ChDG)K3+3E?ZOF(;xob+B#P@+(7?Yx_jw+4Uw&C= zcGBXdOU^PQ>H9v>Lj)y8tyh5Y58A>%BcSlP;%9}$AyNH2-bNLwYLG6-S-COioX;sy zrXt1J7hZhLX}8ipo7#)4-n>XxNOKjESUrHV8EB8uXh8r%gh#7eA+FjSBzsP>jc$@1 zA3o_m@m;x;)gaks;++r|!wL|*yM(1x@CQa^a8aE0Gf=X(DYIdKulJYRD?gz+dxOz;cn@@ww{2{SD`M_+MH!KWyT?QoLvO&4+hFz($Yewr`LHLwh9%-Rw4 z2p_Pj>=xXc7dsr}9(QF)pW^&uT7tgJZ663yhP6@9Fkf^V*%W#xs1TXN#J(a}O81|q zd)?Z4_6`HV?Y|HzTh~8rZZKJxJsi@XJ0ZX_)FB?33lz}$)9dq6kJLohg%+ZsufFjmqiO4 zpy2>~#Jk}_&pO3_IWVUj1K>cDmfY2WEDE>Onl~Pvm*DMvnLN~r1qjhIJ|CpULOb4= zb^`9CwRQK7q<3{dcKPu*IWdE8USVr8#>x3*yI)-C=-LLx+X-jYeeb3kFw1i zBBkBf_kchNUP|IX4!bokM7=w(Q_8Med-RMz(Z7Yi*JAAff}H+yzEN-0r`0cYhqa+mq(@r(4gGrFgLJUb$NuBa1Yxsez;C z-M$HepV_cy6N|w4wSf5ekq#`}H}vIJdNUf71iZt)SRv_yNU-PtDT6K5%eLu%J>Tkn$xl9~{S|!Zu2AHb z@i9{K63F8s#A}7nfQM5T%;fu|=s)CE$9`0|QS6}=oan109I`I{8!?n#iK>e5`b?u; zlrr+)&m`1Y{@R?Z6jF2m=HvKvvDkAIA`VC1ay8$>!B{Fw9o_H+tcPOfGtEdU$>)w6 z!Hk!REfX1z`HWA$-#y)npKiFRHL2L2+>kae+`pE0kAf>f(7TO-N=X==_uhY5n;2FL%H23pSq6 z852;7&E2(Zd#)(-5WD^&Xh7zZXhLu&%0*tP_cx^Ma)d;Sm-G(0wV(!%!nbjwUHmW_ zdw4{Hq!X)+uWokhN%^NhN%ymyj3A86Av26}Rs15hiEuwHF}H>cD`(P&^=ge71%L^Q zm(P=z;_e;7@x5a7dW?F*M#MeY_O9ocxf zQ@Di>N?iu(eeR@w%QvQ(bD@!$K1I3`T))xhX=5KJrttInS3Xms=;$x;MVumS$rQ)7 z>>$cN!X~m(!lC~(&gD0Nr+PaJTyRNU9#g}vW~x#&<_nN9Tw(XGAVIw4xozL+zVl1| zEIEM|0gDX~2qMJeS7NVUQ;QmZ)+vp%6F;#j@1vDO$iT1)Xgh5&DHtX`2^IkhuYnPQ zhxRz50>pbU0yRLS7nto0*BLVzx{(c zL=mtPDavLC1^q#!Wkz$Ar2UtJv`}hf7mt6(x>J zi2ea6n^+qO-BR`b4Agb}1w*MA0&p|VXp#b+E&YWQMQ9FIZPP!z*Yq<-cEQCRw@FXR zh}oi5$#n`?B=BX1*uVQx_tCG|iWx|jo(pR_^Dno#cuELk+JqQce+>;GT%1j-al5{V zojv-rdlKhCg0i+Z_ySuOJ(xio*$&KXjR$SW&sh#ur~;^N7qOQ3+2R5o*XpQtyaiH|u`dY3s*Xl6bd@N-p8*PURC4 z3$PKY=pZKrv@q_V@*mU8!7FlzX>ReHBiAbwe$j0H`*~9|SkYPU) zSvKR)zSdQOJdKVP%KEepnt;OWifwB%t%G>HO||B)i^bG%J(tBx2bPn0%0msRNiCyZ zHBR-flT3|=lFcN>6bBBl9SPwOn($=O`oYrT*{5FfD?&g>mAVbJB*3cjxHT>a7radq zGan9-TcF0v_J>?eYNzc`680ryWn)jaON}TjT8fpRj>4mP)sJ~W%H9R+B=!KNV^TPp z;#>)d#pQ|5+`ap5_b=lh(CL6F$Tj`5f3`65Km5VnkNMw^{zv`)?tj#a_f-kU z9Y~|KsA_T9o^7vn>-%QD@5(*<)4RWpOAS&@Nudycdj$L9cC^IfJpLm4(tp|M_IF~_ 
zAE8V>is!X3^ba!!hj1g4<<~QYSjEg|-tk$|=3x)<$mjocU(N(=cuB_vQ{orh{! zh!lwhsvbUGbVLsMYIu#bFE7er;}yct7k8W48?10zbo{93Hc_1gRD1XCSGtePmTMF2Ef9g3SE83X*FPNjuPQoF3B?&*1N?0w zPViu9M$4-&i&`F&9s61btRdi^lX8wEp2{#r;o)VkTS%)@UzK6Hg)oM^_8Ph&CG#ry zm1m&);-|ZJue+4RDtU4WqJX412#vL33(|p+fQw zg>OKBVrgo9DZOvjke>()DGj<0e{}cr?)`uAGn<8kwa#>gSUXZps1HAcy9%wwrLP!b zY@`Y=>F}a$S~^{-Uvpf1P0)~w`6pzv{N~*eWuR2ZP-Po$X4s0rxhBGW3nSNd1N_i% z2wn@ZeBN|XD&=cE#w#i>kvmdFcz5>~an|a`lFn2c3IY-R(nG9bRpXIiX0w^5my1-4jBaZ|i`Iazwx~J0uV?TW zVEsbJ9n!6-260h5;}SF$`|#E<%s`p|_l>W#l18<|@h>*zjvD5#=1kq)-}xD!eUt&I zv#d6{qg8+4RPw3T4DS-eH#gwz6EjnzU!S1#nv>JV@FST1&azVd;VE$=;{CfMISUPEGS}tZTYI&B zVtmNg`3vqg^T~okTc|wQ;urX*`-OOXyIS56-vbm8KMTSpl(?z8)haWvDq@RdV4Gyj z$2XTU?-bgD+gpZ7zXDI8OmC=FC~)jsNwSayHBlLSzPQ!GPI?veoTqe=A@lJRqBKwZ z6v+p9AUJqFcyeZo=nuyk+U8A{Zu66myN5w9SrwBqMoJ|r8hoOa$7c=*Ue?}p>n9z` zxS#}Yu16Rd8Vnlup%qU#EYa((hUSDz^tN{1uxLJ8*#nnoG^+}5e(rPh9OI$vktQAJ zzXhVuJ3m)L)Y0o!ZxU3!d#CP>YV{Z-lqF#bqhwI6OZcYs=F4XHt2GBzG!IbHIPFw& zJ>8bigMw6Ssq?|Bnu$K;npTtSlTYcT99#(--sH$Xyygc%c0sg8#Wv!-tqH~TOne$& zr~Y`B9L8lzzqI+Swb5uR(^5sdw|~vjrw7|2+pRRobeDv<%0%42watG?t8C zz&Q#<9cO|J0X#S@-Tx7~*AmO)>+E7mMs>NSUr05f&_A{PMN}wdR zo*|F~hz=4ehdL-p&Q+B?7GNL2B)(r_cL*o>mhsq{wxfgW!6Xf zfkva|K)hR`GPS2`x)>FYMY;Z3(31d~BtZ>Cyu$ocZARQObz8Wtlgs!56?=qgE>rzW ztDsl-e?NNvQ}b!`?#FcB z;Ga(aG9j^8*$^VMn4pyMl5H`eo^mzysn{YF>a^JTW=Ls*^MFjw@-6%fo}IS5u?Q;g zkTtOg{dBNKM93`PS6}#ZcIijOg&F#?_s$LlaxFt#1_rDQGr zHA*Cq5HMVdt1+M_vj+h#N-|~d1Z`S0&zwN%<>QZoGIR*yao$6#+^pyE3_qo5^!N*Tnx25&nVU{) z2s61{V?tLMfqlb0j#c90E!?^wDRR6GuMDDST2iqpx0wG#v&&Fz;JHhI9dX3of zZ{^m%%;cQ16YR?;fP{c=yiEiS4Sq29$~GidN=e*}=IgXkf#)^cNVN^&pS%+&|NFlB zUdj=UOWJBrJNG60sQb0=2AdEZLRd3Y+VWYhpOE>i)ElQK0;x!nE2f^T2;DQqjs5Mt z(JO%Js1vv9x9a%u-JhC!@j7HVmuvKek~eWP1jF{`+MDy*P_#_!`t^4Dlsop(+Qb(ykmg6%)ElH-sS<=j+q<6CuMA#y716zsA^eKXjq{| z%Bs^LH7RjPw{Zc0z?lUYU(g~#D7QFJ4H>a5rNCBE)6)rN=BwHgOdl6!I+^TC?pQI^ z@s0+#9u{$}d^(#_s$8TtH`EPRH=S{@J$mpkkEp#Xkw1gn69g@ zgp+Z(sok<*77kNwhC(P(G4Z+Iq@MPZ%eYd~9}H8f&`2O@na*puth7RLJC@u=urIV5ab~ussQ=4%00k$Ve zBn?qLBn~r3BA2^P-lKsk1c38>tIoU z9q!ytSDGfasnstPa`Q$&MF?sX0_(&7+ui+F{}m5k32FZVYw_};7_0cPaau6aAH&D7 zRfS^*H$G~mgXFTBIk79DY5c5icM7P-&sbs@O0&|{@N)hdExHV$-z4U@;6pRHpl&=! 
zy3GrhNMqw(-2>`MgGxu;W>8qMMiOFR)LjQhiPm|ymqshirHC5@#b}m$T`j;i+qSlN zLj`c%y^<454I*Fug-A@$!oT=Z{k*Q{-q6 zxsTM;tSjqU>-DlHdZt{JR1SF~g-VIMxN>^``BJZq(d+Yp?g;j3ybHRKWLz%-5T<7} zL$YrE%nlaaC^3+-!OMlxC6mH*=$eTkx`%8bgvSp&s#W2j+P&eEhh#_0j)K0iv2y6m z8VIIPdkAeai3K|oyK}ZzO~HTw_<^)cz`Yw3COm|#mO`dY8u$Q}OCM7i;LZhP1IdMlL+^CD^2v>!p7@m3tYV+(QBf9f_--p4uD{#8n2_!UFa8 z6}x<3a)YsDC!Xf4Rbe|=F*W@^67A`QupR#U@~WvH~JU5t$|%xF*Ki?z2Qk_O`H~)d#Hf&x#!XN z+oa*-FH;&WkQnHZps(g_m4XP=3AIfYNNPILs%pHwA%TOFV&y;JTuay()}m!gd>pv) zRG8)X8S@FlqK=6>qv8#tPQm1$@zW+Vya;9XZ@}WG(A?_Am3k~1;Idtxe#;i>RL(Lw zzC){^SOnSvsp^8|0V_1^4`y$=z1l~a>Zp?v5Ry-HJOgZ} z4=EsNG&2bS?32&Ha;{SILnNEmwYjwnX8Hpgo|^*5=}TC%FOU<7;--A{q;OjVnIS>BH+@}@Xo zz(N_anYxtHjm~rU+BVa}Pz>%tYAD(!aJg+Twi;mqA5GzzPLVUR*d=L7MMaotyIpD9 z0IU0Qt3lPapO0?i?%vXVV99N|Rw2B^dK@Fp!jgDY_P@}#k4!IR(v%gI1kJV*RH+|i z;BWRU&hBfDiyUY@9kPj>V*F`-e%WPoLC+ayaVy4-*NW`U86%1i>;zfXy6^ka0v?)` zDS|696k8`XvrYf2u79+?8@#x;?qTw0nrm2uF4;M3Wn`tVd4qw-Bnu*9fmQQWL4ec3 zG!wzMj*Bfqzf;1kzInafZq+6PDnLp}bdME}1=EnFYEEG;3auznAep^q48nuhk=v?2+ZuU*(hQ3#=!a*LSt$b==NsX1ev zs>A)Y*oz`Px*cvL`pc=CC2qScUIi5Y|kdMniHb+ZR^s(7!H|aB`qBt-q(7aVifwkNRtieq`TBoJ2s@`NQ0=^^zcV>`vm(yE zZyRc}P8VNhw)y-Pwve#a@#Og#0(Dj(3MYu>OpA~sc{RRGs61(`H|mWX0t$RdJV$;E zLOITla6jIY5T#xsLYangg~b)5pBJHYgDAl4QFr5b4w0SnG0@W*-$i-qV1xe`?YP^; z_wE^ULE&$MsQFQcg0XW=W6d9a{Ev)@{L46^c0UThfB*m(sa7Hv?;`4tGe;4S3D{r2 z6hg-I3DuQ=9^>@5j41zj2^+#_h!vm}1hnxIK~Jh8sEFA$EI$hW;v6X^t-{Di5&oluy+D zK*BWgV$L@njMhR$2!3f2NS@!86cjO9we=7ZDVU6DtAjMz@i4jgur-N?crN`Z;fWcT zKy#XKBE_R%{KMc?fp@H;Kq)8hiWpjPS* zd2N+NiAIKqWPH@Ffz)&odK1TGtN_gC`~PI3C?8Q9Mu5Zu+ZF+3mUM)KeV;AoMcm67W-Vu58vn>yF~?0dP3- z*CYx`=A6AL44}&E4N4*nhZQEW%Ah1YLz6Lj2>Z1_Ud!@=fA$x*#X%RAvonZXl*5G5!@p^)kdcQOA;bgOvW2y9Y;@g(Ab zz`1L*MEQg&eN~~tO&+HBTL%bC#-znAI8oOK0$waQf*CS*VbOA2bmYSdj-IptUlCX7L6T=4tq zsG!Nml-_@=e@g6Ubf#x0l2%*t_O4+&Mc0BxN1UF?azxZ(a)SF73sHPVV^O^&*stoM zR%<$%B8$gG#`P{20worxk+v;c8LwIdAP5rs1lniNOukKOYEPPcx$FK$i!x?L>S!qp zppQE|E0jJ>nl1&Z4t593A(6G@Y6%X=FJf)TImIqyJIA&URg_(aU9cUcV9TIm?Pw|y z#uw`*M%Su2ZF9#VBtaQC9pUzh5zXY>auaW>Ch$S>}owC}o;Pk%$L7VuIFT zHq3NFuD2#fX${}K`)2p;I9>|IkQTS4SReVzR%>bz)DiEN&^HaGRspdWIe386Y3>6i z-7GYSUwzR{Y!-q(!PT;?z86b5V9#%sWW+Ig4?37Ey&-b+l2Ixlsi@A!Zj8rKeLO5# zov+0^6tLlFO2q>A3!Ks-bK$Llxo_gdhjoD+0uUtx;>)fuI^>f4%{U(3Hdc|J4sj{kI0QE`L=~vWe>q|?8DvQ}*9x7VS_p+h zSK~NI1xt+OF^ zCo;Ord9Afrf6Xdl;?3`?Khx|&fll9GA{6{C&EOetoso9De6DD1Pk>cHj)Vu=2NjfhLgoRs?=AkM>s^ zG9V8pF9y98^{$mDAe`%0*)c8`BV??&PgHMD6E0Jb$my%HdPPbu(#kkL@6L;#IAs=M ztNIIB^R$-Ms$OP%{likRu!6CpDJyY66c%w_Sc9>Djdp<7OVs*+kD#|qU?nQl!ZAoe z(}XlbT8Aw*9VZ4tVuI9EIbaEMucP`94cOvV-K}>J0tHJWuI(CuN!7%|e(^2@VJ>xb zVe{lgQRpvmUj)k@#+OKO6kGZ9K5%QNfDHkCYN&*?ps|k%z8f*rTFSAKSR7|5T%{}{ zi=;1Uv>l{XgEk6iYH^EZYp|olfdc!ZaO}7!tXsMO;<`Igp+8F;m_4LNGoDOjhr=m) zR1OZWXyMwzQ=dFSy2Tlz`V|!|Y^l64s*YA1KIT605rqM~JA!=~6((edt{IMw>$t34 zn34Bm(Zq6z9Wn%|)C_{Q55b{a+;Mpy*)5HAGX1s%S9FfL1JEpJ)zByQ_OVZb3 z*I0&dSp@IKg{=4bzeHTn;iQzMU*l__;hGRjMj>#hm_V|+z|!fD2`$_@L?&g>gWXx? 
zJkbx6R8+`9H*k}Mm^lz2%Os=-%o;%uxdQ#Yn81-yahtfXLF28 z(4Jt>HBJe)z!RmwiP5{xTUe75OrFoT<`8FLQVt2xCe39dwnwgx?_A<-dFkuiP=d=| z3mwhz&l~f^_N4Q;#`n^jifvs*<#A-j2We5(}@jB1zOT{N2i3f`AA zM!ZsqIx-!xyQY8ASv-WVnfM!jl&(C@Kaw5esM7;pN0x%>PsbDSk3^~vSoAz3@Inv} z+>BsJ#>GD{N28JCwRXNktnxVTkUZlO{XYPrD{hU2_6G?1^dt@ilBd=ouhs$gj3%Wf z0SfUraiTZ)EcpX@XqVMsw5@feUlgo#{O0(Qcr8Ho*q0bv8OPWc{8B9Z9-yaDt9A3+ zX%#xS3SRrxx4LhA%YSQeK%e%#83}L3>r@Ct-9fnqLo8AxSkR)yu9C{KR{gkh*ooT- zLs`^;|Kt`GEI^M5CvD2!Boi7E{mwba2OZ2a{0UCX-M1Fz#yJj__q5mbcGqKhoK$Hj zMKiv={7Z{-m?a{ZP*_(SRvKsRN-Uic3JBtGU7ba?St4t`BykXAM7m=jS@6hQ5E)} zH=$xjjGNlO<4cLFWzK+si)JKscMf-yKGUFTIT9*?y~*7XE(twC%eBGvxs~NrQDkQv z-ZW!#zt?3ulLHkO5vw)F=@u2L9AP{vNbK15uB`r&5W+tt^qpE-EeL}`G4W}NC!0&XwYp&$_V3v8u+=0s*V(DeaEsR5?#ByCRrEBOPl7bDm5!F@q zI+#n@0+1{f=^{H9X7(qJz_kG6@|Ny9Cocg^M#KbVPkW1GbH&HmXC5=+p*e`qZIzp& z%l|nIJXu+g7{Lg@7h5B~1UFn(p!@+B5O;a$W~@tb;v-qV_GuJ|gw?f+2k9Q{@A$+S zHc^Y=Rg`*6=ioSvr=FNAl;ifsdQcVXz00UASGzWOS-(dJrC~rctdi#t(aEWoO9SDt zqh1cOLWa7W^F(li*3_AnXcLU7W)r2a1cNkH_7Ahw#`oMB>AR&jsm~Ws3Ud{6Sy>PN zGUE|AnK<=8!IVHVDM7h7#2UX=dxhUo5D5Lt2@AY%+Yy?*tbXZtf;PrrKq&Yq6nz_- z(*RqrBs3V9OMhqukFMl<2(Fjbf`di=*?LPxV@apwb z5q+3nDfeaK>x;1>r}fQt^2_T+@xtDG49?ea+al!LI86|V_OJL&9q(8VG2VK z1-MN%7R!xepKyrBg{WU8vNR1V!JeL$v3Er9K!vAB0(0Ty4IuVZFA8G%rxte{7nYbR z4{O3{d~6xEdO1)AF6gpGaQNbOQp!BTc{h*Tn@><9Xe+rfDV?o) z_IiAR&uBJ7t!bF($q?l0z5^B5OLjlmx zw!sltRTT(;FC=tOUfz3jNQl<0uJFgJHjk(n05|iuZ+Unj7alrc*RYncW|OlOET_jz z{E05kzZh&y22IAu62HveTrD~m=jmx$C5Qtnj%G$6S-84!!XwZXB?a;AZ+GARcD(U9 z-!`Kls?)~BG8<)#*wD;)sCxW#cR%xLkI(|uC|BZSZ_~WILWmL77vD!P13pXmG(g@| zxs-a9D7hvJmI;h4ZCfCS8W&h1Hh6{-dKnIZbsAD8eT5;=qS2{WaRRN%RXKdq&EuGV zz57jnZuUb+8k8b%0bt#K#D%&o96;s?CFs5soJm*WlLXA#Ahb~dN-knFLev%6&PvOq zH%xMM{^4f(#jEn+LpAXl(p}0QWe|j$pM5|mIbj_rghY7C#Ux7VGwfX=C#R!=4rQb! z0Yl5NDAz`kWr|EHH5Z{u1In8PeDc}%##-A%( zgTH=L1R%;-MTN=hSX*;E4^!x=5?S$V-t@6c=;00Q@`7r&6i+SjQM^x#C{n%xboyh;dO9pZj6y5-QLK$xuhW!q*J#Ak%W)~U z-a&Qn)C{b1qo_J5fpvP)5Xd((e1)8kdw)n7^7y;e$mF?3X1Txr+z2c&1Nk1$HKNiut=|tt3Vygk z5JX_1NQ0ovH;Pa4B)1-r2H#$bKgl`H>xMz1;9rrI^rL623_`YQ6Igv*RBm6bU*T?ePNU#KB$u`fthb$f#CyA?9aT$F@$JZ~_N`j$g)55Gx5fH_S%~Ckhr?_}K z-l2-%KMuXL{u236iFZOHQ+$dHw<=SiL`$KjGG;3HhzF|Ns^pRtNggbBwI!A?EmS(C zsi5+slkyh)C=IBKbzyTbIc{cR(UkPIHTkl?x6;P2SEVr!zyNndO|co9pMb&6anmv^u{V~^e64@IicHe}h^XgDqS;~_cRCg$-|2!0LO1 zT5I^PN}g8|%dr&lakOoKt*4o?QzLiK7x^=4Rd-@)vfp>90Tr!WaIgWk*tnQVtEiIc zB>451{7TS!SD^@`_CL=+aLhKj=YjarH@a_pGpeEQxuYQ&x5<$*ieejbHWg=!pRw~*1ywRpw6tuHqoAU?F-+W*L?b;ggM$}9E`RcI z;pR5Ql8W_I9!kMm^7aqt5Y<=Rh)E6q&q0<{)B2n%SH{L8z+t&c`&D@~)x_;}Pmi|z zOolLHGh+2x`l4qQc^1sUlv=}qW$JM3m7%)i3DTr!6Xg-cam4BlTfJG_7o2(j^%>X4 zYe2ISP=MA};($+BfNxxjgknv^J=p2q6x(z$9j5dfNLe9fi4$bzctgX)&s_8|fZr0i zS}mdjLy#tMFlu~!haf7-Abm2Ke$$MaWBq*9Jv9+g%y5SgGAaIAw~ktOgwR7Cm?!8l zsVoEmVACD`6don$E?gQCHP*x;L0Oz-<5b`*pLY*US%jSiH#!p0N1ZdHfdS~rci3Iu z?7nHz4?z@2HFyou#cbRQMRp>LQNJZP&u|OgqlA8NJd>b%hq3Sjxt&#g1N%_^JA+K9 zpMruq9FvziDbLsxCEs-QPLWnIvy=1yfVB^E&kqAG#B8W3hc=(Rc5Dv-^(O zW}v(isDhh1@dnfs5!;F1hj8evrt?LNRO!aT$BaID5|<4dVF~0O?cgeUo{45H(W|zf ze#=xQZ*d8z{XJpvOQ2t+@NWt9n>unE)r~sd&jq`v@FbiKxp+n1ToGeOiIBI;tJ2K> zVkvYR8)iqs@~Xjz7ZB%X&Z^J-XmI@&LF&w#N=~+e4;NXWhu$R_PcB0ohl1QC)~O#= zjNq65aE_^ZQ7-7V$`0zSCi+5_As&dy;|e2&gM?QMlbB{#s_P$4UWas}Na}&NM;5GH z_?Fa4VB(kEc5P!rr_>f!Od+)o4ynemNfO~8rx!09>(N8+PWIALu>)w~qJT80QjxBb zc2&4%dT&a{h7cc$=b)Iq359U{(^}WxHPd76F8Y^OIk%y*49=sXU#D znu7Aq{CcS3_#^^UeQ+9?)_Y87N?6e@WFt3 z*50XV&r!m&1n%BA8GKG>w{(sOjbH^$%aA27yCt_Ls9Cv#D%A%8Y2OTu)SA>PH&D^X z7U2fnD08Md)m?e>(8pUYX7Cr<`@`fuz5D)|>d6OpD z3O56x<8$WzF(o>JNN}QIWFw7 zVFgG-D4o3Z7?N;#G4#2E<){|vu!7#+5+6B#wn()9XYXRzN-0WZT&u|8nl75OnF;Q_ 
z^BN?4j#WgPS+mtH6i)R_f>IVyfnI}o#m1tAbrG>Iy6;hoosPMJ$GxL?A{hXcYJ0UD zJ03`)3g~QT(D^A$n|^7-BDn`5c)_wvMpoZsCI(A^gHC6KfMEB-IHP;$N3m*eRe-s) zDe~{lO09`1-KaBoHuiA?VwkvN{J3ya6J4#|!JYFpiaT|0i@kkLn+KfWTX*MFwpY{Vdk+DwJo`G42l z{oHE@_cZKIN`gq5Y+T|1bT(aNwU)EyMElUFet(~ssrnB4$-5Xyx94R`Dw4^-qcTsp zK|~Auj6)2)^Ff_>Nu!@dBrG;f|AlnR-gW{<`)IH!&@23^u!rDpMLg~(9#IqKo2bQN zmb4CCH}FP?zD0~C-rhqsn8B$zT}lJ5aLZ}* z00Sej%Q;RzZ&ln?^aZ&%bqc9a5;cChPIMzatZy{)MrBw8dDYjO%CDeTYi`nv+xWme z-rXkH?55%9uSX3tYfBPOWpdrTN8(9dX*ntL!xD31a&Dz29d)p7{d~2Oz0SOh zu$hC4`J3#sh>efO%bg9HFXA+hLQXId3jGl$Kt!&DO_7{**k3L`Ii@H#)vR1fVaAn|EL5elX?i+qWD${#K17w79Zyn-$yy%=iTb?8cV`1f05o$+C#zc_ zEKa4Y9UukD_Ev*14fuu3^7{}F0!YUPkf3BWuTzQjZ?O~PTdzz?mPDyUDUu*(3sIMf zOX^po)rzV;qJLl_-;NFrx_I!?a}Qd*tIOh!Ppx#MX9_ba=tcdll`V)pRa;Vn9((zv z?!zD6{o&ouyh`#fr8|IvCNIoOt-4z#`(g~01=?_o^23y&37nfh14}K)>C+LZlsZHO zX-69tvI~{_iFQw3g<83LCm>S8d&T)e)a?G#@3wkg_(ivsVI@S|+oc-#9!8a7Ho<`t zeCG^EJ^8fz-1qO0Wo8&tK2RwEP(>*-WD>lsy4JssXHtsaO)Sg@Cz>Wh2a@ZQTdT4{ z&7HF=m9|%Ps}iBPGJyUNAwZ$63OgRQ3213t2;xQ67m6ef6)A(GQag(>|9Es(gwC2! zmgAC18fut`Q3pI>Kvh7on^x$vW1JHWGWYy8{O2Wq_J*dF6b#3ashzn`bapQMIgzpQ zE0RXYo*ElpMQcsxUu%P&wqhQmdaIOnDgs3lf;CSNcGw7Qyg}j)+oGe|vlV;#Q`_A^ zKMwKk-FLfhoBE6D21ovArgJ<`D-JovP&Ts;qmrL|96!nhn$+QtsC;yf+7|oygCVux zxQ$*+zZ7<;dG;{7%YMJx^%qYuY#$8)Vf z)A(cP-4>y!Y(fJAHeLGkTz932vbDwm%~WfnRKs#6s~VhkY-iyq|B3G@9cej7>Zj_v zXuil%VUc)ats~N`Ec^L65~vk&g8a%7vj8OE2(W;O56D8V`K+D?ypazPof^66}nx#2blsX1V)zzgzXnuMk)aMU;h3`Zz*#nxh#%KsD94~i_hx!I`$XE(iF>TOTdovTHX-peFILT7Kq4Q$2`2BaK9 zy*!p2u1)k4VVc+uy=oqK#oGc+32#xsw9f_G!y1YeDvXI5FV>zAgr!uZy`c!e$!m#n zmBR)fwU!-EVhw=EGBu21lU8y4oN^G>S!y3}TD;6t^n}2T=n1O@W+@S>)EjFxJ9wlM z_^4X31Xwe$clYl5yg0d*F8n39bx8nPwPtbQ6w^x*ehSda=c~*ngg;K_0pi)hC1qTU z|7W|J3sdB>Y<#S5^At!S@U_`F>x)Y*E9#l8868hm~v?x3;_Xd+mb)BM!4nrY!ZYe zO4RMe4_Kskf91Uq8&N5rNxh*gy{tViV|F{gQHIrq`!>%vgMT$Yn^0;AP1Aw<@P7lZ z4ORqck+=IP-6mrxfayN{)UTmkIntaQMKNXXgE2u$5Lh_FMST#hgBtNb!jG=t*v71Z zT_+{l1yifLL4^^aJtZ0A1t=gzKfJe@3}I9~M{EWeB7nxlNV}nOSgDWT5E7BtUMBKE zx3k{suLnJ#(s(c8I{u1p07J!l z9}xzMjZ!jP;ZGFyp+rcQOiT3e`T3-iPjCGWgjI;N%qq|+0HDP|>~ztH7tIxK6o4w; z*eWhPEQ4DiC19nbH!&SYnk1;Py3f3Ov!)E2(GbDwM=G(GIV==Y3W)7Px|(|7=u6xz zdqA!I0zt1>xv-g>v{xe~_Hx|1FnIyEW4GN2AyCtscVcx0z1#5j#3KnLgEUC{g5H$a z+Gcn->pni?g$|K|QZarafw9lSMNujQK(Lqw*XQNPd`bvMg6Oui#%vhS{c%1Q-gqGk#Lo%;7_Fb_fqED^W>;ncGlO$P`;@*Q9of+TA?F zMm_R@Qd31b(W_ASqYMw@4wb!1)}!3bku9D0{zpL#Xo1?**ti59$y<7p2nsGJmV}T0 zx+J5B=KZrT4ebzfogguD3`QZ@7{Ia_=dTSXbNv5kDV-`+~V_~J#pRA z;Ig5LIW)fxZwqlsoYRYK2GUX!bX+3MY>1O@sN32|n+t(UWMXgfh3mBb5pGT|D-wmc zi}QXX^C($L)z37#2p0OHTk|WC-_@+@s}*`x?#x7;9oGwm^V5n!Q5Cr~>ROw@WR^l4 zL{cL^MHJ+SdawQxk=vk0}^EWf#)5~Ziz}_>cbm@;p3q`u8nezpRBkdN#SY2TbeRO>p^`%~x zGD3wx^T1FqY&sTp&{2O?Gow2s(as9P1&%#l-;5frM$#JprWw(}5b7B<&*zIF$o_h* z-|c?aR{Ne3B06JL+j!U92!)U&ocvT`k~iL^W>|TIQHz_sj3q95F%W%^FAHH<(~vx> z-i2T7zVWaAmA~A)i0Z`92kwNXLvH&1mhg~S>%NGA-h)9@g{3jn%%d%z%z`8~<3nIe z{7CwvCG$Q8u8`u6$pD;R32$q{4YqBLnmjTE^h4IYP$`!df1#69E;e1IhHz+(C5 z-D9DUeF_>LgWnauko_fSKw=t+wxvLoz4}>MrMwq28EJ%e$Faog7<1)37d^my1d6$x z;{f_7Km5_%&%1y8KP+yKNXayQFYi;=gIK7JNS0yoR|h9txv9K1KZe5w+Rw18_})HN zmULk8?C9ntyixP)f|YrEw|Zc*3BXuA8Va{+fz28~L4tf7WT+F6x}W=$u}&KBZYg3y z8xY*2p%;7K2gLBfR*%>Wc7JGgL3J3EDSC*`)!6KI=pchoc(HmouB9}BUo;9j`BC=s zcH(OQ*PS4#O$GehEUD(Pv)fQPzux_JJioO|q^QBN{Q$L3O!T!+oJ|M^xMX0iz{*%L zZ%Ul!n&2mTV3sI&qFLk2uhO~WNt~nF8cTt^u_j9euL`a-?EVe^bo!SU)8j@SpKDH8 zkVMgda-Vm)^=uA+jhZrzM!hW#x28^P8p!tqlB(E#&3CWI&igLbE_ipGyOSVpYVW1c}a)DM>*U zfaP%rqAS08iD}WLH%stUX;MX=Vt?{TJfH^mz&*ph+x=F&SsR_C*yO^sP?1|a zB&sLniIv1Vu%r3Z+ID-d+t@$c?Hz6J@5M%CQjhZ{wN@PY6C#1Uve~;iDS`<#$s0$_ zLMPeNy}@%8BlB4tU_xJ{36Gsx|0tBF+X@M(D~!3zNABKzM%(C*gD7-N{UrE9&6}~e 
zrb;e)H{8{?zCW#5*OFHtg#8&9ieekAQG%0M*Io-)m_JFES{G@GiH@P%&=rh)WNt~K zG;{)HPP$}F=DomPsC~LHCfgm1eQlzdgJL8ezh)D^9LtkwScj^;F%-y6qYO*H1cmRl z_b}&*-uME+XqKb>At%u?(Lp^ddVp{KK4#E$(OQUl`tsFo+xksQZ=ZNw^i>!u!Y-pE zB@+z1IIG&uU}uCU$^|SL=9FMBi=Bd+Py_)vzLa8^VYM|m8_QoPta3fOv8q|)&zfrf zq=sl07?M$mBQN?{Hkz2r;QH?DQUTYsgi8O@?%iK&=KbIO(Kq=BUJ&}CqyoMYz%95~ zSy!PaJ?H0Wxgi_Hq-1iI9t4;tV=) zvm+HZhK~zG(9DLoHhm%`#E?_;a#t1VB8gh`!a*}%ED_bi2f6+=8+0-EZAomv=%kdF z5+X_+lE^IjK#)oVQe5iO?kbZ|q&hU+BhD0Yj%KSqf$F4-m!XDrjK9Ljy|ny2c{8IA zT`R|8EwORRnYD*<9P14e6U(5r*3j4UTICzmxTF21si1)#%hn5016S8=jm0k9MzXXe zGBq1fpVLA?89PCnY6Q8LwaY}h3f<%qD;7H|dYMvUj)*1vL*q{x_r)W&_V22XT#{rQGk+7gP!-Hf*9PCrlmUZeV z*%!@Cm6nL}B_cxKJkSHpgAXC4h`{$_#TvndK&69NTnKdxDCs*^UodH_q&?x_c+>}~ z!G2cGQhDEH;Vd628ChehYsZT8=Y#Bs$Aut_O-heFN=yz}eE6_Pjybbb01YjJC{H-% z(`A_G4wlS#U`E{^GC87KB@yX!+Q3S2m%}ZJF6y+2LmHh4`#!!biS1pq@_1NoD%1fi z5WAuO20;O8(1IJeSzZIjB_Rv1#AYQzN;g!}u|CMT1TgDch6fP@(bgbJCGIRCBgo4;Bj-q;Ddr_YQ7tqtk$xf#8G z@umZiMI(J5ZR6w*jPktbDMm~uo(G%JtUu^XuKw=laUu$@O1|dL^m!LnGkzY`={arHG~Y>5Enn{jt=@} z!X3ru{b#TRnEr7J2sf@&oPyeGlqdBSOetz{r?rI3d6JeC~H`akYa0s%^vw_K#o4(2AGzIZ1C*UKaAMSTtMA4@~(>$w1Lo=s3tM-gWDKcX@ zDxcn#_!YX9tFW@J5Z1bvL^Kn990XFqM?FhQHke`Vvn0m{U@3)+bmE|lgrKJc+MjDf z*xRpwLAb&}SMuqf#D72C?bf|xpg&z^;$q%NtW1?wgM_(oS&8DuSCvL674-|$MJec= zGXtY#BQP%vFOX1np)F6OG&h5%n3dyMgC`km^x+Ta?e7ygB)lh(U#NZD)%fF@8cj6s zI(|d(A5I}6xmTzE2^%3A`ytZ7CUTfZr<> zAMo8+-tTq)I^M$AJK_4O$|6D3`so*^6)XrT+u9SDB3!ry;hDw^78D!xVu={=a`mtC zxzU0rOVP&N*3&h|&`z1Ti}n=+!dxrZM;lw`4Ju=ImVLx&hitMc5U|W{D)6hof$!1< zzD#04h@a!+G_a!=j@JbkK$j(W^U;&;VbJcQ?l=8WKBKHV&+r0UU$;UPIuU#v6i}q% zDzmdB9k3VEcSCS@<$f`_k-$*VwVRrQWQZFUmPSmx|5=CuqC-zUj~_n#`|j??@dJ6l zVR1DjR`KGwh_SA>++G`EayyGz)Jo_o$P#mkF}@{QzIxv3bOu2$G+ohNM_75(75JR` z)HQHW1pvSk;U?=N+(v3XQoxM`>nL0SGd9o0+=FJ74gvey7SVGK`QOI^_2{23fJS-|CGPu!ubU%>vr27A&lPt zr2EK!-$OXmxX%1qtvlp!#`9Z|e#v-XJ?f$OdLAP$gAFJT_$;mspli!4<|>Se{63(l)<^kr zB!ahxdczu19wG}Xfz;l~C@&7CD=G6!|c!6D#;C|>>q8jGGee`ElbR@PZmM(MQ6uT{BR=&bfNFowQQ8KbvfLqeU zX&#be87Gdq-`$kc5ChlYR9iG*hW=b)JyJf3uRa`tU=Di-0`JUTM31nDf!QFR_NHK#4A<0zNz=g+VUKNv)Fn5#8I

z_9!#($_)xt*k=K50!Gt+pIsCuJz1{j>pN~k*Z;KHjWHDOSs16yiTt^8=XmA-mSuzW zTR|84QXkcQP%u~NG!wx89>lL5^dm%;TRRG^C{kIMh zeW>jzwz`i(otgy#i>V$O%Bg;#xs>oa; zq$Vq7SriMqOj$q{n5CB0b&QT=l(epwq$M_##&NNwCo>F3Oo3c4k!Ji0f0gh!hNzrk za*WYT6SOCMElj86sSAgsTz4z=YF%sMkWTlRbM%L#1&&;_aOZmAblx7L+{3qsU4o z=6R`J!Wo#R1(*=Nu~VdRs=uh(R`9Ci-RTC?-14SVTH=IIe73SdaM(y^FvbFQ&~FBPFZ)m_Y??G&RrV!41jOT(7huDS+Q%<$ucJy(KIJck=u?!6 z-yuzwI_P&WSDDd>*K{_gkxGKpC_Yy=mXr{~9?~~wr3byTV|)@)hgli)!py2e)AWcM z5-)Shf#-f3Ot-(b2LWJwmf-32Ldcg#VeKFwg58fdb;YC!x!@4tYqQ-IK!&kd-E6Ee zzpb>J((Lu)SQ+vacv}?+Lx7fxowrU6-EugVbv$D(++r5&I&ay$ZX zLrF;@mTvfjPG%ciq-Mq zR2lLj2>$+vpV!5MNk}KX^hV^n1Wz>zfW31#kH^DYNoV~>OPq$*o%7?l1tRA?ov#Awt+1^zSo;U?)b(uG&m>P^3BULih~VOdqQ8b{v`WYrE67osD-iQ4}x z#(Ojx@IsZEgw`ZjP@WT((GmVU`E~oq+IcNy6H8^h^bfiMMX*sx*^B(emV5i?9D~m06x6YOL%00j3#0;fPTd;A-C`Q1+YEGE%@M(osfR8n_#G8U{1?5K(;JtlG)8MwR`bvlA%>q z5Qz1f-kwLVytFo^Nn2k0*iu2_cdI8A3nPG7B|0IM%md?_%ZSs4){y2B4$C``@QF&$B(B(iyc+_3STMn@SWzJhe;xkhJw_G z%OMMrbRqp#tNUNN9i@B_!nCY|BdwgS23g~8x*j*qCc&Eu#(naTPE4g*Umk~7{s(3ZW^>ceiSL2(sx(PD-Og>VsoRbZJU1v;jee6qOl* zXe*Ejm}jx7^u^E-u8A{6SsvJ8Oh27`84ReBkjLErQ=tHUHd-x_Jb>81Cu5oJZl9>w zAl?a3U$lmg6c1fUxyMA*<~(1>cRlAa7$V~fSl+3~`66b2t($nA{Rl$%NTjlV>P z2}&1=U5qcWWKazcPD2>k@6`CPFn1^88isIBbs}%hMb!3z#KYDVZ1N7+U7z{gy*^=l zk7~@Ao^0`l)rgHWfAsJ|JS`fo_)Ufj0+QJEyc-w!8mo-u#ar0(=qo z{$=WA2ixRf&>bxiPvGx&27`E^93iplV`a$xKJui_!CH}Ih`I57#i=mfD#c&P#pfRF z+h2eEV<<`fwsI!3)T*olW~d&L$|+TU8praYK2h=h%)5y!%hnPV#FOn5kc7(4$n_jhIZumvYzd11adN|&ry%UhS<((0vpH>zE-p-Q z!PClTnZkMbx~Fvxy8DgAsR1ETK<<;pEq)NW;2EAJ>fu5?6{SGDFLc&nxTSJ9v@b_2 zYpJbfcj$!^P?5Vam&zI4Hmzf1ZAbQx1UD^wq4x4Q>}IZg;tBeHYw@OmB6xqh>Z0UeI?M5#a%!S;w|E! zzqF#J3M(fl1nZqq>$2i_sK!B9sL;xKQt3tV=Bs$9?$3N}12{}B!UZcsjwPq(CW5=- zrz29d6&uh!4$$zKhF1v>UN9YESCrEY%-^t~YK+#u*DIRk)wlaga|g zmktV-LM2w6K%~ujPY?g#p7(Q2YrqTC{?)_d Date: Thu, 28 Dec 2023 11:20:00 -0800 Subject: [PATCH 155/811] Fix OpenAI server sampling w.r.t. temp and seed (#4668) The default values for tfs_z and typical_p were being set to zero, which caused the token candidates array to get shrunk down to one element thus preventing any sampling. Note this only applies to OpenAI API compatible HTTP server requests. The solution is to use the default values that OpenAI documents, as well as ensuring we use the llama.cpp defaults for the rest. I've tested this change still ensures deterministic output by default. If a "temperature" greater than 0 is explicitly passed, then output is unique each time. If "seed" is specified in addition to "temperature" then the output becomes deterministic once more. See mozilla-Ocho/llamafile#117 See mozilla-Ocho/llamafile@9e4bf29 --- examples/server/server.cpp | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 72dfe452c..c5035e202 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -441,7 +441,6 @@ struct llama_client_slot } images.clear(); - // llama_set_rng_seed(ctx, params.seed); in batched the seed matter??????? 
     }
 
     bool has_budget(gpt_params &global_params) {
@@ -921,6 +920,7 @@ struct llama_server_context
             llama_sampling_free(slot->ctx_sampling);
         }
         slot->ctx_sampling = llama_sampling_init(slot->sparams);
+        llama_set_rng_seed(ctx, slot->params.seed);
         slot->command = LOAD_PROMPT;
         all_slots_are_idle = false;
 
@@ -1215,7 +1215,7 @@ struct llama_server_context
             {"n_ctx",             slot.n_ctx},
             {"model",             params.model_alias},
             {"seed",              slot.params.seed},
-            {"temp",              slot.sparams.temp},
+            {"temperature",       slot.sparams.temp},
             {"top_k",             slot.sparams.top_k},
             {"top_p",             slot.sparams.top_p},
             {"min_p",             slot.sparams.min_p},
@@ -2437,26 +2437,33 @@ json oaicompat_completion_params_parse(
     llama_params["__oaicompat"] = true;
 
     // Map OpenAI parameters to llama.cpp parameters
+    //
+    // For parameters that are defined by the OpenAI documentation (e.g.
+    // temperature), we explicitly specify OpenAI's intended default; we
+    // need to do that because sometimes OpenAI disagrees with llama.cpp
+    //
+    // https://platform.openai.com/docs/api-reference/chat/create
+    llama_sampling_params default_sparams;
     llama_params["model"]             = json_value(body, "model", std::string("uknown"));
     llama_params["prompt"]            = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt'
     llama_params["cache_prompt"]      = json_value(body, "cache_prompt", false);
-    llama_params["temperature"]       = json_value(body, "temperature", 0.8);
-    llama_params["top_k"]             = json_value(body, "top_k", 40);
-    llama_params["top_p"]             = json_value(body, "top_p", 0.95);
+    llama_params["temperature"]       = json_value(body, "temperature", 0.0);
+    llama_params["top_k"]             = json_value(body, "top_k", default_sparams.top_k);
+    llama_params["top_p"]             = json_value(body, "top_p", 1.0);
     llama_params["n_predict"]         = json_value(body, "max_tokens", -1);
     llama_params["logit_bias"]        = json_value(body, "logit_bias",json::object());
     llama_params["frequency_penalty"] = json_value(body, "frequency_penalty", 0.0);
     llama_params["presence_penalty"]  = json_value(body, "presence_penalty", 0.0);
-    llama_params["seed"]              = json_value(body, "seed", 0);
+    llama_params["seed"]              = json_value(body, "seed", LLAMA_DEFAULT_SEED);
     llama_params["stream"]            = json_value(body, "stream", false);
-    llama_params["mirostat"]          = json_value(body, "mirostat", false);
-    llama_params["mirostat_tau"]      = json_value(body, "mirostat_tau", 0.0);
-    llama_params["mirostat_eta"]      = json_value(body, "mirostat_eta", 0.0);
-    llama_params["penalize_nl"]       = json_value(body, "penalize_nl", false);
-    llama_params["typical_p"]         = json_value(body, "typical_p", 0.0);
+    llama_params["mirostat"]          = json_value(body, "mirostat", default_sparams.mirostat);
+    llama_params["mirostat_tau"]      = json_value(body, "mirostat_tau", default_sparams.mirostat_tau);
+    llama_params["mirostat_eta"]      = json_value(body, "mirostat_eta", default_sparams.mirostat_eta);
+    llama_params["penalize_nl"]       = json_value(body, "penalize_nl", default_sparams.penalize_nl);
+    llama_params["typical_p"]         = json_value(body, "typical_p", default_sparams.typical_p);
     llama_params["repeat_last_n"]     = json_value(body, "repeat_last_n", 0);
     llama_params["ignore_eos"]        = json_value(body, "ignore_eos", false);
-    llama_params["tfs_z"]             = json_value(body, "tfs_z", 0.0);
+    llama_params["tfs_z"]             = json_value(body, "tfs_z", default_sparams.tfs_z);
 
     if (body.count("grammar") != 0) {
         llama_params["grammar"] = json_value(body, "grammar", json::object());
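A quick way to exercise the behaviour described in the commit message above is to send the same OpenAI-style request twice. The sketch below is illustrative only and not part of the patch: it assumes a llama.cpp server running locally on the default port 8080 with its OpenAI-compatible `/v1/chat/completions` route, and the model name is an arbitrary placeholder. With both `temperature` and `seed` set, repeated runs should return the same completion; dropping `seed` while keeping a non-zero `temperature` should make each run differ.

```bash
# Hypothetical smoke test: run this twice and diff the two responses.
curl -s http://localhost:8080/v1/chat/completions \
    -H "Content-Type: application/json" \
    -d '{
          "model": "placeholder-model",
          "messages": [{"role": "user", "content": "Say something about llamas."}],
          "temperature": 0.7,
          "seed": 42
        }'
```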
commits from this repo --- scripts/sync-ggml-am.sh | 44 +++++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 83abe3681..93aad88a7 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -26,22 +26,36 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML -git log --oneline $lc..HEAD +git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits -git format-patch $lc --stdout -- \ - include/ggml/ggml*.h \ - src/ggml*.h \ - src/ggml*.c \ - src/ggml*.cpp \ - src/ggml*.m \ - src/ggml*.metal \ - src/ggml*.cu \ - tests/test-opt.cpp \ - tests/test-grad0.cpp \ - tests/test-quantize-fns.cpp \ - tests/test-quantize-perf.cpp \ - tests/test-backend-ops.cpp \ - > $SRC_LLAMA/ggml-src.patch +if [ ! -s $SRC_LLAMA/ggml-commits ]; then + rm -v $SRC_LLAMA/ggml-commits + echo "No new commits" + exit 0 +fi + +if [ -f $SRC_LLAMA/ggml-src.patch ]; then + rm -v $SRC_LLAMA/ggml-src.patch +fi + +while read c; do + git format-patch -k $c~1..$c --stdout -- \ + include/ggml/ggml*.h \ + src/ggml*.h \ + src/ggml*.c \ + src/ggml*.cpp \ + src/ggml*.m \ + src/ggml*.metal \ + src/ggml*.cu \ + tests/test-opt.cpp \ + tests/test-grad0.cpp \ + tests/test-quantize-fns.cpp \ + tests/test-quantize-perf.cpp \ + tests/test-backend-ops.cpp \ + >> $SRC_LLAMA/ggml-src.patch +done < $SRC_LLAMA/ggml-commits + +rm -v $SRC_LLAMA/ggml-commits # delete files if empty if [ ! -s $SRC_LLAMA/ggml-src.patch ]; then From afc8c192919f04613a92d40391bff4c8cd99856b Mon Sep 17 00:00:00 2001 From: bssrdf Date: Fri, 29 Dec 2023 03:32:31 -0500 Subject: [PATCH 157/811] ggml : fix some mul mat cases + add tests for src1 F16 (ggml/669) * fixed mul-mat error for old GPUs * style fixes * add mul mat src1 f16 test cases, fix more cases ggml-ci --------- Co-authored-by: bssrdf Co-authored-by: slaren --- ggml-backend.c | 8 +++- ggml-cuda.cu | 89 +++++++++++++++++++------------------- ggml.c | 2 +- tests/test-backend-ops.cpp | 14 +++--- 4 files changed, 60 insertions(+), 53 deletions(-) diff --git a/ggml-backend.c b/ggml-backend.c index 526ce732b..2c3752067 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -614,10 +614,14 @@ static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c } static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { - return true; + switch (op->op) { + case GGML_OP_MUL_MAT: + return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; + default: + return true; + } GGML_UNUSED(backend); - GGML_UNUSED(op); } static struct ggml_backend_i cpu_backend_i = { diff --git a/ggml-cuda.cu b/ggml-cuda.cu index abad9cc39..9a9effcf5 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7485,6 +7485,8 @@ static void ggml_cuda_op_dequantize_mul_mat_vec( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + GGML_ASSERT(src1->type == GGML_TYPE_F32); + // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics #ifdef GGML_CUDA_F16 cuda_pool_alloc src1_dfloat_a; @@ -7577,6 +7579,7 @@ static void ggml_cuda_op_mul_mat_cublas( const int compute_capability = g_device_caps[id].cc; if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { + //printf("this branch\n"); // convert src0 
and src1 to fp16, multiply as fp16, convert dst to fp32 cuda_pool_alloc src0_as_f16; if (src0->type != GGML_TYPE_F16) { @@ -7614,9 +7617,9 @@ static void ggml_cuda_op_mul_mat_cublas( const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); to_fp32_cuda(dst_f16.get(), dst_dd_i, row_diff*src1_ncols, stream); - } - else { + } else { cuda_pool_alloc src0_ddq_as_f32; + cuda_pool_alloc src1_ddq_as_f32; if (src0->type != GGML_TYPE_F32) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type); @@ -7624,7 +7627,15 @@ static void ggml_cuda_op_mul_mat_cublas( src0_ddq_as_f32.alloc(row_diff*ne00); to_fp32_cuda(src0_dd_i, src0_ddq_as_f32.get(), row_diff*ne00, stream); } + if (src1->type != GGML_TYPE_F32) { + const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src1->type); + GGML_ASSERT(to_fp32_cuda != nullptr); + src1_ddq_as_f32.alloc(src1_ncols*ne10); + to_fp32_cuda(src1_ddf_i, src1_ddq_as_f32.get(), src1_ncols*ne10, stream); + } + const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32.get(); + const float * src1_ddf1_i = src1->type == GGML_TYPE_F32 ? (const float *) src1_ddf_i : src1_ddq_as_f32.get(); const float alpha = 1.0f; const float beta = 0.0f; @@ -7633,9 +7644,9 @@ static void ggml_cuda_op_mul_mat_cublas( CUBLAS_CHECK( cublasSgemm(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, row_diff, src1_ncols, ne10, - &alpha, src0_ddf_i, ne00, - src1_ddf_i, ne10, - &beta, dst_dd_i, ldc)); + &alpha, src0_ddf_i, ne00, + src1_ddf1_i, ne10, + &beta, dst_dd_i, ldc)); } (void) dst; @@ -8035,6 +8046,7 @@ static void ggml_cuda_op_mul_mat( GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src1->type == GGML_TYPE_F32 || (src1->ne[2] == 1 && src1->ne[3] == 1)); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -8481,9 +8493,9 @@ static __global__ void k_compute_batched_ptrs( int64_t i03 = i13 / r3; int64_t i02 = i12 / r2; - ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; - ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; - ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; + ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; + ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12 + i13*nb13; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; } static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -8492,28 +8504,10 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne00 = src0->ne[0]; GGML_UNUSED(ne00); - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; + GGML_TENSOR_BINARY_OP_LOCALS - const int64_t nb01 = src0->nb[1]; - const int64_t nb02 = src0->nb[2]; GGML_UNUSED(nb02); - const int64_t nb03 = src0->nb[3]; GGML_UNUSED(nb03); - - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; - - const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); - const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); - - const int64_t ne1 = ggml_nelements(src1); - const int64_t 
ne = ggml_nelements(dst); + const int64_t ne_dst = ggml_nelements(dst); ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; @@ -8522,7 +8516,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; void * src0_ddq = src0_extra->data_device[g_main_device]; - half * src0_as_f16 = (half *) src0_ddq; + half * src0_f16 = (half *) src0_ddq; ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; @@ -8531,11 +8525,15 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; // convert src1 to fp16 - const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); - GGML_ASSERT(to_fp16_cuda != nullptr); - - cuda_pool_alloc src1_as_f16(ne1); - to_fp16_cuda(src1_ddf, src1_as_f16.get(), ne1, main_stream); + cuda_pool_alloc src1_f16_alloc; + if (src1->type != GGML_TYPE_F16) { + const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); + const int64_t ne_src1 = ggml_nelements(src1); + src1_f16_alloc.alloc(ne_src1); + GGML_ASSERT(to_fp16_cuda != nullptr); + to_fp16_cuda(src1_ddf, src1_f16_alloc.get(), ne_src1, main_stream); + } + half * src1_f16 = src1->type == GGML_TYPE_F16 ? (half *) src1_ddf : src1_f16_alloc.get(); cuda_pool_alloc dst_f16; char * dst_t; @@ -8557,7 +8555,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const void * beta = &beta_f16; if (dst->op_params[0] == GGML_PREC_DEFAULT) { - dst_t = (char *) dst_f16.alloc(ne); + dst_t = (char *) dst_f16.alloc(ne_dst); nbd2 /= sizeof(float) / sizeof(half); nbd3 /= sizeof(float) / sizeof(half); @@ -8604,9 +8602,9 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA - (const char *) src1_as_f16.get(), CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB - beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC + alpha, (const char *) src0_f16, CUDA_R_16F, nb01/nb00, nb02/nb00, // strideA + (const char *) src1_f16, CUDA_R_16F, nb11/nb10, nb12/nb10, // strideB + beta, ( char *) dst_t, cu_data_type, ne01, nb2/nb0, // strideC ne12*ne13, cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); @@ -8619,12 +8617,13 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( - src0_as_f16, src1_as_f16.get(), dst_t, + src0_f16, src1_f16, dst_t, ptrs_src.get(), ptrs_dst.get(), ne12, ne13, ne23, nb02, nb03, - nb12, nb13, + src1->type == GGML_TYPE_F16 ? nb12 : nb12/2, + src1->type == GGML_TYPE_F16 ? 
nb13 : nb13/2, nbd2, nbd3, r2, r3); CUDA_CHECK(cudaGetLastError()); @@ -8632,8 +8631,8 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/nb00, + (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/nb10, beta, ( void **) (ptrs_dst.get() + 0*ne23), cu_data_type, ne01, ne23, cu_compute_type, @@ -8643,7 +8642,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const if (dst->op_params[0] == GGML_PREC_DEFAULT) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16.get(), dst_ddf, ne, main_stream); + to_fp32_cuda(dst_f16.get(), dst_ddf, ne_dst, main_stream); } } @@ -8682,13 +8681,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } else if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { - if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0) { + if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->type == GGML_TYPE_F32) { #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else diff --git a/ggml.c b/ggml.c index ed56e60a8..a9e1ea9b4 100644 --- a/ggml.c +++ b/ggml.c @@ -9687,7 +9687,7 @@ static void ggml_compute_forward_mul_mat( const size_t row_size = ggml_row_size(vec_dot_type, ne10); assert(params->wsize >= ne11*ne12*ne13*row_size); - assert(src1->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F32); for (int64_t i13 = 0; i13 < ne13; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f3df8a8c6..b115299c0 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -350,13 +350,18 @@ struct test_case { fflush(stdout); // check if backends support op + bool supported = true; for (ggml_backend_t backend : {backend1, backend2}) { if (!ggml_backend_supports_op(backend, out)) { - printf("not supported\n"); - ggml_free(ctx); - return true; + printf("not supported [%s] ", ggml_backend_name(backend)); + supported = false; } } + if (!supported) { + printf("\n"); + ggml_free(ctx); + return true; + } // post-graph sentinel add_sentinel(ctx); @@ -1505,8 +1510,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } for (ggml_type type_a : all_types) { - for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { - // FIXME: CPU crashes on f16xf16 + for (ggml_type type_b : {GGML_TYPE_F32, 
GGML_TYPE_F16}) { test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, { 1, 1}, {1, 1})); test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {1, 1})); test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {2, 1})); From 38b3de4658292582a8941a2be5c77b40ce6ac0f2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 14:56:41 +0200 Subject: [PATCH 158/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 1ec144116..6ff2d5233 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -76e7f47b69e8334384dc718480c496dafbd47999 +168c43edd1f85ebdecd4c79262cacb32b74eda68 From 441f51dca004debf8b275f1bdc08e0f1af7fd8f8 Mon Sep 17 00:00:00 2001 From: Tamotsu Takahashi Date: Fri, 29 Dec 2023 19:23:27 +0900 Subject: [PATCH 159/811] ci : build with CLBlast + ggml-opencl use GGML_API (whisper/1576) * Build with CLBlast * Declare GGML_API After rebasing, examples/talk-llama failed: "D:\a\whisper.cpp\whisper.cpp\build\ALL_BUILD.vcxproj" (build target) (1) -> "D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj" (default target) (14) -> (Link target) -> llama.obj : error LNK2019: unresolved external symbol ggml_cl_free_data referenced in function "public: __cdecl llama_model::~llama_model(void)" (??1llama_model@@QEAA@XZ) [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] llama.obj : error LNK2019: unresolved external symbol ggml_cl_transform_tensor referenced in function "public: void __cdecl llama_model_loader::load_all_data(struct ggml_context *,void (__cdecl*)(float,void *),void *,struct llama_mlock *)" (?load_all_data@llama_model_loader@@QEAAXPEAUggml_context@@P6AXMPEAX@Z1PEAUllama_mlock@@@Z) [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] D:\a\whisper.cpp\whisper.cpp\build\bin\Release\talk-llama.exe : fatal error LNK1120: 2 unresolved externals [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] --- ggml-opencl.h | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ggml-opencl.h b/ggml-opencl.h index a92b445c9..44d05bd64 100644 --- a/ggml-opencl.h +++ b/ggml-opencl.h @@ -6,19 +6,19 @@ extern "C" { #endif -void ggml_cl_init(void); +GGML_API void ggml_cl_init(void); -void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); +GGML_API void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); -void * ggml_cl_host_malloc(size_t size); -void ggml_cl_host_free(void * ptr); +GGML_API 
void * ggml_cl_host_malloc(size_t size); +GGML_API void ggml_cl_host_free(void * ptr); -void ggml_cl_free_data(const struct ggml_tensor* tensor); +GGML_API void ggml_cl_free_data(const struct ggml_tensor* tensor); -void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); +GGML_API void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); #ifdef __cplusplus } From c8255f8a6b2a3b3ebc6cb340cc2487f39fc95ffc Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 15:12:35 +0200 Subject: [PATCH 160/811] scripts : print list of sync commits --- scripts/sync-ggml-am.sh | 1 + scripts/sync-ggml.last | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 93aad88a7..91478f177 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -26,6 +26,7 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML +git log --oneline $lc..HEAD git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits if [ ! -s $SRC_LLAMA/ggml-commits ]; then diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 6ff2d5233..5b6a440f7 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -168c43edd1f85ebdecd4c79262cacb32b74eda68 +df098ea908764cba4a4889a1cbe7b026b2d31a14 From afd997ab6011dfefe9e917425b04ef4d83614841 Mon Sep 17 00:00:00 2001 From: Peter Sugihara Date: Fri, 29 Dec 2023 05:58:56 -0800 Subject: [PATCH 161/811] llama.swiftui : fix infinite loop, ouput timings, buff UI (#4674) * fix infinite loop * slight UI simplification, clearer UX * clearer UI text, add timings to completion log --- .../llama.cpp.swift/LibLlama.swift | 2 ++ .../llama.swiftui/Models/LlamaState.swift | 27 ++++++++++---- .../llama.swiftui/UI/ContentView.swift | 35 +++---------------- .../llama.swiftui/UI/DownloadButton.swift | 2 +- 4 files changed, 29 insertions(+), 37 deletions(-) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 464fb3277..66244382f 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -1,5 +1,7 @@ import Foundation +// To use this in your own project, add llama.cpp as a swift package dependency +// and uncomment this import line. // import llama enum LlamaError: Error { diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index 3393eb242..17cb5b9dd 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -4,6 +4,7 @@ import Foundation class LlamaState: ObservableObject { @Published var messageLog = "" @Published var cacheCleared = false + let NS_PER_S = 1_000_000_000.0 private var llamaContext: LlamaContext? private var defaultModelUrl: URL? { @@ -20,12 +21,12 @@ class LlamaState: ObservableObject { } func loadModel(modelUrl: URL?) 
throws { - messageLog += "Loading model...\n" if let modelUrl { + messageLog += "Loading model...\n" llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" } else { - messageLog += "Could not locate model\n" + messageLog += "Load a model from the list below\n" } } @@ -34,15 +35,29 @@ class LlamaState: ObservableObject { return } + let t_start = DispatchTime.now().uptimeNanoseconds await llamaContext.completion_init(text: text) + let t_heat_end = DispatchTime.now().uptimeNanoseconds + let t_heat = Double(t_heat_end - t_start) / NS_PER_S + messageLog += "\(text)" - while await llamaContext.n_cur <= llamaContext.n_len { + while await llamaContext.n_cur < llamaContext.n_len { let result = await llamaContext.completion_loop() messageLog += "\(result)" } + + let t_end = DispatchTime.now().uptimeNanoseconds + let t_generation = Double(t_end - t_heat_end) / NS_PER_S + let tokens_per_second = Double(await llamaContext.n_len) / t_generation + await llamaContext.clear() - messageLog += "\n\ndone\n" + messageLog += """ + \n + Done + Heat up took \(t_heat)s + Generated \(tokens_per_second) t/s\n + """ } func bench() async { @@ -56,10 +71,10 @@ class LlamaState: ObservableObject { messageLog += await llamaContext.model_info() + "\n" let t_start = DispatchTime.now().uptimeNanoseconds - await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up + let _ = await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up let t_end = DispatchTime.now().uptimeNanoseconds - let t_heat = Double(t_end - t_start) / 1_000_000_000.0 + let t_heat = Double(t_end - t_start) / NS_PER_S messageLog += "Heat up time: \(t_heat) seconds, please wait...\n" // if more than 5 seconds, then we're probably running on a slow device diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index c78f107b3..147e0c63b 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -42,46 +42,27 @@ struct ContentView: View { Button("Send") { sendText() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Bench") { bench() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Clear") { clear() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Copy") { UIPasteboard.general.string = llamaState.messageLog } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) - } + }.buttonStyle(.bordered) - VStack { + VStack(alignment: .leading) { DownloadButton( llamaState: llamaState, modelName: "TinyLlama-1.1B (Q4_0, 0.6 GiB)", modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" ) - .font(.system(size: 12)) - .padding(.top, 4) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -89,7 +70,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" ) - .font(.system(size: 12)) DownloadButton( llamaState: llamaState, @@ -97,8 +77,6 @@ struct ContentView: View { modelUrl: 
"https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", filename: "tinyllama-1.1b-f16.gguf" ) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -106,7 +84,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", filename: "phi-2-q4_0.gguf" ) - .font(.system(size: 12)) DownloadButton( llamaState: llamaState, @@ -114,8 +91,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", filename: "phi-2-q8_0.gguf" ) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -123,15 +98,15 @@ struct ContentView: View { modelUrl: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", filename: "mistral-7b-v0.1.Q4_0.gguf" ) - .font(.system(size: 12)) Button("Clear downloaded models") { ContentView.cleanupModelCaches() llamaState.cacheCleared = true } - .padding(8) - .font(.system(size: 12)) } + .padding(.top, 4) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) } .padding() } diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift index 4bd75cb69..c9f322ca1 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -93,7 +93,7 @@ struct DownloadButton: View { print("Error: \(err.localizedDescription)") } }) { - Text("\(modelName) (Downloaded)") + Text("Load \(modelName)") } } else { Text("Unknown status") From 82d6eab224862a7044069fb9211dc4b29124264b Mon Sep 17 00:00:00 2001 From: andrijdavid Date: Fri, 29 Dec 2023 15:18:20 +0100 Subject: [PATCH 162/811] main-cmake-pkg : fix build issue (#4665) * Fix main-cmake-pkg compilation * Use glob to load common files * cmake : fix trailing whitespace --------- Co-authored-by: Georgi Gerganov --- examples/main-cmake-pkg/CMakeLists.txt | 27 ++++++-------------------- 1 file changed, 6 insertions(+), 21 deletions(-) diff --git a/examples/main-cmake-pkg/CMakeLists.txt b/examples/main-cmake-pkg/CMakeLists.txt index cb00edbbb..deb77d588 100644 --- a/examples/main-cmake-pkg/CMakeLists.txt +++ b/examples/main-cmake-pkg/CMakeLists.txt @@ -7,28 +7,13 @@ find_package(Llama 0.0.1 REQUIRED) # Bake common functionality in with target. Because applications # using the relocatable Llama package should be outside of the # source tree, main-cmake-pkg pretends the dependencies are built-in. - set(_common_path "${CMAKE_CURRENT_LIST_DIR}/../../common") -add_library(common OBJECT - ${_common_path}/common.h - ${_common_path}/common.cpp - ${_common_path}/console.h - ${_common_path}/console.cpp - ${_common_path}/grammar-parser.h - ${_common_path}/grammar-parser.cpp - ${_common_path}/sampling.h - ${_common_path}/sampling.cpp - ) - -# WARNING: because build-info.h is auto-generated, it will only -# be available after the user has built the llama.cpp sources. 
-# -configure_file(${_common_path}/../build-info.h - ${CMAKE_CURRENT_BINARY_DIR}/build-info.h - COPYONLY) - -target_include_directories(common PUBLIC ${LLAMA_INCLUDE_DIR} - ${CMAKE_CURRENT_BINARY_DIR}) +add_library(common OBJECT) +file(GLOB _common_files + "${_common_path}/*.h" + "${_common_path}/*.cpp" +) +target_sources(common PRIVATE ${_common_files}) # If the common project was part of "main-cmake-pkg" the transient # defines would automatically be attached. Because the common func- From b93edd22f55d3e5268263c3edcdae1818505c078 Mon Sep 17 00:00:00 2001 From: Karthik Sethuraman Date: Fri, 29 Dec 2023 06:22:10 -0800 Subject: [PATCH 163/811] server : allow to generate multimodal embeddings (#4681) --- examples/server/README.md | 4 +++- examples/server/server.cpp | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index f1e586a1c..718a7e064 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -166,7 +166,7 @@ node index.js `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) - `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:` In this case, `[img-12]` will be replaced by the embeddings of the image id 12 in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. *Result JSON:* @@ -224,6 +224,8 @@ node index.js `content`: Set the text to process. + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `content`. You can determine the place of the image in the content as in the following: `Image: [img-21].\nCaption: This is a picture of a house`. In this case, `[img-21]` will be replaced by the embeddings of the image with id `21` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 21}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + - **POST** `/infill`: For code infilling. Takes a prefix and a suffix and returns the predicted completion as stream. 
*Options:* diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c5035e202..31b8cf33d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -3077,7 +3077,17 @@ int main(int argc, char **argv) { prompt = ""; } - const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true, -1); + + json image_data; + if (body.count("image_data") != 0) { + image_data = body["image_data"]; + } + else + { + image_data = ""; + } + + const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0}, {"image_data", image_data} }, false, true, -1); task_result result = llama.next_result(task_id); return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); From 60f55e888c29cbd87c4238dd19e85d0eef87245d Mon Sep 17 00:00:00 2001 From: SakuraUmi Date: Fri, 29 Dec 2023 22:22:44 +0800 Subject: [PATCH 164/811] server : fix OpenAI server sampling w.r.t. penalty. (#4675) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 31b8cf33d..035eb24ac 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2461,7 +2461,7 @@ json oaicompat_completion_params_parse( llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); llama_params["penalize_nl"] = json_value(body, "penalize_nl", default_sparams.penalize_nl); llama_params["typical_p"] = json_value(body, "typical_p", default_sparams.typical_p); - llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", 0); + llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); llama_params["tfs_z"] = json_value(body, "tfs_z", default_sparams.tfs_z); From db49ff8ed7f0bb201176703441cc02911b08ef2a Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Fri, 29 Dec 2023 06:24:12 -0800 Subject: [PATCH 165/811] server : replace sleep with condition variables (#4673) The server currently schedules tasks using a sleep(5ms) busy loop. This adds unnecessary latency since most sleep implementations do a round up to the system scheduling quantum (usually 10ms). Other libc sleep impls spin for smaller time intervals which results in the server's busy loop consuming all available cpu. Having the explicit notify() / wait() code also helps aid in the readability of the server code. 
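For illustration only, a minimal sketch of the notify()/wait() pattern this change adopts; the struct name, the `int` task payload, and the queue layout below are placeholders for this sketch, not the server's actual types:

```cpp
// Minimal sketch, not the server's actual task queue: a consumer that blocks on a
// condition variable instead of polling with sleep().
#include <condition_variable>
#include <deque>
#include <mutex>

struct task_queue {
    std::deque<int>         tasks;     // placeholder task type
    std::mutex              mutex;
    std::condition_variable condition;

    void push(int task) {
        {
            std::lock_guard<std::mutex> lock(mutex);
            tasks.push_back(task);
        }
        condition.notify_one();        // wake the consumer immediately
    }

    int pop() {
        std::unique_lock<std::mutex> lock(mutex);
        condition.wait(lock, [&] { return !tasks.empty(); }); // no busy loop, no sleep()
        int task = tasks.front();
        tasks.pop_front();
        return task;
    }
};
```

The point of the pattern is that wait() releases the lock and blocks until notify_one() fires, so the consumer neither spins burning CPU nor pays a sleep-quantum round-up before picking up new work.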
See mozilla-Ocho/llamafile@711344b --- examples/server/server.cpp | 41 ++++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 035eb24ac..0aada8e28 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 @@ -541,7 +542,9 @@ struct llama_server_context std::vector queue_results; std::vector queue_multitasks; std::mutex mutex_tasks; // also guards id_gen, and queue_multitasks + std::condition_variable condition_tasks; std::mutex mutex_results; + std::condition_variable condition_results; ~llama_server_context() { @@ -1169,7 +1172,7 @@ struct llama_server_context void send_error(task_server& task, std::string error) { - std::lock_guard lock(mutex_results); + std::unique_lock lock(mutex_results); task_result res; res.id = task.id; res.multitask_id = task.multitask_id; @@ -1177,6 +1180,7 @@ struct llama_server_context res.error = true; res.result_json = { { "content", error } }; queue_results.push_back(res); + condition_results.notify_all(); } void add_multi_task(int id, std::vector& sub_ids) @@ -1186,6 +1190,7 @@ struct llama_server_context multi.id = id; std::copy(sub_ids.begin(), sub_ids.end(), std::inserter(multi.subtasks_remaining, multi.subtasks_remaining.end())); queue_multitasks.push_back(multi); + condition_tasks.notify_one(); } void update_multi_task(int multitask_id, int subtask_id, task_result& result) @@ -1197,6 +1202,7 @@ struct llama_server_context { multitask.subtasks_remaining.erase(subtask_id); multitask.results.push_back(result); + condition_tasks.notify_one(); } } } @@ -1244,7 +1250,7 @@ struct llama_server_context void send_partial_response(llama_client_slot &slot, completion_token_output tkn) { - std::lock_guard lock(mutex_results); + std::unique_lock lock(mutex_results); task_result res; res.id = slot.task_id; res.multitask_id = slot.multitask_id; @@ -1280,11 +1286,12 @@ struct llama_server_context } queue_results.push_back(res); + condition_results.notify_all(); } void send_final_response(llama_client_slot &slot) { - std::lock_guard lock(mutex_results); + std::unique_lock lock(mutex_results); task_result res; res.id = slot.task_id; res.multitask_id = slot.multitask_id; @@ -1340,11 +1347,12 @@ struct llama_server_context } queue_results.push_back(res); + condition_results.notify_all(); } void send_embedding(llama_client_slot &slot) { - std::lock_guard lock(mutex_results); + std::unique_lock lock(mutex_results); task_result res; res.id = slot.task_id; res.multitask_id = slot.multitask_id; @@ -1372,6 +1380,7 @@ struct llama_server_context }; } queue_results.push_back(res); + condition_results.notify_all(); } int request_completion(json data, bool infill, bool embedding, int multitask_id) @@ -1395,6 +1404,7 @@ struct llama_server_context // otherwise, it's a single-prompt task, we actually queue it queue_tasks.push_back(task); + condition_tasks.notify_one(); return task.id; } @@ -1402,13 +1412,10 @@ struct llama_server_context { while (true) { - std::this_thread::sleep_for(std::chrono::microseconds(5)); - std::lock_guard lock(mutex_results); - - if (queue_results.empty()) - { - continue; - } + std::unique_lock lock(mutex_results); + condition_results.wait(lock, [&]{ + return !queue_results.empty(); + }); for (int i = 0; i < (int) queue_results.size(); i++) { @@ -1504,12 +1511,13 @@ struct llama_server_context void request_cancel(int task_id) { - 
std::lock_guard lock(mutex_tasks); + std::unique_lock lock(mutex_tasks); task_server task; task.id = id_gen++; task.type = CANCEL_TASK; task.target_id = task_id; queue_tasks.push_back(task); + condition_tasks.notify_one(); } int split_multiprompt_task(task_server& multiprompt_task) @@ -1535,7 +1543,7 @@ struct llama_server_context void process_tasks() { - std::lock_guard lock(mutex_tasks); + std::unique_lock lock(mutex_tasks); while (!queue_tasks.empty()) { task_server task = queue_tasks.front(); @@ -1607,6 +1615,7 @@ struct llama_server_context std::lock_guard lock(mutex_results); queue_results.push_back(aggregate_result); + condition_results.notify_all(); queue_iterator = queue_multitasks.erase(queue_iterator); } @@ -1637,8 +1646,10 @@ struct llama_server_context LOG_TEE("all slots are idle and system prompt is empty, clear the KV cache\n"); kv_cache_clear(); } - // avoid 100% usage of cpu all time - std::this_thread::sleep_for(std::chrono::milliseconds(5)); + std::unique_lock lock(mutex_tasks); + condition_tasks.wait(lock, [&]{ + return !queue_tasks.empty(); + }); } for (llama_client_slot &slot : slots) From 4af4801566bc262a38fb77f51edf278ac323c2bd Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Fri, 29 Dec 2023 06:38:38 -0800 Subject: [PATCH 166/811] llava-cli : refactor to use sampling library (#4669) This change makes it possible to use flags like `--grammar` when using the `llava-cli` program. The rest is just code cleanup deleting a long standing TODO comment. This change also ensures that logging information is emitted to stderr which helps the `llava-cli` command be more friendly to shell scripts. See Mozilla-Ocho/llamafile@1cd334f --- examples/llava/llava-cli.cpp | 85 ++++++------------------------------ 1 file changed, 13 insertions(+), 72 deletions(-) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 31f8cd8e0..502b788b1 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -39,73 +39,11 @@ static bool eval_string(struct llama_context * ctx_llama, const char* str, int n return true; } -// TODO: use common/sampling.h -static llama_token sample_id(llama_context * ctx_llama, gpt_params & params) { - auto & sparams = params.sparams; - - // out of user input, sample next token - const float temp = sparams.temp; - const int32_t top_k = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : sparams.top_k; - const float top_p = sparams.top_p; - const float tfs_z = sparams.tfs_z; - const float typical_p = sparams.typical_p; - // const int32_t repeat_last_n = sparams.repeat_last_n < 0 ? 
n_ctx : sparams.repeat_last_n; - // const float repeat_penalty = sparams.repeat_penalty; - // const float alpha_presence = sparams.presence_penalty; - // const float alpha_frequency = sparams.frequency_penalty; - const int mirostat = sparams.mirostat; - const float mirostat_tau = sparams.mirostat_tau; - const float mirostat_eta = sparams.mirostat_eta; - // const bool penalize_nl = sparams.penalize_nl; - - llama_token id = 0; - { - auto logits = llama_get_logits(ctx_llama); - auto n_vocab = llama_n_vocab(llama_get_model(ctx_llama)); - - // Apply params.logit_bias map - for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) { - logits[it->first] += it->second; - } - - std::vector candidates; - candidates.reserve(n_vocab); - for (llama_token token_id = 0; token_id < n_vocab; token_id++) { - candidates.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f}); - } - - llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - - if (temp <= 0) { - // Greedy sampling - id = llama_sample_token_greedy(ctx_llama, &candidates_p); - } else { - if (mirostat == 1) { - static float mirostat_mu = 2.0f * mirostat_tau; - const int mirostat_m = 100; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, mirostat_m, &mirostat_mu); - } else if (mirostat == 2) { - static float mirostat_mu = 2.0f * mirostat_tau; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat_v2(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, &mirostat_mu); - } else { - // Temperature sampling - llama_sample_top_k(ctx_llama, &candidates_p, top_k, 1); - llama_sample_tail_free(ctx_llama, &candidates_p, tfs_z, 1); - llama_sample_typical(ctx_llama, &candidates_p, typical_p, 1); - llama_sample_top_p(ctx_llama, &candidates_p, top_p, 1); - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token(ctx_llama, &candidates_p); - } - } - } - - return id; -} - -static const char * sample(struct llama_context * ctx_llama, gpt_params & params, int * n_past) { - int id = sample_id(ctx_llama, params); +static const char * sample(struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_llama, + int * n_past) { + const llama_token id = llama_sampling_sample(ctx_sampling, ctx_llama, NULL); + llama_sampling_accept(ctx_sampling, ctx_llama, id, true); static std::string ret; if (id == llama_token_eos(llama_get_model(ctx_llama))) { ret = ""; @@ -174,8 +112,8 @@ struct llava_context { }; static void show_additional_info(int /*argc*/, char ** argv) { - printf("\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); - printf(" note: a lower temperature value like 0.1 is recommended for better quality.\n"); + fprintf(stderr, "\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); + fprintf(stderr, " note: a lower temperature value like 0.1 is recommended for better quality.\n"); } static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_params * params) { @@ -185,7 +123,7 @@ static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_para auto prompt = params->prompt; if (prompt_contains_image(prompt)) { if (!params->image.empty()) { - printf("using base64 encoded image instead of command line image path\n"); + fprintf(stderr, "using base64 encoded image instead of command line image path\n"); } embed = 
llava_image_embed_make_with_prompt_base64(ctx_llava->ctx_clip, params->n_threads, prompt); if (!embed) { @@ -217,16 +155,19 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ // generate the response - printf("\n"); + fprintf(stderr, "\n"); + + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params->sparams); for (int i = 0; i < max_tgt_len; i++) { - const char * tmp = sample(ctx_llava->ctx_llama, *params, &n_past); + const char * tmp = sample(ctx_sampling, ctx_llava->ctx_llama, &n_past); if (strcmp(tmp, "") == 0) break; printf("%s", tmp); fflush(stdout); } + llama_sampling_free(ctx_sampling); printf("\n"); } From 97bbca6e8522d18041fcde6c3d0907a52ce36446 Mon Sep 17 00:00:00 2001 From: Cuong Trinh Manh Date: Fri, 29 Dec 2023 21:39:15 +0700 Subject: [PATCH 167/811] cmake : fix ld warning duplicate libraries libllama.a (#4671) * fix "ld: warning: ignoring duplicate libraries: '../libllama.a'" * fix warning in example. --- common/CMakeLists.txt | 2 +- examples/llava/CMakeLists.txt | 2 +- examples/server/CMakeLists.txt | 2 +- tests/CMakeLists.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index b5d5453d2..f79acfef1 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -65,4 +65,4 @@ endif() target_include_directories(${TARGET} PUBLIC .) target_compile_features(${TARGET} PUBLIC cxx_std_11) -target_link_libraries(${TARGET} PRIVATE llama build_info) +target_link_libraries(${TARGET} PRIVATE build_info PUBLIC llama) diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 8ea3e5c83..48dae1506 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -32,5 +32,5 @@ endif() set(TARGET llava-cli) add_executable(llava-cli llava-cli.cpp) install(TARGETS llava-cli RUNTIME) -target_link_libraries(llava-cli PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(llava-cli PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(llava PRIVATE cxx_std_11) diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index 859cd12c6..81709e448 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -6,7 +6,7 @@ install(TARGETS ${TARGET} RUNTIME) target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$ ) -target_link_libraries(${TARGET} PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 9b5e69d13..7c932240d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -2,7 +2,7 @@ function(llama_build_executable source) get_filename_component(TEST_TARGET ${source} NAME_WE) add_executable(${TEST_TARGET} ${source}) install(TARGETS ${TEST_TARGET} RUNTIME) - target_link_libraries(${TEST_TARGET} PRIVATE llama common) + target_link_libraries(${TEST_TARGET} PRIVATE common) endfunction() function(llama_test_executable name source) @@ -14,7 +14,7 @@ function(llama_build_and_test_executable source) get_filename_component(TEST_TARGET ${source} NAME_WE) add_executable(${TEST_TARGET} ${source}) install(TARGETS ${TEST_TARGET} RUNTIME) - target_link_libraries(${TEST_TARGET} PRIVATE llama common) + target_link_libraries(${TEST_TARGET} PRIVATE common) add_test(NAME ${TEST_TARGET} COMMAND $ ${ARGN}) endfunction() From 
68eccbdc5b56f2a2450f9a8463f9934388cafabf Mon Sep 17 00:00:00 2001 From: Philip Taron Date: Fri, 29 Dec 2023 06:42:26 -0800 Subject: [PATCH 168/811] flake.nix : rewrite (#4605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * flake.lock: update to hotfix CUDA::cuda_driver Required to support https://github.com/ggerganov/llama.cpp/pull/4606 * flake.nix: rewrite 1. Split into separate files per output. 2. Added overlays, so that this flake can be integrated into others. The names in the overlay are `llama-cpp`, `llama-cpp-opencl`, `llama-cpp-cuda`, and `llama-cpp-rocm` so that they fit into the broader set of Nix packages from [nixpkgs](https://github.com/nixos/nixpkgs). 3. Use [callPackage](https://summer.nixos.org/blog/callpackage-a-tool-for-the-lazy/) rather than `with pkgs;` so that there's dependency injection rather than dependency lookup. 4. Add a description and meta information for each package. The description includes a bit about what's trying to accelerate each one. 5. Use specific CUDA packages instead of cudatoolkit on the advice of SomeoneSerge. 6. Format with `serokell/nixfmt` for a consistent style. 7. Update `flake.lock` with the latest goods. * flake.nix: use finalPackage instead of passing it manually * nix: unclutter darwin support * nix: pass most darwin frameworks unconditionally ...for simplicity * *.nix: nixfmt nix shell github:piegamesde/nixfmt/rfc101-style --command \ nixfmt flake.nix .devops/nix/*.nix * flake.nix: add maintainers * nix: move meta down to follow Nixpkgs style more closely * nix: add missing meta attributes nix: clarify the interpretation of meta.maintainers nix: clarify the meaning of "broken" and "badPlatforms" nix: passthru: expose the use* flags for inspection E.g.: ``` ❯ nix eval .#cuda.useCuda true ``` * flake.nix: avoid re-evaluating nixpkgs too many times * flake.nix: use flake-parts * nix: migrate to pname+version * flake.nix: overlay: expose both the namespace and the default attribute * ci: add the (Nix) flakestry workflow * nix: cmakeFlags: explicit OFF bools * nix: cuda: reduce runtime closure * nix: fewer rebuilds * nix: respect config.cudaCapabilities * nix: add the impure driver's location to the DT_RUNPATHs * nix: clean sources more thoroughly ...this way outPaths change less frequently, and so there are fewer rebuilds * nix: explicit mpi support * nix: explicit jetson support * flake.nix: darwin: only expose the default --------- Co-authored-by: Someone Serge --- .devops/nix/apps.nix | 22 +++ .devops/nix/devshells.nix | 13 ++ .devops/nix/jetson-support.nix | 32 ++++ .devops/nix/nixpkgs-instances.nix | 35 ++++ .devops/nix/package.nix | 265 ++++++++++++++++++++++++++++ .devops/nix/scope.nix | 12 ++ .github/workflows/nix-flakestry.yml | 23 +++ flake.lock | 55 +++--- flake.nix | 226 ++++++++++-------------- 9 files changed, 524 insertions(+), 159 deletions(-) create mode 100644 .devops/nix/apps.nix create mode 100644 .devops/nix/devshells.nix create mode 100644 .devops/nix/jetson-support.nix create mode 100644 .devops/nix/nixpkgs-instances.nix create mode 100644 .devops/nix/package.nix create mode 100644 .devops/nix/scope.nix create mode 100644 .github/workflows/nix-flakestry.yml diff --git a/.devops/nix/apps.nix b/.devops/nix/apps.nix new file mode 100644 index 000000000..b8a12cc0a --- /dev/null +++ b/.devops/nix/apps.nix @@ -0,0 +1,22 @@ +{ + perSystem = + { config, lib, ... 
}: + { + apps = + let + inherit (config.packages) default; + binaries = [ + "llama" + "llama-embedding" + "llama-server" + "quantize" + "train-text-from-scratch" + ]; + mkApp = name: { + type = "app"; + program = "${default}/bin/${name}"; + }; + in + lib.genAttrs binaries mkApp; + }; +} diff --git a/.devops/nix/devshells.nix b/.devops/nix/devshells.nix new file mode 100644 index 000000000..1862f0f08 --- /dev/null +++ b/.devops/nix/devshells.nix @@ -0,0 +1,13 @@ +{ + perSystem = + { config, lib, ... }: + { + devShells = + lib.concatMapAttrs + (name: package: { + ${name} = package.passthru.shell; + ${name + "-extra"} = package.passthru.shell-extra; + }) + config.packages; + }; +} diff --git a/.devops/nix/jetson-support.nix b/.devops/nix/jetson-support.nix new file mode 100644 index 000000000..08426d2ab --- /dev/null +++ b/.devops/nix/jetson-support.nix @@ -0,0 +1,32 @@ +{ inputs, ... }: +{ + perSystem = + { + config, + system, + lib, + pkgsCuda, + ... + }: + lib.optionalAttrs (system == "aarch64-linux") { + packages = + let + caps.jetson-xavier = "7.2"; + caps.jetson-orin = "8.7"; + caps.jetson-nano = "5.3"; + + pkgsFor = + cap: + import inputs.nixpkgs { + inherit system; + config = { + cudaSupport = true; + cudaCapabilities = [ cap ]; + cudaEnableForwardCompat = false; + inherit (pkgsCuda.config) allowUnfreePredicate; + }; + }; + in + builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps; + }; +} diff --git a/.devops/nix/nixpkgs-instances.nix b/.devops/nix/nixpkgs-instances.nix new file mode 100644 index 000000000..6e9872b28 --- /dev/null +++ b/.devops/nix/nixpkgs-instances.nix @@ -0,0 +1,35 @@ +{ inputs, ... }: +{ + # The _module.args definitions are passed on to modules as arguments. E.g. + # the module `{ pkgs ... }: { /* config */ }` implicitly uses + # `_module.args.pkgs` (defined in this case by flake-parts). + perSystem = + { system, ... }: + { + _module.args = { + pkgsCuda = import inputs.nixpkgs { + inherit system; + # Ensure dependencies use CUDA consistently (e.g. that openmpi, ucc, + # and ucx are built with CUDA support) + config.cudaSupport = true; + config.allowUnfreePredicate = + p: + builtins.all + ( + license: + license.free + || builtins.elem license.shortName [ + "CUDA EULA" + "cuDNN EULA" + ] + ) + (p.meta.licenses or [ p.meta.license ]); + }; + # Ensure dependencies use ROCm consistently + pkgsRocm = import inputs.nixpkgs { + inherit system; + config.rocmSupport = true; + }; + }; + }; +} diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix new file mode 100644 index 000000000..5f2a7c9f4 --- /dev/null +++ b/.devops/nix/package.nix @@ -0,0 +1,265 @@ +{ + lib, + config, + stdenv, + mkShell, + cmake, + ninja, + pkg-config, + git, + python3, + mpi, + openblas, # TODO: Use the generic `blas` so users could switch betwen alternative implementations + cudaPackages, + darwin, + rocmPackages, + clblast, + useBlas ? builtins.all (x: !x) [ + useCuda + useMetalKit + useOpenCL + useRocm + ], + useCuda ? config.cudaSupport, + useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL, + useMpi ? false, # Increases the runtime closure size by ~700M + useOpenCL ? false, + useRocm ? config.rocmSupport, + llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake +}@inputs: + +let + inherit (lib) + cmakeBool + cmakeFeature + optionals + strings + versionOlder + ; + + # It's necessary to consistently use backendStdenv when building with CUDA support, + # otherwise we get libstdc++ errors downstream. 
+ stdenv = throw "Use effectiveStdenv instead"; + effectiveStdenv = if useCuda then cudaPackages.backendStdenv else inputs.stdenv; + + suffices = + lib.optionals useBlas [ "BLAS" ] + ++ lib.optionals useCuda [ "CUDA" ] + ++ lib.optionals useMetalKit [ "MetalKit" ] + ++ lib.optionals useMpi [ "MPI" ] + ++ lib.optionals useOpenCL [ "OpenCL" ] + ++ lib.optionals useRocm [ "ROCm" ]; + + pnameSuffix = + strings.optionalString (suffices != [ ]) + "-${strings.concatMapStringsSep "-" strings.toLower suffices}"; + descriptionSuffix = + strings.optionalString (suffices != [ ]) + ", accelerated with ${strings.concatStringsSep ", " suffices}"; + + # TODO: package the Python in this repository in a Nix-like way. + # It'd be nice to migrate to buildPythonPackage, as well as ensure this repo + # is PEP 517-compatible, and ensure the correct .dist-info is generated. + # https://peps.python.org/pep-0517/ + llama-python = python3.withPackages ( + ps: [ + ps.numpy + ps.sentencepiece + ] + ); + + # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime + llama-python-extra = python3.withPackages ( + ps: [ + ps.numpy + ps.sentencepiece + ps.torchWithoutCuda + ps.transformers + ] + ); + + # apple_sdk is supposed to choose sane defaults, no need to handle isAarch64 + # separately + darwinBuildInputs = + with darwin.apple_sdk.frameworks; + [ + Accelerate + CoreVideo + CoreGraphics + ] + ++ optionals useMetalKit [ MetalKit ]; + + cudaBuildInputs = with cudaPackages; [ + cuda_cccl.dev # + + # A temporary hack for reducing the closure size, remove once cudaPackages + # have stopped using lndir: https://github.com/NixOS/nixpkgs/issues/271792 + cuda_cudart.dev + cuda_cudart.lib + cuda_cudart.static + libcublas.dev + libcublas.lib + libcublas.static + ]; + + rocmBuildInputs = with rocmPackages; [ + clr + hipblas + rocblas + ]; +in + +effectiveStdenv.mkDerivation ( + finalAttrs: { + pname = "llama-cpp${pnameSuffix}"; + version = llamaVersion; + + src = lib.cleanSourceWith { + filter = + name: type: + !(builtins.any (_: _) [ + (lib.hasSuffix ".nix" name) # Ignore *.nix files when computing outPaths + (name == "README.md") # Ignore *.md changes whe computing outPaths + (lib.hasPrefix "." name) # Skip hidden files and directories + ]); + src = lib.cleanSource ../../.; + }; + + postPatch = '' + substituteInPlace ./ggml-metal.m \ + --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";" + + # TODO: Package up each Python script or service appropriately. 
+ # If we were to migrate to buildPythonPackage and prepare the `pyproject.toml`, + # we could make those *.py into setuptools' entrypoints + substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python}/bin/python" + ''; + + nativeBuildInputs = + [ + cmake + ninja + pkg-config + git + ] + ++ optionals useCuda [ + cudaPackages.cuda_nvcc + + # TODO: Replace with autoAddDriverRunpath + # once https://github.com/NixOS/nixpkgs/pull/275241 has been merged + cudaPackages.autoAddOpenGLRunpathHook + ]; + + buildInputs = + optionals effectiveStdenv.isDarwin darwinBuildInputs + ++ optionals useCuda cudaBuildInputs + ++ optionals useMpi [ mpi ] + ++ optionals useOpenCL [ clblast ] + ++ optionals useRocm rocmBuildInputs; + + cmakeFlags = + [ + (cmakeBool "LLAMA_NATIVE" true) + (cmakeBool "LLAMA_BUILD_SERVER" true) + (cmakeBool "BUILD_SHARED_LIBS" true) + (cmakeBool "CMAKE_SKIP_BUILD_RPATH" true) + (cmakeBool "LLAMA_BLAS" useBlas) + (cmakeBool "LLAMA_CLBLAST" useOpenCL) + (cmakeBool "LLAMA_CUBLAS" useCuda) + (cmakeBool "LLAMA_HIPBLAS" useRocm) + (cmakeBool "LLAMA_METAL" useMetalKit) + (cmakeBool "LLAMA_MPI" useMpi) + ] + ++ optionals useCuda [ + ( + with cudaPackages.flags; + cmakeFeature "CMAKE_CUDA_ARCHITECTURES" ( + builtins.concatStringsSep ";" (map dropDot cudaCapabilities) + ) + ) + ] + ++ optionals useRocm [ + (cmakeFeature "CMAKE_C_COMPILER" "hipcc") + (cmakeFeature "CMAKE_CXX_COMPILER" "hipcc") + + # Build all targets supported by rocBLAS. When updating search for TARGET_LIST_ROCM + # in https://github.com/ROCmSoftwarePlatform/rocBLAS/blob/develop/CMakeLists.txt + # and select the line that matches the current nixpkgs version of rocBLAS. + # Should likely use `rocmPackages.clr.gpuTargets`. + "-DAMDGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102" + ] + ++ optionals useMetalKit [ (lib.cmakeFeature "CMAKE_C_FLAGS" "-D__ARM_FEATURE_DOTPROD=1") ] + ++ optionals useBlas [ (lib.cmakeFeature "LLAMA_BLAS_VENDOR" "OpenBLAS") ]; + + # TODO(SomeoneSerge): It's better to add proper install targets at the CMake level, + # if they haven't been added yet. + postInstall = '' + mv $out/bin/main $out/bin/llama + mv $out/bin/server $out/bin/llama-server + mkdir -p $out/include + cp $src/llama.h $out/include/ + ''; + + # Define the shells here, but don't add in the inputsFrom to avoid recursion. + passthru = { + inherit + useBlas + useCuda + useMetalKit + useMpi + useOpenCL + useRocm + ; + + shell = mkShell { + name = "shell-${finalAttrs.finalPackage.name}"; + description = "contains numpy and sentencepiece"; + buildInputs = [ llama-python ]; + inputsFrom = [ finalAttrs.finalPackage ]; + }; + + shell-extra = mkShell { + name = "shell-extra-${finalAttrs.finalPackage.name}"; + description = "contains numpy, sentencepiece, torchWithoutCuda, and transformers"; + buildInputs = [ llama-python-extra ]; + inputsFrom = [ finalAttrs.finalPackage ]; + }; + }; + + meta = { + # Configurations we don't want even the CI to evaluate. Results in the + # "unsupported platform" messages. This is mostly a no-op, because + # cudaPackages would've refused to evaluate anyway. + badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; + + # Configurations that are known to result in build failures. Can be + # overridden by importing Nixpkgs with `allowBroken = true`. 
+ broken = (useMetalKit && !effectiveStdenv.isDarwin); + + description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; + homepage = "https://github.com/ggerganov/llama.cpp/"; + license = lib.licenses.mit; + + # Accommodates `nix run` and `lib.getExe` + mainProgram = "llama"; + + # These people might respond, on the best effort basis, if you ping them + # in case of Nix-specific regressions or for reviewing Nix-specific PRs. + # Consider adding yourself to this list if you want to ensure this flake + # stays maintained and you're willing to invest your time. Do not add + # other people without their consent. Consider removing people after + # they've been unreachable for long periods of time. + + # Note that lib.maintainers is defined in Nixpkgs, but you may just add + # an attrset following the same format as in + # https://github.com/NixOS/nixpkgs/blob/f36a80e54da29775c78d7eff0e628c2b4e34d1d7/maintainers/maintainer-list.nix + maintainers = with lib.maintainers; [ + philiptaron + SomeoneSerge + ]; + + # Extend `badPlatforms` instead + platforms = lib.platforms.all; + }; + } +) diff --git a/.devops/nix/scope.nix b/.devops/nix/scope.nix new file mode 100644 index 000000000..7932ac1e8 --- /dev/null +++ b/.devops/nix/scope.nix @@ -0,0 +1,12 @@ +{ + lib, + newScope, + llamaVersion ? "0.0.0", +}: + +lib.makeScope newScope ( + self: { + inherit llamaVersion; + llama-cpp = self.callPackage ./package.nix { }; + } +) diff --git a/.github/workflows/nix-flakestry.yml b/.github/workflows/nix-flakestry.yml new file mode 100644 index 000000000..3abfb3509 --- /dev/null +++ b/.github/workflows/nix-flakestry.yml @@ -0,0 +1,23 @@ +# Make the flake discoverable on https://flakestry.dev +name: "Publish a flake to flakestry" +on: + push: + tags: + - "v?[0-9]+.[0-9]+.[0-9]+" + - "v?[0-9]+.[0-9]+" + workflow_dispatch: + inputs: + tag: + description: "The existing tag to publish" + type: "string" + required: true +jobs: + publish-flake: + runs-on: ubuntu-latest + permissions: + id-token: "write" + contents: "read" + steps: + - uses: flakestry/flakestry-publish@main + with: + version: "${{ inputs.tag || github.ref_name }}" diff --git a/flake.lock b/flake.lock index 0455f6561..3fcd1f45d 100644 --- a/flake.lock +++ b/flake.lock @@ -1,30 +1,30 @@ { "nodes": { - "flake-utils": { + "flake-parts": { "inputs": { - "systems": "systems" + "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "lastModified": 1701473968, + "narHash": "sha256-YcVE5emp1qQ8ieHUnxt1wCZCC3ZfAS+SRRWZ2TMda7E=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "34fed993f1674c8d06d58b37ce1e0fe5eebcb9f5", "type": "github" }, "original": { - "owner": "numtide", - "repo": "flake-utils", + "owner": "hercules-ci", + "repo": "flake-parts", "type": "github" } }, "nixpkgs": { "locked": { - "lastModified": 1698318101, - "narHash": "sha256-gUihHt3yPD7bVqg+k/UVHgngyaJ3DMEBchbymBMvK1E=", + "lastModified": 1703559957, + "narHash": "sha256-x9PUuMEPGUOMB51zNxrDr2QoHbYWlCS2xhFedm9MC5Q=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "63678e9f3d3afecfeafa0acead6239cdb447574c", + "rev": "75dd68c36f458c6593c5bbb48abfd3e59bfed380", "type": "github" }, "original": { @@ -34,26 +34,29 @@ "type": "github" } }, - "root": { - "inputs": { - "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs" - } - }, - "systems": { + "nixpkgs-lib": { "locked": { - "lastModified": 
1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "dir": "lib", + "lastModified": 1701253981, + "narHash": "sha256-ztaDIyZ7HrTAfEEUt9AtTDNoCYxUdSd6NrRHaYOIxtk=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e92039b55bcd58469325ded85d4f58dd5a4eaf58", "type": "github" }, "original": { - "owner": "nix-systems", - "repo": "default", + "dir": "lib", + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", "type": "github" } + }, + "root": { + "inputs": { + "flake-parts": "flake-parts", + "nixpkgs": "nixpkgs" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index 4cf28d5c1..2209070aa 100644 --- a/flake.nix +++ b/flake.nix @@ -1,139 +1,99 @@ { + description = "Port of Facebook's LLaMA model in C/C++"; + inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; - flake-utils.url = "github:numtide/flake-utils"; + flake-parts.url = "github:hercules-ci/flake-parts"; }; - outputs = { self, nixpkgs, flake-utils }: - flake-utils.lib.eachDefaultSystem (system: - let - name = "llama.cpp"; - src = ./.; - meta.mainProgram = "llama"; - inherit (pkgs.stdenv) isAarch32 isAarch64 isDarwin; - buildInputs = with pkgs; [ openmpi ]; - osSpecific = with pkgs; buildInputs ++ ( - if isAarch64 && isDarwin then - with pkgs.darwin.apple_sdk_11_0.frameworks; [ - Accelerate - MetalKit - ] - else if isAarch32 && isDarwin then - with pkgs.darwin.apple_sdk.frameworks; [ - Accelerate - CoreGraphics - CoreVideo - ] - else if isDarwin then - with pkgs.darwin.apple_sdk.frameworks; [ - Accelerate - CoreGraphics - CoreVideo - ] - else - with pkgs; [ openblas ] - ); - pkgs = import nixpkgs { inherit system; }; - nativeBuildInputs = with pkgs; [ cmake ninja pkg-config ]; - cudatoolkit_joined = with pkgs; symlinkJoin { - # HACK(Green-Sky): nix currently has issues with cmake findcudatoolkit - # see https://github.com/NixOS/nixpkgs/issues/224291 - # copied from jaxlib - name = "${cudaPackages.cudatoolkit.name}-merged"; - paths = [ - cudaPackages.cudatoolkit.lib - cudaPackages.cudatoolkit.out - ] ++ lib.optionals (lib.versionOlder cudaPackages.cudatoolkit.version "11") [ - # for some reason some of the required libs are in the targets/x86_64-linux - # directory; not sure why but this works around it - "${cudaPackages.cudatoolkit}/targets/${system}" - ]; - }; - llama-python = - pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece ]); - # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime - llama-python-extra = - pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece torchWithoutCuda transformers ]); - postPatch = '' - substituteInPlace ./ggml-metal.m \ - --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";" - substituteInPlace ./*.py --replace '/usr/bin/env python' '${llama-python}/bin/python' - ''; - postInstall = '' - mv $out/bin/main $out/bin/llama - mv $out/bin/server $out/bin/llama-server - mkdir -p $out/include - cp ${src}/llama.h $out/include/ - ''; - cmakeFlags = [ "-DLLAMA_NATIVE=OFF" "-DLLAMA_BUILD_SERVER=ON" "-DBUILD_SHARED_LIBS=ON" "-DCMAKE_SKIP_BUILD_RPATH=ON" ]; - in + + # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl: + # + # ```bash + # ❯ nix repl + # nix-repl> :lf github:ggerganov/llama.cpp + # Added 13 variables. 
+ # nix-repl> outputs.apps.x86_64-linux.quantize + # { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/quantize"; type = "app"; } + # ``` + outputs = + { self, flake-parts, ... }@inputs: + let + # We could include the git revisions in the package names but those would + # needlessly trigger rebuilds: + # llamaVersion = self.dirtyShortRev or self.shortRev; + + # Nix already uses cryptographic hashes for versioning, so we'll just fix + # the fake semver for now: + llamaVersion = "0.0.0"; + in + flake-parts.lib.mkFlake { inherit inputs; } + { - packages.default = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = osSpecific; - cmakeFlags = cmakeFlags - ++ (if isAarch64 && isDarwin then [ - "-DCMAKE_C_FLAGS=-D__ARM_FEATURE_DOTPROD=1" - "-DLLAMA_METAL=ON" - ] else [ - "-DLLAMA_BLAS=ON" - "-DLLAMA_BLAS_VENDOR=OpenBLAS" - ]); - }; - packages.opencl = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs; buildInputs ++ [ clblast ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_CLBLAST=ON" - ]; - }; - packages.cuda = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs; buildInputs ++ [ cudatoolkit_joined ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_CUBLAS=ON" - ]; - }; - packages.rocm = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs.rocmPackages; buildInputs ++ [ clr hipblas rocblas ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_HIPBLAS=1" - "-DCMAKE_C_COMPILER=hipcc" - "-DCMAKE_CXX_COMPILER=hipcc" - # Build all targets supported by rocBLAS. When updating search for TARGET_LIST_ROCM - # in github.com/ROCmSoftwarePlatform/rocBLAS/blob/develop/CMakeLists.txt - # and select the line that matches the current nixpkgs version of rocBLAS. - "-DAMDGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102" - ]; - }; - apps.llama-server = { - type = "app"; - program = "${self.packages.${system}.default}/bin/llama-server"; - }; - apps.llama-embedding = { - type = "app"; - program = "${self.packages.${system}.default}/bin/embedding"; - }; - apps.llama = { - type = "app"; - program = "${self.packages.${system}.default}/bin/llama"; - }; - apps.quantize = { - type = "app"; - program = "${self.packages.${system}.default}/bin/quantize"; - }; - apps.train-text-from-scratch = { - type = "app"; - program = "${self.packages.${system}.default}/bin/train-text-from-scratch"; - }; - apps.default = self.apps.${system}.llama; - devShells.default = pkgs.mkShell { - buildInputs = [ llama-python ]; - packages = nativeBuildInputs ++ osSpecific; - }; - devShells.extra = pkgs.mkShell { - buildInputs = [ llama-python-extra ]; - packages = nativeBuildInputs ++ osSpecific; - }; - }); + + imports = [ + .devops/nix/nixpkgs-instances.nix + .devops/nix/apps.nix + .devops/nix/devshells.nix + .devops/nix/jetson-support.nix + ]; + + # An overlay can be used to have a more granular control over llama-cpp's + # dependencies and configuration, than that offered by the `.override` + # mechanism. Cf. https://nixos.org/manual/nixpkgs/stable/#chap-overlays. + # + # E.g. in a flake: + # ``` + # { nixpkgs, llama-cpp, ... 
}: + # let pkgs = import nixpkgs { + # overlays = [ (llama-cpp.overlays.default) ]; + # system = "aarch64-linux"; + # config.allowUnfree = true; + # config.cudaSupport = true; + # config.cudaCapabilities = [ "7.2" ]; + # config.cudaEnableForwardCompat = false; + # }; in { + # packages.aarch64-linux.llamaJetsonXavier = pkgs.llamaPackages.llama-cpp; + # } + # ``` + # + # Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format + flake.overlays.default = + (final: prev: { + llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + inherit (final.llamaPackages) llama-cpp; + }); + + systems = [ + "aarch64-darwin" + "aarch64-linux" + "x86_64-darwin" # x86_64-darwin isn't tested (and likely isn't relevant) + "x86_64-linux" + ]; + + perSystem = + { + config, + lib, + pkgs, + pkgsCuda, + pkgsRocm, + ... + }: + { + # We don't use the overlay here so as to avoid making too many instances of nixpkgs, + # cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs + packages = + { + default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + } + // lib.optionalAttrs pkgs.stdenv.isLinux { + opencl = config.packages.default.override { useOpenCL = true; }; + cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + + mpi-cpu = config.packages.default.override { useMpi = true; }; + mpi-cuda = config.packages.default.override { useMpi = true; }; + }; + }; + }; } From 04ac0607e913ab91234dfb240e12a76509e30982 Mon Sep 17 00:00:00 2001 From: crasm Date: Fri, 29 Dec 2023 09:50:29 -0500 Subject: [PATCH 169/811] python : add check-requirements.sh and GitHub workflow (#4585) * python: add check-requirements.sh and GitHub workflow This script and workflow forces package versions to remain compatible across all convert*.py scripts, while allowing secondary convert scripts to import dependencies not wanted in convert.py. 
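To illustrate the convention this check enforces (a minimal sketch; the real files are the requirements/*.txt entries added in the diff below), per-script requirements use compatible-release specifiers instead of exact pins:

    # rejected by scripts/check-requirements.sh: exact pin
    torch==2.1.1
    # accepted: compatible release
    torch~=2.1.1
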
* Move requirements into ./requirements * Fail on "==" being used for package requirements (but can be suppressed) * Enforce "compatible release" syntax instead of == * Update workflow * Add upper version bound for transformers and protobuf * improve check-requirements.sh * small syntax change * don't remove venvs if nocleanup is passed * See if this fixes docker workflow * Move check-requirements.sh into ./scripts/ --------- Co-authored-by: Jared Van Bortel --- .devops/full-cuda.Dockerfile | 3 +- .devops/full-rocm.Dockerfile | 3 +- .devops/full.Dockerfile | 3 +- .devops/main-rocm.Dockerfile | 3 +- .../workflows/python-check-requirements.yml | 29 +++ convert-hf-to-gguf.py | 95 +++++----- convert-lora-to-ggml.py | 147 +++++++-------- convert-persimmon-to-gguf.py | 1 + requirements-hf-to-gguf.txt | 3 - requirements.txt | 17 +- .../requirements-convert-hf-to-gguf.txt | 2 + ...equirements-convert-llama-ggml-to-gguf.txt | 1 + .../requirements-convert-lora-to-ggml.txt | 2 + ...requirements-convert-persimmon-to-gguf.txt | 2 + requirements/requirements-convert.txt | 5 + scripts/check-requirements.sh | 174 ++++++++++++++++++ 16 files changed, 360 insertions(+), 130 deletions(-) create mode 100644 .github/workflows/python-check-requirements.yml mode change 100644 => 100755 convert-persimmon-to-gguf.py delete mode 100644 requirements-hf-to-gguf.txt create mode 100644 requirements/requirements-convert-hf-to-gguf.txt create mode 100644 requirements/requirements-convert-llama-ggml-to-gguf.txt create mode 100644 requirements/requirements-convert-lora-to-ggml.txt create mode 100644 requirements/requirements-convert-persimmon-to-gguf.txt create mode 100644 requirements/requirements-convert.txt create mode 100755 scripts/check-requirements.sh diff --git a/.devops/full-cuda.Dockerfile b/.devops/full-cuda.Dockerfile index 360602d65..77a9ddc14 100644 --- a/.devops/full-cuda.Dockerfile +++ b/.devops/full-cuda.Dockerfile @@ -14,7 +14,8 @@ ARG CUDA_DOCKER_ARCH=all RUN apt-get update && \ apt-get install -y build-essential python3 python3-pip git -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/full-rocm.Dockerfile b/.devops/full-rocm.Dockerfile index 6c521e9b4..8b9633dc4 100644 --- a/.devops/full-rocm.Dockerfile +++ b/.devops/full-rocm.Dockerfile @@ -23,7 +23,8 @@ ARG ROCM_DOCKER_ARCH=\ gfx1101 \ gfx1102 -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/full.Dockerfile b/.devops/full.Dockerfile index 687628b35..cef1297d3 100644 --- a/.devops/full.Dockerfile +++ b/.devops/full.Dockerfile @@ -5,7 +5,8 @@ FROM ubuntu:$UBUNTU_VERSION as build RUN apt-get update && \ apt-get install -y build-essential python3 python3-pip git -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/main-rocm.Dockerfile b/.devops/main-rocm.Dockerfile index 789deff6d..0a706dc73 100644 --- a/.devops/main-rocm.Dockerfile +++ b/.devops/main-rocm.Dockerfile @@ -23,7 +23,8 @@ ARG ROCM_DOCKER_ARCH=\ gfx1101 \ gfx1102 -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip 
setuptools wheel \ && pip install -r requirements.txt diff --git a/.github/workflows/python-check-requirements.yml b/.github/workflows/python-check-requirements.yml new file mode 100644 index 000000000..92e1108b3 --- /dev/null +++ b/.github/workflows/python-check-requirements.yml @@ -0,0 +1,29 @@ +name: Python check requirements.txt + +on: + push: + paths: + - 'scripts/check-requirements.sh' + - 'convert*.py' + - 'requirements.txt' + - 'requirements/*.txt' + pull_request: + paths: + - 'scripts/check-requirements.sh' + - 'convert*.py' + - 'requirements.txt' + - 'requirements/*.txt' + +jobs: + python-check-requirements: + runs-on: ubuntu-latest + name: check-requirements + steps: + - name: Check out source repository + uses: actions/checkout@v3 + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.11" + - name: Run check-requirements.sh script + run: bash scripts/check-requirements.sh nocleanup diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 3557a825e..51724c0df 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -242,7 +242,7 @@ class Model: tokens: list[bytearray] = [] toktypes: list[int] = [] - from transformers import AutoTokenizer # type: ignore[attr-defined] + from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size @@ -856,7 +856,7 @@ class StableLMModel(Model): hparams = self.hparams block_count = hparams["num_hidden_layers"] - self.gguf_writer.add_name(dir_model.name) + self.gguf_writer.add_name(self.dir_model.name) self.gguf_writer.add_context_length(hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(hparams["hidden_size"]) self.gguf_writer.add_block_count(block_count) @@ -902,7 +902,7 @@ class QwenModel(Model): tokens: list[bytearray] = [] toktypes: list[int] = [] - from transformers import AutoTokenizer # type: ignore[attr-defined] + from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained(dir_model, trust_remote_code=True) vocab_size = hparams["vocab_size"] assert max(tokenizer.get_vocab().values()) < vocab_size @@ -1185,57 +1185,62 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() -args = parse_args() +def main() -> None: + args = parse_args() -dir_model = args.model + dir_model = args.model -if args.awq_path: - sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) - from awq.apply_awq import add_scale_weights - tmp_model_path = args.model / "weighted_model" - dir_model = tmp_model_path - if tmp_model_path.is_dir(): - print(f"{tmp_model_path} exists as a weighted model.") + if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + dir_model = tmp_model_path + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + + if not dir_model.is_dir(): + print(f'Error: {args.model} is not a directory', file=sys.stderr) + sys.exit(1) + + ftype_map = { + "f32": gguf.GGMLQuantizationType.F32, + "f16": gguf.GGMLQuantizationType.F16, + } + + if args.outfile is not None: + fname_out = args.outfile else: - 
tmp_model_path.mkdir(parents=True, exist_ok=True) - print("Saving new weighted model ...") - add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) - print(f"Saved weighted model at {tmp_model_path}.") + # output in the same directory as the model by default + fname_out = dir_model / f'ggml-model-{args.outtype}.gguf' -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file=sys.stderr) - sys.exit(1) + print(f"Loading model: {dir_model.name}") -ftype_map = { - "f32": gguf.GGMLQuantizationType.F32, - "f16": gguf.GGMLQuantizationType.F16, -} + hparams = Model.load_hparams(dir_model) -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{args.outtype}.gguf' + with torch.inference_mode(): + model_class = Model.from_model_architecture(hparams["architectures"][0]) + model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) -print(f"Loading model: {dir_model.name}") + print("Set model parameters") + model_instance.set_gguf_parameters() -hparams = Model.load_hparams(dir_model) + print("Set model tokenizer") + model_instance.set_vocab() -with torch.inference_mode(): - model_class = Model.from_model_architecture(hparams["architectures"][0]) - model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) + if args.vocab_only: + print(f"Exporting model vocab to '{fname_out}'") + model_instance.write_vocab() + else: + print(f"Exporting model to '{fname_out}'") + model_instance.write() - print("Set model parameters") - model_instance.set_gguf_parameters() + print(f"Model successfully exported to '{fname_out}'") - print("Set model tokenizer") - model_instance.set_vocab() - if args.vocab_only: - print(f"Exporting model vocab to '{fname_out}'") - model_instance.write_vocab() - else: - print(f"Exporting model to '{fname_out}'") - model_instance.write() - - print(f"Model successfully exported to '{fname_out}'") +if __name__ == '__main__': + main() diff --git a/convert-lora-to-ggml.py b/convert-lora-to-ggml.py index 53bb8a3d9..35ce152f4 100755 --- a/convert-lora-to-ggml.py +++ b/convert-lora-to-ggml.py @@ -47,95 +47,96 @@ def write_tensor_header(fout: BinaryIO, name: str, shape: Sequence[int], data_ty fout.seek((fout.tell() + 31) & -32) -if len(sys.argv) < 2: - print(f"Usage: python {sys.argv[0]} [arch]") - print( - "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'" - ) - print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)") - sys.exit(1) +if __name__ == '__main__': + if len(sys.argv) < 2: + print(f"Usage: python {sys.argv[0]} [arch]") + print( + "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'" + ) + print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)") + sys.exit(1) -input_json = os.path.join(sys.argv[1], "adapter_config.json") -input_model = os.path.join(sys.argv[1], "adapter_model.bin") -output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin") + input_json = os.path.join(sys.argv[1], "adapter_config.json") + input_model = os.path.join(sys.argv[1], "adapter_model.bin") + output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin") -model = torch.load(input_model, map_location="cpu") -arch_name = sys.argv[2] if len(sys.argv) == 3 else "llama" + model = torch.load(input_model, map_location="cpu") + arch_name = sys.argv[2] if 
len(sys.argv) == 3 else "llama" -if arch_name not in gguf.MODEL_ARCH_NAMES.values(): - print(f"Error: unsupported architecture {arch_name}") - sys.exit(1) + if arch_name not in gguf.MODEL_ARCH_NAMES.values(): + print(f"Error: unsupported architecture {arch_name}") + sys.exit(1) -arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)] -name_map = gguf.TensorNameMap(arch, 200) # 200 layers ought to be enough for anyone + arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)] + name_map = gguf.TensorNameMap(arch, 200) # 200 layers ought to be enough for anyone -with open(input_json, "r") as f: - params = json.load(f) + with open(input_json, "r") as f: + params = json.load(f) -if params["peft_type"] != "LORA": - print(f"Error: unsupported adapter type {params['peft_type']}, expected LORA") - sys.exit(1) + if params["peft_type"] != "LORA": + print(f"Error: unsupported adapter type {params['peft_type']}, expected LORA") + sys.exit(1) -if params["fan_in_fan_out"] is True: - print("Error: param fan_in_fan_out is not supported") - sys.exit(1) + if params["fan_in_fan_out"] is True: + print("Error: param fan_in_fan_out is not supported") + sys.exit(1) -if params["bias"] is not None and params["bias"] != "none": - print("Error: param bias is not supported") - sys.exit(1) + if params["bias"] is not None and params["bias"] != "none": + print("Error: param bias is not supported") + sys.exit(1) -# TODO: these seem to be layers that have been trained but without lora. -# doesn't seem widely used but eventually should be supported -if params["modules_to_save"] is not None and len(params["modules_to_save"]) > 0: - print("Error: param modules_to_save is not supported") - sys.exit(1) + # TODO: these seem to be layers that have been trained but without lora. + # doesn't seem widely used but eventually should be supported + if params["modules_to_save"] is not None and len(params["modules_to_save"]) > 0: + print("Error: param modules_to_save is not supported") + sys.exit(1) -with open(output_path, "wb") as fout: - fout.truncate() + with open(output_path, "wb") as fout: + fout.truncate() - write_file_header(fout, params) - for k, v in model.items(): - orig_k = k - if k.endswith(".default.weight"): - k = k.replace(".default.weight", ".weight") - if k in ["llama_proj.weight", "llama_proj.bias"]: - continue - if k.endswith("lora_A.weight"): - if v.dtype != torch.float16 and v.dtype != torch.float32: + write_file_header(fout, params) + for k, v in model.items(): + orig_k = k + if k.endswith(".default.weight"): + k = k.replace(".default.weight", ".weight") + if k in ["llama_proj.weight", "llama_proj.bias"]: + continue + if k.endswith("lora_A.weight"): + if v.dtype != torch.float16 and v.dtype != torch.float32: + v = v.float() + v = v.T + else: v = v.float() - v = v.T - else: - v = v.float() - t = v.detach().numpy() + t = v.detach().numpy() - prefix = "base_model.model." - if k.startswith(prefix): - k = k[len(prefix) :] + prefix = "base_model.model." 
+ if k.startswith(prefix): + k = k[len(prefix) :] - lora_suffixes = (".lora_A.weight", ".lora_B.weight") - if k.endswith(lora_suffixes): - suffix = k[-len(lora_suffixes[0]):] - k = k[: -len(lora_suffixes[0])] - else: - print(f"Error: unrecognized tensor name {orig_k}") - sys.exit(1) + lora_suffixes = (".lora_A.weight", ".lora_B.weight") + if k.endswith(lora_suffixes): + suffix = k[-len(lora_suffixes[0]):] + k = k[: -len(lora_suffixes[0])] + else: + print(f"Error: unrecognized tensor name {orig_k}") + sys.exit(1) - tname = name_map.get_name(k) - if tname is None: - print(f"Error: could not map tensor name {orig_k}") - print(" Note: the arch parameter must be specified if the model is not llama") - sys.exit(1) + tname = name_map.get_name(k) + if tname is None: + print(f"Error: could not map tensor name {orig_k}") + print(" Note: the arch parameter must be specified if the model is not llama") + sys.exit(1) - if suffix == ".lora_A.weight": - tname += ".weight.loraA" - elif suffix == ".lora_B.weight": - tname += ".weight.loraB" - else: - assert False + if suffix == ".lora_A.weight": + tname += ".weight.loraA" + elif suffix == ".lora_B.weight": + tname += ".weight.loraB" + else: + assert False - print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB") - write_tensor_header(fout, tname, t.shape, t.dtype) - t.tofile(fout) + print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB") + write_tensor_header(fout, tname, t.shape, t.dtype) + t.tofile(fout) -print(f"Converted {input_json} and {input_model} to {output_path}") + print(f"Converted {input_json} and {input_model} to {output_path}") diff --git a/convert-persimmon-to-gguf.py b/convert-persimmon-to-gguf.py old mode 100644 new mode 100755 index 206b7d5ff..1ba5864dc --- a/convert-persimmon-to-gguf.py +++ b/convert-persimmon-to-gguf.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 import torch import os from pprint import pprint diff --git a/requirements-hf-to-gguf.txt b/requirements-hf-to-gguf.txt deleted file mode 100644 index f4600539e..000000000 --- a/requirements-hf-to-gguf.txt +++ /dev/null @@ -1,3 +0,0 @@ --r requirements.txt -torch==2.1.1 -transformers==4.35.2 diff --git a/requirements.txt b/requirements.txt index 1a1162566..d36f74520 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,12 @@ -numpy==1.24.4 -sentencepiece==0.1.98 -transformers>=4.34.0 -gguf>=0.1.0 -protobuf>=4.21.0 +# These requirements include all dependencies for all top-level python scripts +# for llama.cpp. Avoid adding packages here directly. +# +# Package versions must stay compatible across all top-level python scripts. 
+# + +-r ./requirements/requirements-convert.txt + +-r ./requirements/requirements-convert-hf-to-gguf.txt +-r ./requirements/requirements-convert-llama-ggml-to-gguf.txt +-r ./requirements/requirements-convert-lora-to-ggml.txt +-r ./requirements/requirements-convert-persimmon-to-gguf.txt diff --git a/requirements/requirements-convert-hf-to-gguf.txt b/requirements/requirements-convert-hf-to-gguf.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-hf-to-gguf.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert-llama-ggml-to-gguf.txt b/requirements/requirements-convert-llama-ggml-to-gguf.txt new file mode 100644 index 000000000..a0f37cd1c --- /dev/null +++ b/requirements/requirements-convert-llama-ggml-to-gguf.txt @@ -0,0 +1 @@ +-r ./requirements-convert.txt diff --git a/requirements/requirements-convert-lora-to-ggml.txt b/requirements/requirements-convert-lora-to-ggml.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-lora-to-ggml.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert-persimmon-to-gguf.txt b/requirements/requirements-convert-persimmon-to-gguf.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-persimmon-to-gguf.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert.txt b/requirements/requirements-convert.txt new file mode 100644 index 000000000..a3d6ecec0 --- /dev/null +++ b/requirements/requirements-convert.txt @@ -0,0 +1,5 @@ +numpy~=1.24.4 +sentencepiece~=0.1.98 +transformers>=4.35.2,<5.0.0 +gguf>=0.1.0 +protobuf>=4.21.0,<5.0.0 diff --git a/scripts/check-requirements.sh b/scripts/check-requirements.sh new file mode 100755 index 000000000..af7bab753 --- /dev/null +++ b/scripts/check-requirements.sh @@ -0,0 +1,174 @@ +#!/bin/bash +set -euo pipefail + +# +# check-requirements.sh checks all requirements files for each top-level +# convert*.py script. +# +# WARNING: This is quite IO intensive, because a fresh venv is set up for every +# python script. As of 2023-12-22, this writes ~2.7GB of data. An adequately +# sized tmpfs /tmp or ramdisk is recommended if running this frequently. +# +# usage: check-requirements.sh [] +# check-requirements.sh nocleanup [] +# +# where: +# - is a directory that can be used as the base for +# setting up the venvs. Defaults to `/tmp`. +# - 'nocleanup' as the first argument will disable automatic cleanup +# of the files created by this script. +# +# requires: +# - bash >= 3.2.57 +# - shellcheck +# +# For each script, it creates a fresh venv, `pip install`s the requirements, and +# finally imports the python script to check for `ImportError`. +# + +log() { + local level=$1 msg=$2 + printf >&2 '%s: %s\n' "$level" "$msg" +} + +debug() { + log DEBUG "$@" +} + +info() { + log INFO "$@" +} + +fatal() { + log FATAL "$@" + exit 1 +} + +cleanup() { + if [[ -n ${workdir+x} && -d $workdir && -w $workdir ]]; then + info "Removing $workdir" + local count=0 + rm -rfv -- "$workdir" | while read -r; do + if (( count++ > 750 )); then + printf . + count=0 + fi + done + printf '\n' + info "Removed $workdir" + fi +} + +do_cleanup=1 +if [[ ${1-} == nocleanup ]]; then + do_cleanup=0; shift +fi + +if (( do_cleanup )); then + trap exit INT TERM + trap cleanup EXIT +fi + +this=$(realpath -- "$0"); readonly this +cd "$(dirname "$this")/.." 
# PWD should stay in llama.cpp project directory + +shellcheck "$this" + +readonly reqs_dir=requirements + +if [[ ${1+x} ]]; then + tmp_dir=$(realpath -- "$1") + if [[ ! ( -d $tmp_dir && -w $tmp_dir ) ]]; then + fatal "$tmp_dir is not a writable directory" + fi +else + tmp_dir=/tmp +fi + +workdir=$(mktemp -d "$tmp_dir/check-requirements.XXXX"); readonly workdir +info "Working directory: $workdir" + +check_requirements() { + local reqs=$1 + + info "$reqs: beginning check" + pip --disable-pip-version-check install -qr "$reqs" + info "$reqs: OK" +} + +check_convert_script() { + local py=$1 # e.g. ./convert-hf-to-gguf.py + local pyname=${py##*/} # e.g. convert-hf-to-gguf.py + pyname=${pyname%.py} # e.g. convert-hf-to-gguf + + info "$py: beginning check" + + local reqs="$reqs_dir/requirements-$pyname.txt" + if [[ ! -r $reqs ]]; then + fatal "$py missing requirements. Expected: $reqs" + fi + + local venv="$workdir/$pyname-venv" + python3 -m venv "$venv" + + ( + # shellcheck source=/dev/null + source "$venv/bin/activate" + + check_requirements "$reqs" + + python - "$py" "$pyname" <<'EOF' +import sys +from importlib.machinery import SourceFileLoader +py, pyname = sys.argv[1:] +SourceFileLoader(pyname, py).load_module() +EOF + ) + + if (( do_cleanup )); then + rm -rf -- "$venv" + fi + + info "$py: imports OK" +} + +readonly ignore_eq_eq='check_requirements: ignore "=="' + +for req in "$reqs_dir"/*; do + # Check that all sub-requirements are added to top-level requirements.txt + if ! grep -qF "$req" requirements.txt; then + fatal "$req needs to be added to requirements.txt" + fi + + # Make sure exact release versions aren't being pinned in the requirements + # Filters out the ignore string + if grep -vF "$ignore_eq_eq" "$req" | grep -q '=='; then + tab=$'\t' + cat >&2 < Date: Sat, 30 Dec 2023 00:31:19 +0800 Subject: [PATCH 170/811] cuda: fix vmm oom issue on NVIDIA AGX Orin (#4687) Signed-off-by: hydai --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9a9effcf5..09585b07d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6662,7 +6662,7 @@ static void ggml_cuda_pool_free_leg(int device, void * ptr, size_t size) { // pool with virtual memory static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; -static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB +static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 35; // 32 GB static void * ggml_cuda_pool_malloc_vmm(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); From ce18d727a47f2473ca863a6f78bf3ad480008f72 Mon Sep 17 00:00:00 2001 From: Steward Garcia <57494570+FSSRepo@users.noreply.github.com> Date: Fri, 29 Dec 2023 11:52:15 -0500 Subject: [PATCH 171/811] clip : enable gpu backend (#4205) * clip: enable CUDA backend * add missing kernels * add enough padding for alignment * remove ggml_repeat of clip.cpp * add metal backend * llava : fixes - avoid ggml_repeat - use GGML_USE_ instead of CLIP_USE_ macros - remove unused vars --------- Co-authored-by: Georgi Gerganov --- examples/llava/CMakeLists.txt | 3 +- examples/llava/clip.cpp | 231 +++++++++++++++++++--------------- 2 files changed, 131 insertions(+), 103 deletions(-) diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 48dae1506..2985caff8 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -24,7 +24,8 @@ endif() if (NOT MSVC) target_compile_options(llava 
PRIVATE -Wno-cast-qual) # stb_image.h - endif() +endif() + if(TARGET BUILD_INFO) add_dependencies(llava BUILD_INFO) endif() diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index f06ec400d..f9326a5cc 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -16,12 +16,19 @@ #include "clip.h" #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" + +#ifdef GGML_USE_CUBLAS +#include "ggml-cuda.h" +#endif + +#ifdef GGML_USE_METAL +#include "ggml-metal.h" +#endif #define STB_IMAGE_IMPLEMENTATION #include "stb_image.h" -#define CLIP_DEBUG - static std::string format(const char * fmt, ...) { va_list ap; va_list ap2; @@ -196,20 +203,6 @@ struct clip_vision_model { struct ggml_tensor * mm_2_b; }; -// Replacement for std::vector that doesn't require zero-initialization. -struct clip_buffer { - uint8_t * data = NULL; - size_t size = 0; - - void resize(size_t size) { - delete[] data; - data = new uint8_t[size]; - this->size = size; - } - - ~clip_buffer() { delete[] data; } -}; - struct clip_ctx { bool has_text_encoder = false; bool has_vision_encoder = false; @@ -223,9 +216,10 @@ struct clip_ctx { struct gguf_context * ctx_gguf; // memory buffers to evaluate the model - clip_buffer buf_compute; - clip_buffer buf_alloc; - ggml_allocr * alloc = NULL; + ggml_backend_buffer_t params_buffer = NULL; + ggml_backend_buffer_t compute_buffer = NULL; + ggml_backend_t backend = NULL; + ggml_allocr * compute_alloc = NULL; }; static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_image_f32_batch * imgs) { @@ -252,25 +246,20 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima if(ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } - - const auto & buf_compute = ctx->buf_compute; - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ false, + /*.mem_size =*/ GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead(), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, }; - params.no_alloc = true; - struct ggml_context * ctx0 = ggml_init(params); struct ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * inp_raw = ggml_new_tensor_4d(ctx0, GGML_TYPE_F32, image_size, image_size, 3, batch_size); - ggml_allocr_alloc(ctx->alloc, inp_raw); + ggml_allocr_alloc(ctx->compute_alloc, inp_raw); - if (!ggml_allocr_is_measure(ctx->alloc)) { - float * data = (float *)ggml_get_data(inp_raw); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + float * data = (float *)malloc(ggml_nbytes(inp_raw)); for (size_t i = 0; i < imgs->size; i++) { const int nx = imgs->data[i].nx; @@ -289,6 +278,8 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } } } + ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); + free(data); } struct ggml_tensor * inp = ggml_conv_2d(ctx0, model.patch_embeddings, inp_raw, patch_size, patch_size, 0, 0, 1, 1); @@ -298,36 +289,39 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima // concat class_embeddings and patch_embeddings struct ggml_tensor * embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); - ggml_allocr_alloc(ctx->alloc, embeddings); - if (!ggml_allocr_is_measure(ctx->alloc)) { - ggml_set_zero(embeddings); + ggml_allocr_alloc(ctx->compute_alloc, embeddings); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + void* zero_mem = malloc(ggml_nbytes(embeddings)); + memset(zero_mem, 0, 
ggml_nbytes(embeddings)); + ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); + free(zero_mem); } - struct ggml_tensor * temp = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, 1, batch_size); - ggml_allocr_alloc(ctx->alloc, temp); + embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); - embeddings = ggml_acc(ctx0, embeddings, ggml_repeat(ctx0, model.class_embedding, temp), embeddings->nb[1], - embeddings->nb[2], embeddings->nb[3], 0); - embeddings = - ggml_acc(ctx0, embeddings, inp, embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); + embeddings = ggml_acc(ctx0, embeddings, inp, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); struct ggml_tensor * positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_positions); - ggml_allocr_alloc(ctx->alloc, positions); - if (!ggml_allocr_is_measure(ctx->alloc)) { + ggml_allocr_alloc(ctx->compute_alloc, positions); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + int* positions_data = (int*)malloc(ggml_nbytes(positions)); for (int i = 0; i < num_positions; i++) { - ggml_set_i32_1d(positions, i, i); + positions_data[i] = i; } + ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); + free(positions_data); } embeddings = - ggml_add(ctx0, embeddings, ggml_repeat(ctx0, ggml_get_rows(ctx0, model.position_embeddings, positions), embeddings)); + ggml_add(ctx0, embeddings, ggml_get_rows(ctx0, model.position_embeddings, positions)); // pre-layernorm { embeddings = ggml_norm(ctx0, embeddings, eps); - embeddings = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.pre_ln_w, embeddings), embeddings), - ggml_repeat(ctx0, model.pre_ln_b, embeddings)); + embeddings = ggml_add(ctx0, ggml_mul(ctx0, embeddings, model.pre_ln_w), model.pre_ln_b); } // loop over layers @@ -340,15 +334,15 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima { cur = ggml_norm(ctx0, cur, eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.layers[il].ln_1_w, cur), cur), - ggml_repeat(ctx0, model.layers[il].ln_1_b, cur)); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ln_1_w), + model.layers[il].ln_1_b); } // self-attention { struct ggml_tensor * Q = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].q_b, cur), ggml_mul_mat(ctx0, model.layers[il].q_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].q_w, cur), model.layers[il].q_b); Q = ggml_scale_inplace(ctx0, Q, 1.0f / sqrt((float)d_head)); Q = ggml_reshape_4d(ctx0, Q, d_head, n_head, num_positions, batch_size); @@ -356,14 +350,14 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima Q = ggml_reshape_3d(ctx0, Q, d_head, num_positions, n_head * batch_size); struct ggml_tensor * K = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].k_b, cur), ggml_mul_mat(ctx0, model.layers[il].k_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].k_w, cur), model.layers[il].k_b); K = ggml_reshape_4d(ctx0, K, d_head, n_head, num_positions, batch_size); K = ggml_cont(ctx0, ggml_permute(ctx0, K, 0, 2, 1, 3)); K = ggml_reshape_3d(ctx0, K, d_head, num_positions, n_head * batch_size); struct ggml_tensor * V = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].v_b, cur), ggml_mul_mat(ctx0, model.layers[il].v_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].v_w, cur), model.layers[il].v_b); V = ggml_reshape_4d(ctx0, V, d_head, 
n_head, num_positions, batch_size); V = ggml_cont(ctx0, ggml_permute(ctx0, V, 1, 2, 0, 3)); @@ -379,7 +373,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } // attention output - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].o_b, cur), ggml_mul_mat(ctx0, model.layers[il].o_w, cur)); + cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].o_w, cur), model.layers[il].o_b); // re-add the layer input, e.g., residual cur = ggml_add(ctx0, cur, embeddings); @@ -390,12 +384,11 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima { cur = ggml_norm(ctx0, cur, eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.layers[il].ln_2_w, cur), cur), - ggml_repeat(ctx0, model.layers[il].ln_2_b, cur)); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ln_2_w), model.layers[il].ln_2_b); } cur = ggml_mul_mat(ctx0, model.layers[il].ff_i_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].ff_i_b, cur), cur); + cur = ggml_add(ctx0, cur, model.layers[il].ff_i_b); if (ctx->use_gelu) { cur = ggml_gelu_inplace(ctx0, cur); @@ -404,7 +397,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } cur = ggml_mul_mat(ctx0, model.layers[il].ff_o_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].ff_o_b, cur), cur); + cur = ggml_add(ctx0, cur, model.layers[il].ff_o_b); // residual 2 cur = ggml_add(ctx0, embeddings, cur); @@ -417,23 +410,26 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima embeddings = ggml_reshape_2d(ctx0, embeddings, embeddings->ne[0], embeddings->ne[1]); struct ggml_tensor * patches = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_patches); - ggml_allocr_alloc(ctx->alloc, patches); - if (!ggml_allocr_is_measure(ctx->alloc)) { - for (int i = 0; i < num_patches; ++i) { - ggml_set_i32_1d(patches, i, i+1); + ggml_allocr_alloc(ctx->compute_alloc, patches); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + int* patches_data = (int*)malloc(ggml_nbytes(patches)); + for (int i = 0; i < num_positions; i++) { + patches_data[i] = i + 1; } + ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); + free(patches_data); } embeddings = ggml_get_rows(ctx0, embeddings, patches); // mm projection 0 embeddings = ggml_mul_mat(ctx0, model.mm_0_w, embeddings); - embeddings = ggml_add(ctx0, ggml_repeat(ctx0, model.mm_0_b, embeddings), embeddings); + embeddings = ggml_add(ctx0, embeddings, model.mm_0_b); embeddings = ggml_gelu(ctx0, embeddings); embeddings = ggml_mul_mat(ctx0, model.mm_2_w, embeddings); - embeddings = ggml_add(ctx0, ggml_repeat(ctx0, model.mm_2_b, embeddings), embeddings); + embeddings = ggml_add(ctx0, embeddings, model.mm_2_b); } // build the graph @@ -446,7 +442,6 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima // read and create ggml_context containing the tensors and their data struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { - struct ggml_context * meta = NULL; struct gguf_init_params params = { @@ -479,7 +474,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: ftype: %s\n", __func__, ftype_str.c_str()); printf("\n"); } - + const int n_tensors = gguf_get_n_tensors(ctx); // kv if (verbosity >= 3) { const int n_kv = gguf_get_n_kv(ctx); @@ -493,27 +488,38 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // data - size_t ctx_size = 0; + size_t 
buffer_size = 0; { - const int n_tensors = gguf_get_n_tensors(ctx); - for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); const size_t offset = gguf_get_tensor_offset(ctx, i); - struct ggml_tensor * cur = ggml_get_tensor(meta, name); - ctx_size += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE; size_t tensor_size = ggml_nbytes(cur); - size_t padded_size = ggml_nbytes_pad(cur); - ctx_size += padded_size; + buffer_size += tensor_size; if (verbosity >= 3) { - printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, padded_size=%zu, offset=%zu\n", __func__, i, - ggml_n_dims(cur), cur->name, tensor_size, padded_size, offset); + printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu\n", __func__, i, + ggml_n_dims(cur), cur->name, tensor_size, offset); } } } + buffer_size += n_tensors * 128 /* CLIP PADDING */; + clip_ctx * new_clip = new clip_ctx; +#ifdef GGML_USE_CUBLAS + new_clip->backend = ggml_backend_cuda_init(0); + printf("%s: CLIP using CUDA backend\n", __func__); +#endif + +#ifdef GGML_USE_METAL + new_clip->backend = ggml_backend_metal_init(); + printf("%s: CLIP using Metal backend\n", __func__); +#endif + + if (!new_clip->backend) { + new_clip->backend = ggml_backend_cpu_init(); + printf("%s: CLIP using CPU backend\n", __func__); + } // model size and capabilities { @@ -539,17 +545,20 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: text_encoder: %d\n", __func__, new_clip->has_text_encoder); printf("%s: vision_encoder: %d\n", __func__, new_clip->has_vision_encoder); printf("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector); - printf("%s: model size: %.2f MB\n", __func__, (ctx_size / 1024.0 / 1024.0)); + printf("%s: model size: %.2f MB\n", __func__, buffer_size / 1024.0 / 1024.0); printf("%s: metadata size: %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0); } } + printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, buffer_size / (1024.0 * 1024.0), n_tensors); + // load tensors { + std::vector read_buf; struct ggml_init_params params = { - /*.mem_size =*/ ctx_size, + /*.mem_size =*/ (n_tensors + 1) * ggml_tensor_overhead(), /*.mem_buffer =*/ NULL, - /*.no_alloc =*/ false, + /*.no_alloc =*/ true, }; new_clip->ctx = ggml_init(params); @@ -566,13 +575,21 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return nullptr; } - const int n_tensors = gguf_get_n_tensors(ctx); + // add tensors to context for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); struct ggml_tensor * t = ggml_get_tensor(meta, name); struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx, t); ggml_set_name(cur, name); + } + // alloc memory and offload data + new_clip->params_buffer = ggml_backend_alloc_buffer(new_clip->backend, buffer_size); + ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer); + for (int i = 0; i < n_tensors; ++i) { + const char * name = gguf_get_tensor_name(ctx, i); + struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx, name); + ggml_allocr_alloc(alloc, cur); const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); if (!fin) { @@ -580,10 +597,22 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { clip_free(new_clip); return nullptr; } - - fin.read(reinterpret_cast(cur->data), ggml_nbytes(t)); + int num_bytes = ggml_nbytes(cur); + if 
(ggml_backend_is_cpu(new_clip->backend) +#ifdef GGML_USE_METAL + || ggml_backend_is_metal(new_clip->backend) +#endif + ) { + // for the CPU and Metal backend, we can read directly into the tensor + fin.read(reinterpret_cast(cur->data), num_bytes); + } else { + // read into a temporary buffer first, then copy to device memory + read_buf.resize(num_bytes); + fin.read(reinterpret_cast(read_buf.data()), num_bytes); + ggml_backend_tensor_set(cur, read_buf.data(), 0, num_bytes); + } } - + ggml_allocr_free(alloc); fin.close(); } @@ -657,18 +686,16 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // measure mem requirement and allocate { - static const size_t tensor_alignment = 32; - new_clip->buf_compute.resize(ggml_tensor_overhead()*GGML_DEFAULT_GRAPH_SIZE + ggml_graph_overhead()); - new_clip->alloc = ggml_allocr_new_measure(tensor_alignment); + new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend); clip_image_f32_batch batch; batch.size = 1; ggml_cgraph * gf = clip_image_build_graph(new_clip, &batch); - size_t alloc_size = ggml_allocr_alloc_graph(new_clip->alloc, gf) + tensor_alignment; - ggml_allocr_free(new_clip->alloc); - new_clip->buf_alloc.resize(alloc_size); - new_clip->alloc = ggml_allocr_new(new_clip->buf_alloc.data, new_clip->buf_alloc.size, tensor_alignment); + size_t compute_memory_buffer_size = ggml_allocr_alloc_graph(new_clip->compute_alloc, gf); + ggml_allocr_free(new_clip->compute_alloc); + new_clip->compute_buffer = ggml_backend_alloc_buffer(new_clip->backend, compute_memory_buffer_size); + new_clip->compute_alloc = ggml_allocr_new_from_buffer(new_clip->compute_buffer); - printf("%s: total allocated memory: %.2f MB\n", __func__, (new_clip->buf_compute.size + alloc_size)/1024.0/1024.0); + printf("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0); } return new_clip; @@ -852,29 +879,29 @@ bool clip_image_batch_encode(const clip_ctx * ctx, const int n_threads, const cl } // reset alloc buffer to clean the memory from previous invocations - ggml_allocr_reset(ctx->alloc); + ggml_allocr_reset(ctx->compute_alloc); // build the inference graph ggml_cgraph * gf = clip_image_build_graph(ctx, imgs); - ggml_allocr_alloc_graph(ctx->alloc, gf); + ggml_allocr_alloc_graph(ctx->compute_alloc, gf); - struct ggml_cplan plan = ggml_graph_plan(gf, n_threads); - if (plan.work_size > 0) { - plan.work_data = (uint8_t *)malloc(plan.work_size); + if (ggml_backend_is_cpu(ctx->backend)) { + ggml_backend_cpu_set_n_threads(ctx->backend, n_threads); } - ggml_graph_compute(gf, &plan); +#ifdef GGML_USE_METAL + if (ggml_backend_is_metal(ctx->backend)) { + ggml_backend_metal_set_n_cb(ctx->backend, n_threads); + } +#endif + + ggml_backend_graph_compute(ctx->backend, gf); // the last node is the embedding tensor -struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 1]; + struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 1]; // copy the embeddings to the location passed by the user - memcpy(vec, ggml_get_data_f32(embeddings), ggml_nbytes(embeddings)); - - if (plan.work_size > 0) { - free(plan.work_data); - } - + ggml_backend_tensor_get(embeddings, vec, 0, ggml_nbytes(embeddings)); return true; } @@ -1045,8 +1072,8 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i gguf_free(ctx_out); { - printf("%s: original size = %8.2f MB\n", __func__, total_size_org / 1024.0 / 1024.0); - printf("%s: quantized size = %8.2f MB\n", __func__, total_size_new / 1024.0 / 1024.0); + 
printf("%s: original size = %8.2f MB\n", __func__, total_size_org / 1024.0 / 1024.0); + printf("%s: quantized size = %8.2f MB\n", __func__, total_size_new / 1024.0 / 1024.0); int64_t sum_all = 0; for (size_t i = 0; i < hist_all.size(); ++i) { From 0235b9b571f3cc7d2b8836409a5404b41ce1379c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 18:53:34 +0200 Subject: [PATCH 172/811] clip : use ggml_backend_buffer_is_host (#4205) --- examples/llava/clip.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index f9326a5cc..6a731eeec 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -598,11 +598,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return nullptr; } int num_bytes = ggml_nbytes(cur); - if (ggml_backend_is_cpu(new_clip->backend) -#ifdef GGML_USE_METAL - || ggml_backend_is_metal(new_clip->backend) -#endif - ) { + if (ggml_backend_buffer_is_host(new_clip->params_buffer)) { // for the CPU and Metal backend, we can read directly into the tensor fin.read(reinterpret_cast(cur->data), num_bytes); } else { From a20f3c7465d6d1b33767757c2760643b799a81bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 29 Dec 2023 23:12:53 +0100 Subject: [PATCH 173/811] CUDA: fix tensor core logic for Pascal and HIP (#4682) --- ggml-cuda.cu | 72 ++++++++++++++++++++++++++++------------------------ 1 file changed, 39 insertions(+), 33 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 09585b07d..71a64ca09 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -123,24 +123,6 @@ #define GGML_CUDA_MAX_NODES 8192 -// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication -// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant -// for large computational tasks. the drawback is that this requires some extra amount of VRAM: -// - 7B quantum model: +100-200 MB -// - 13B quantum model: +200-400 MB -// -//#define GGML_CUDA_FORCE_MMQ - -// TODO: improve this to be correct for more hardware -// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores -// probably other such cases, and not sure what happens on AMD hardware -#if !defined(GGML_CUDA_FORCE_MMQ) -#define CUDA_USE_TENSOR_CORES -#endif - -// max batch size to use MMQ kernels when tensor cores are available -#define MMQ_MAX_BATCH_SIZE 32 - #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -207,6 +189,23 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { } #endif // defined(GGML_USE_HIPBLAS) +// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication +// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant +// for large computational tasks. 
the drawback is that this requires some extra amount of VRAM: +// - 7B quantum model: +100-200 MB +// - 13B quantum model: +200-400 MB +// +//#define GGML_CUDA_FORCE_MMQ + +// TODO: improve this to be correct for more hardware +// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores +#if !defined(GGML_CUDA_FORCE_MMQ) && (!defined(GGML_USE_HIPBLAS) || defined(RDNA3)) +#define CUDA_USE_TENSOR_CORES +#endif + +// max batch size to use MMQ kernels when tensor cores are available +#define MMQ_MAX_BATCH_SIZE 32 + #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data #endif @@ -8661,11 +8660,26 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } } -#ifdef CUDA_USE_TENSOR_CORES - const bool use_tensor_cores = true; +#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + const bool fp16_performance_good = true; + +#ifdef RDNA3 + const bool use_mul_mat_q = false; #else - const bool use_tensor_cores = false; -#endif + const bool use_mul_mat_q = true; +#endif // RDNA3 + +#else + + const bool fp16_performance_good = min_compute_capability >= CC_VOLTA; + bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); +#ifdef CUDA_USE_TENSOR_CORES + // when tensor cores are available, use them for large batch size + // ref: https://github.com/ggerganov/llama.cpp/pull/3776 + use_mul_mat_q = use_mul_mat_q && !(fp16_performance_good && src1->ne[1] > MMQ_MAX_BATCH_SIZE); +#endif // CUDA_USE_TENSOR_CORES + +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); @@ -8675,13 +8689,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); - if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { + if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { // KQ single-batch ggml_cuda_mul_mat_vec_p021(src0, src1, dst); - } else if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { + } else if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { @@ -8701,14 +8715,6 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, 
ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); - - // when tensor cores are available, use them for large batch size - // ref: https://github.com/ggerganov/llama.cpp/pull/3776 - if (use_tensor_cores && min_compute_capability >= CC_VOLTA && src1->ne[1] > MMQ_MAX_BATCH_SIZE) { - use_mul_mat_q = false; - } - if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { From 24a447e20af425fa44cf10feaa632b6bb596c80f Mon Sep 17 00:00:00 2001 From: automaticcat Date: Sat, 30 Dec 2023 15:07:48 +0700 Subject: [PATCH 174/811] ggml : add ggml_cpu_has_avx_vnni() (#4589) * feat: add avx_vnni based on intel documents * ggml: add avx vnni based on intel document * llama: add avx vnni information display * docs: add more details about using oneMKL and oneAPI for intel processors * docs: add more details about using oneMKL and oneAPI for intel processors * docs: add more details about using oneMKL and oneAPI for intel processors * docs: add more details about using oneMKL and oneAPI for intel processors * docs: add more details about using oneMKL and oneAPI for intel processors * Update ggml.c Fix indentation upgate Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- README.md | 30 ++++++++++++++++++++++-------- common/common.cpp | 1 + ggml.c | 8 ++++++++ ggml.h | 1 + llama.cpp | 1 + 5 files changed, 33 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 48dcd6464..ca6d14e17 100644 --- a/README.md +++ b/README.md @@ -385,16 +385,30 @@ Building the program with BLAS support may lead to some performance improvements Check [BLIS.md](docs/BLIS.md) for more information. -- #### Intel MKL +- #### Intel oneMKL + - Using manual oneAPI installation: + By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you already sourced intel environment script and assign `-DLLAMA_BLAS=ON` in cmake, the mkl version of Blas will automatically been selected. Otherwise please install oneAPI and follow the below steps: + ```bash + mkdir build + cd build + source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-runtime docker image, only required for manual installation + cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON + cmake --build . --config Release + ``` - By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you already sourced intel environment script and assign `-DLLAMA_BLAS=ON` in cmake, the mkl version of Blas will automatically been selected. You may also specify it by: + - Using oneAPI docker image: + If you do not want to source the environment vars and install oneAPI manually, you can also build the code using intel docker container: [oneAPI-runtime](https://hub.docker.com/r/intel/oneapi-runtime) - ```bash - mkdir build - cd build - cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx - cmake --build . --config Release - ``` + ```bash + mkdir build + cd build + cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON + cmake --build . --config Release + ``` + + Building through oneAPI compilers will make avx_vnni instruction set available for intel processors that do not support avx512 and avx512_vnni. 
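A rough sketch of how the `ggml_cpu_has_avx_vnni()` flag introduced further down in this commit can be queried from application code (illustrative only, not part of the patch; it assumes the `ggml.h` from this revision is on the include path and that the program is linked against ggml):

```c
#include <stdio.h>
#include "ggml.h"

int main(void) {
    // the new AVX-VNNI flag is reported alongside the existing CPU feature flags
    printf("AVX      = %d\n", ggml_cpu_has_avx());
    printf("AVX_VNNI = %d\n", ggml_cpu_has_avx_vnni());
    printf("AVX2     = %d\n", ggml_cpu_has_avx2());
    return 0;
}
```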
+ + Check [Optimizing and Running LLaMA2 on Intel® CPU](https://www.intel.com/content/www/us/en/content-details/791610/optimizing-and-running-llama2-on-intel-cpu.html) for more information. - #### cuBLAS diff --git a/common/common.cpp b/common/common.cpp index b3425ab09..eacaee18e 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1394,6 +1394,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "build_number: %d\n", LLAMA_BUILD_NUMBER); fprintf(stream, "cpu_has_arm_fma: %s\n", ggml_cpu_has_arm_fma() ? "true" : "false"); fprintf(stream, "cpu_has_avx: %s\n", ggml_cpu_has_avx() ? "true" : "false"); + fprintf(stream, "cpu_has_avx_vnni: %s\n", ggml_cpu_has_avx_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_avx2: %s\n", ggml_cpu_has_avx2() ? "true" : "false"); fprintf(stream, "cpu_has_avx512: %s\n", ggml_cpu_has_avx512() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vbmi: %s\n", ggml_cpu_has_avx512_vbmi() ? "true" : "false"); diff --git a/ggml.c b/ggml.c index a9e1ea9b4..bcec200f6 100644 --- a/ggml.c +++ b/ggml.c @@ -19638,6 +19638,14 @@ int ggml_cpu_has_avx(void) { #endif } +int ggml_cpu_has_avx_vnni(void) { +#if defined(__AVXVNNI__) + return 1; +#else + return 0; +#endif +} + int ggml_cpu_has_avx2(void) { #if defined(__AVX2__) return 1; diff --git a/ggml.h b/ggml.h index 67d6bc4f1..64f4e45e8 100644 --- a/ggml.h +++ b/ggml.h @@ -2198,6 +2198,7 @@ extern "C" { // GGML_API int ggml_cpu_has_avx (void); + GGML_API int ggml_cpu_has_avx_vnni (void); GGML_API int ggml_cpu_has_avx2 (void); GGML_API int ggml_cpu_has_avx512 (void); GGML_API int ggml_cpu_has_avx512_vbmi(void); diff --git a/llama.cpp b/llama.cpp index 68c7cced6..a833d4c15 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10780,6 +10780,7 @@ const char * llama_print_system_info(void) { s = ""; s += "AVX = " + std::to_string(ggml_cpu_has_avx()) + " | "; + s += "AVX_VNNI = " + std::to_string(ggml_cpu_has_avx_vnni()) + " | "; s += "AVX2 = " + std::to_string(ggml_cpu_has_avx2()) + " | "; s += "AVX512 = " + std::to_string(ggml_cpu_has_avx512()) + " | "; s += "AVX512_VBMI = " + std::to_string(ggml_cpu_has_avx512_vbmi()) + " | "; From 39d8bc71edcb8b6f99d46fa4216af7a15232e218 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 30 Dec 2023 13:52:01 +0100 Subject: [PATCH 175/811] CUDA: fixed tensor cores not being used on RDNA3 (#4697) --- ggml-cuda.cu | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 71a64ca09..8c2712308 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -119,10 +119,29 @@ #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 #define CC_OFFSET_AMD 1000000 +#define CC_RDNA1 (CC_OFFSET_AMD + 1010) #define CC_RDNA2 (CC_OFFSET_AMD + 1030) +#define CC_RDNA3 (CC_OFFSET_AMD + 1100) #define GGML_CUDA_MAX_NODES 8192 +// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication +// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant +// for large computational tasks. 
the drawback is that this requires some extra amount of VRAM: +// - 7B quantum model: +100-200 MB +// - 13B quantum model: +200-400 MB +// +//#define GGML_CUDA_FORCE_MMQ + +// TODO: improve this to be correct for more hardware +// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores +#if !defined(GGML_CUDA_FORCE_MMQ) +#define CUDA_USE_TENSOR_CORES +#endif + +// max batch size to use MMQ kernels when tensor cores are available +#define MMQ_MAX_BATCH_SIZE 32 + #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -189,23 +208,6 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { } #endif // defined(GGML_USE_HIPBLAS) -// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication -// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant -// for large computational tasks. the drawback is that this requires some extra amount of VRAM: -// - 7B quantum model: +100-200 MB -// - 13B quantum model: +200-400 MB -// -//#define GGML_CUDA_FORCE_MMQ - -// TODO: improve this to be correct for more hardware -// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores -#if !defined(GGML_CUDA_FORCE_MMQ) && (!defined(GGML_USE_HIPBLAS) || defined(RDNA3)) -#define CUDA_USE_TENSOR_CORES -#endif - -// max batch size to use MMQ kernels when tensor cores are available -#define MMQ_MAX_BATCH_SIZE 32 - #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data #endif @@ -8661,13 +8663,12 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - const bool fp16_performance_good = true; -#ifdef RDNA3 - const bool use_mul_mat_q = false; -#else - const bool use_mul_mat_q = true; -#endif // RDNA3 + const bool fp16_performance_good = min_compute_capability >= CC_RDNA1; + bool use_mul_mat_q = ggml_is_quantized(src0->type); +#ifdef CUDA_USE_TENSOR_CORES + use_mul_mat_q = use_mul_mat_q && min_compute_capability < CC_RDNA3; +#endif // CUDA_USE_TENSOR_CORES #else From 9fbda719de18a9400a064c28759c39d55d687d3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 30 Dec 2023 23:24:42 +0200 Subject: [PATCH 176/811] clip : refactor + bug fixes (#4696) * clip : refactor + bug fixes ggml-ci * server : add log message --- examples/llava/clip.cpp | 241 +++++++++++++++++++++---------------- examples/llava/clip.h | 48 +++----- examples/llava/llava.cpp | 4 +- examples/server/server.cpp | 38 +++--- 4 files changed, 169 insertions(+), 162 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 6a731eeec..cfb79e789 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -146,6 +146,27 @@ static std::string get_ftype(int ftype) { } } +// +// image data +// + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... 
+struct clip_image_f32 { + int nx; + int ny; + + std::vector buf; +}; + // // clip layers // @@ -204,16 +225,21 @@ struct clip_vision_model { }; struct clip_ctx { - bool has_text_encoder = false; - bool has_vision_encoder = false; + bool has_text_encoder = false; + bool has_vision_encoder = false; bool has_llava_projector = false; + struct clip_vision_model vision_model; + float image_mean[3]; float image_std[3]; bool use_gelu = false; int32_t ftype = 1; - struct ggml_context * ctx; + struct gguf_context * ctx_gguf; + struct ggml_context * ctx_data; + + std::vector buf_compute_meta; // memory buffers to evaluate the model ggml_backend_buffer_t params_buffer = NULL; @@ -222,7 +248,7 @@ struct clip_ctx { ggml_allocr * compute_alloc = NULL; }; -static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_image_f32_batch * imgs) { +static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32_batch * imgs) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return nullptr; @@ -243,13 +269,14 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima //const int projection_dim = hparams.projection_dim; const float eps = hparams.eps; int batch_size = imgs->size; - if(ctx->has_llava_projector) { + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } + struct ggml_init_params params = { - /*.mem_size =*/ GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead(), - /*.mem_buffer =*/ NULL, - /*.no_alloc =*/ true, + /*.mem_size =*/ ctx->buf_compute_meta.size(), + /*.mem_buffer =*/ ctx->buf_compute_meta.data(), + /*.no_alloc =*/ true, }; struct ggml_context * ctx0 = ggml_init(params); @@ -272,7 +299,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima for (int k = 0; k < 3; k++) { for (int y = 0; y < ny; y++) { for (int x = 0; x < nx; x++) { - data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].data[3 * (y * nx + x) + k]; + data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; } } } @@ -413,7 +440,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima ggml_allocr_alloc(ctx->compute_alloc, patches); if (!ggml_allocr_is_measure(ctx->compute_alloc)) { int* patches_data = (int*)malloc(ggml_nbytes(patches)); - for (int i = 0; i < num_positions; i++) { + for (int i = 0; i < num_patches; i++) { patches_data[i] = i + 1; } ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); @@ -561,8 +588,8 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { /*.no_alloc =*/ true, }; - new_clip->ctx = ggml_init(params); - if (!new_clip->ctx) { + new_clip->ctx_data = ggml_init(params); + if (!new_clip->ctx_data) { fprintf(stderr, "%s: ggml_init() failed\n", __func__); clip_free(new_clip); return nullptr; @@ -579,7 +606,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); struct ggml_tensor * t = ggml_get_tensor(meta, name); - struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx, t); + struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx_data, t); ggml_set_name(cur, name); } @@ -588,7 +615,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer); for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, 
i); - struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx, name); + struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx_data, name); ggml_allocr_alloc(alloc, cur); const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); @@ -617,20 +644,20 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // load vision model auto & vision_model = new_clip->vision_model; auto & hparams = vision_model.hparams; - hparams.hidden_size = get_u32(ctx, format(KEY_N_EMBD, "vision")); - hparams.n_head = get_u32(ctx, format(KEY_N_HEAD, "vision")); + hparams.hidden_size = get_u32(ctx, format(KEY_N_EMBD, "vision")); + hparams.n_head = get_u32(ctx, format(KEY_N_HEAD, "vision")); hparams.n_intermediate = get_u32(ctx, format(KEY_N_FF, "vision")); - hparams.n_layer = get_u32(ctx, format(KEY_N_BLOCK, "vision")); - hparams.image_size = get_u32(ctx, KEY_IMAGE_SIZE); - hparams.patch_size = get_u32(ctx, KEY_PATCH_SIZE); + hparams.n_layer = get_u32(ctx, format(KEY_N_BLOCK, "vision")); + hparams.image_size = get_u32(ctx, KEY_IMAGE_SIZE); + hparams.patch_size = get_u32(ctx, KEY_PATCH_SIZE); hparams.projection_dim = get_u32(ctx, format(KEY_PROJ_DIM, "vision")); - hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); + hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); int idx_mean = get_key_idx(ctx, KEY_IMAGE_MEAN); - int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); + int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); for (int i = 0; i < 3; ++i) { new_clip->image_mean[i] = *((const float *)gguf_get_arr_data(ctx, idx_mean)); - new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); + new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); } if (verbosity >= 2) { @@ -644,35 +671,35 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_n_layer %d\n", hparams.n_layer); } - vision_model.patch_embeddings = get_tensor(new_clip->ctx, TN_PATCH_EMBD); - vision_model.class_embedding = get_tensor(new_clip->ctx, TN_CLASS_EMBD); - vision_model.position_embeddings = get_tensor(new_clip->ctx, format(TN_POS_EMBD, "v")); - vision_model.pre_ln_w = get_tensor(new_clip->ctx, format(TN_LN_PRE, "v", "weight")); - vision_model.pre_ln_b = get_tensor(new_clip->ctx, format(TN_LN_PRE, "v", "bias")); - vision_model.mm_0_w = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 0, "weight")); - vision_model.mm_0_b = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 0, "bias")); - vision_model.mm_2_w = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 2, "weight")); - vision_model.mm_2_b = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 2, "bias")); + vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); + vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); + vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); + vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); + vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + vision_model.mm_0_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 0, "weight")); + vision_model.mm_0_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 0, "bias")); + vision_model.mm_2_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 2, "weight")); + vision_model.mm_2_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 2, "bias")); 
vision_model.layers.resize(hparams.n_layer); for (int il = 0; il < hparams.n_layer; ++il) { auto & layer = vision_model.layers[il]; - layer.k_w = get_tensor(new_clip->ctx, format(TN_ATTN_K, "v", il, "weight")); - layer.q_w = get_tensor(new_clip->ctx, format(TN_ATTN_Q, "v", il, "weight")); - layer.v_w = get_tensor(new_clip->ctx, format(TN_ATTN_V, "v", il, "weight")); - layer.o_w = get_tensor(new_clip->ctx, format(TN_ATTN_OUTPUT, "v", il, "weight")); - layer.ln_1_w = get_tensor(new_clip->ctx, format(TN_LN_1, "v", il, "weight")); - layer.ln_2_w = get_tensor(new_clip->ctx, format(TN_LN_2, "v", il, "weight")); - layer.ff_i_w = get_tensor(new_clip->ctx, format(TN_FFN_DOWN, "v", il, "weight")); - layer.ff_o_w = get_tensor(new_clip->ctx, format(TN_FFN_UP, "v", il, "weight")); - layer.k_b = get_tensor(new_clip->ctx, format(TN_ATTN_K, "v", il, "bias")); - layer.q_b = get_tensor(new_clip->ctx, format(TN_ATTN_Q, "v", il, "bias")); - layer.v_b = get_tensor(new_clip->ctx, format(TN_ATTN_V, "v", il, "bias")); - layer.o_b = get_tensor(new_clip->ctx, format(TN_ATTN_OUTPUT, "v", il, "bias")); - layer.ln_1_b = get_tensor(new_clip->ctx, format(TN_LN_1, "v", il, "bias")); - layer.ln_2_b = get_tensor(new_clip->ctx, format(TN_LN_2, "v", il, "bias")); - layer.ff_i_b = get_tensor(new_clip->ctx, format(TN_FFN_DOWN, "v", il, "bias")); - layer.ff_o_b = get_tensor(new_clip->ctx, format(TN_FFN_UP, "v", il, "bias")); + layer.k_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "weight")); + layer.q_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_Q, "v", il, "weight")); + layer.v_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_V, "v", il, "weight")); + layer.o_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_OUTPUT, "v", il, "weight")); + layer.ln_1_w = get_tensor(new_clip->ctx_data, format(TN_LN_1, "v", il, "weight")); + layer.ln_2_w = get_tensor(new_clip->ctx_data, format(TN_LN_2, "v", il, "weight")); + layer.ff_i_w = get_tensor(new_clip->ctx_data, format(TN_FFN_DOWN, "v", il, "weight")); + layer.ff_o_w = get_tensor(new_clip->ctx_data, format(TN_FFN_UP, "v", il, "weight")); + layer.k_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "bias")); + layer.q_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_Q, "v", il, "bias")); + layer.v_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_V, "v", il, "bias")); + layer.o_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_OUTPUT, "v", il, "bias")); + layer.ln_1_b = get_tensor(new_clip->ctx_data, format(TN_LN_1, "v", il, "bias")); + layer.ln_2_b = get_tensor(new_clip->ctx_data, format(TN_LN_2, "v", il, "bias")); + layer.ff_i_b = get_tensor(new_clip->ctx_data, format(TN_FFN_DOWN, "v", il, "bias")); + layer.ff_o_b = get_tensor(new_clip->ctx_data, format(TN_FFN_UP, "v", il, "bias")); } } @@ -680,8 +707,9 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { new_clip->ctx_gguf = ctx; -// measure mem requirement and allocate + // measure mem requirement and allocate { + new_clip->buf_compute_meta.resize(GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead()); new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend); clip_image_f32_batch batch; batch.size = 1; @@ -697,26 +725,27 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return new_clip; } -clip_image_u8 * make_clip_image_u8() { - auto img = new clip_image_u8(); - return img; +struct clip_image_u8 * clip_image_u8_init() { + return new clip_image_u8(); } -clip_image_f32 * make_clip_image_f32() { 
return new clip_image_f32(); } -void clip_image_u8_free(clip_image_u8 * img) { if (img->data) { delete[] img->data; } delete img; } -void clip_image_f32_free(clip_image_f32 * img) { if (img->data) { delete[] img->data; } delete img; } +struct clip_image_f32 * clip_image_f32_init() { + return new clip_image_f32(); +} + +void clip_image_u8_free (struct clip_image_u8 * img) { delete img; } +void clip_image_f32_free(struct clip_image_f32 * img) { delete img; } static void build_clip_img_from_data(const stbi_uc * data, int nx, int ny, clip_image_u8 * img) { img->nx = nx; img->ny = ny; - img->size = nx * ny * 3; - img->data = new uint8_t[img->size](); - memcpy(img->data, data, img->size); + img->buf.resize(3 * nx * ny); + memcpy(img->buf.data(), data, img->buf.size()); } bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { int nx, ny, nc; - auto data = stbi_load(fname, &nx, &ny, &nc, 3); + auto * data = stbi_load(fname, &nx, &ny, &nc, 3); if (!data) { fprintf(stderr, "%s: failed to load image '%s'\n", __func__, fname); return false; @@ -728,7 +757,7 @@ bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img) { int nx, ny, nc; - auto data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); + auto * data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); if (!data) { fprintf(stderr, "%s: failed to decode image bytes\n", __func__); return false; @@ -740,7 +769,7 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length // normalize: x = (x - mean) / std // TODO: implement bicubic interpolation instead of linear. -bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { +bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -749,18 +778,17 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) // see https://github.com/haotian-liu/LLaVA/blob/e854a2bf85118c504f6f16bf5c3c7c92f8fa8c6b/llava/conversation.py#L113-L156 - clip_image_u8 * temp = make_clip_image_u8(); // we will keep the input image data here temporarily + clip_image_u8 * temp = clip_image_u8_init(); // we will keep the input image data here temporarily if (pad2square && img->nx != img->ny) { int longer_side = std::max(img->nx, img->ny); temp->nx = longer_side; temp->ny = longer_side; - temp->size = 3 * longer_side * longer_side; - temp->data = new uint8_t[temp->size](); - uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA + temp->buf.resize(3 * longer_side * longer_side); + const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA // fill with background color - for (size_t i = 0; i < temp->size; i++) { - temp->data[i] = bc[i % 3]; + for (size_t i = 0; i < temp->buf.size(); i++) { + temp->buf[i] = bc[i % 3]; } // copy from the input image @@ -768,17 +796,16 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip for (int x = 0; x < img->nx; x++) { const int i = 3 * (y * img->nx + x); const int j = 3 * (y * temp->nx + x); - temp->data[j] = img->data[i]; - temp->data[j+1] = img->data[i+1]; - 
temp->data[j+2] = img->data[i+2]; + temp->buf[j] = img->buf[i]; + temp->buf[j+1] = img->buf[i+1]; + temp->buf[j+2] = img->buf[i+2]; } } } else { - temp->nx = img->nx; - temp->ny = img->ny; - temp->size = img->size; - temp->data = new uint8_t[temp->size](); - memcpy(&temp->data[0], &img->data[0], temp->size); // copy + temp->nx = img->nx; + temp->ny = img->ny; + temp->buf.resize(img->buf.size()); + memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); } const int nx = temp->nx; @@ -789,8 +816,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip res->nx = nx2; res->ny = ny2; - res->size = 3 * nx2 * ny2; - res->data = new float[res->size](); + res->buf.resize(3 * nx2 * ny2); const float scale = std::max(nx, ny) / (float)ctx->vision_model.hparams.image_size; @@ -821,10 +847,10 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip const int j10 = 3 * (y1 * nx + x0) + c; const int j11 = 3 * (y1 * nx + x1) + c; - const float v00 = temp->data[j00]; - const float v01 = temp->data[j01]; - const float v10 = temp->data[j10]; - const float v11 = temp->data[j11]; + const float v00 = temp->buf[j00]; + const float v01 = temp->buf[j01]; + const float v10 = temp->buf[j10]; + const float v11 = temp->buf[j11]; const float v0 = v00 * (1.0f - dx) + v01 * dx; const float v1 = v10 * (1.0f - dx) + v11 * dx; @@ -835,7 +861,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip const int i = 3 * (y * nx3 + x) + c; - res->data[i] = ((float(v2) / 255.0f) - m3[c]) / s3[c]; + res->buf[i] = ((float(v2) / 255.0f) - m3[c]) / s3[c]; } } } @@ -845,12 +871,13 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip } void clip_free(clip_ctx * ctx) { - ggml_free(ctx->ctx); + ggml_free(ctx->ctx_data); gguf_free(ctx->ctx_gguf); + delete ctx; } -bool clip_image_encode(const clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { +bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -862,8 +889,7 @@ bool clip_image_encode(const clip_ctx * ctx, const int n_threads, clip_image_f32 return clip_image_batch_encode(ctx, n_threads, &imgs, vec); } -bool clip_image_batch_encode(const clip_ctx * ctx, const int n_threads, const clip_image_f32_batch * imgs, float * vec) { - +bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_image_f32_batch * imgs, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -906,31 +932,32 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i ggml_type type = GGML_TYPE_Q4_1; switch (itype) { - case 2: - type = GGML_TYPE_Q4_0; - break; - case 3: - type = GGML_TYPE_Q4_1; - break; - case 6: - type = GGML_TYPE_Q5_0; - break; - case 7: - type = GGML_TYPE_Q5_1; - break; - case 8: - type = GGML_TYPE_Q8_0; - break; - default: - fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); - return false; + case 2: + type = GGML_TYPE_Q4_0; + break; + case 3: + type = GGML_TYPE_Q4_1; + break; + case 6: + type = GGML_TYPE_Q5_0; + break; + case 7: + type = GGML_TYPE_Q5_1; + break; + case 8: + type = GGML_TYPE_Q8_0; + break; + default: + fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); + return false; }; - auto ctx_clip = clip_model_load(fname_inp, 2); - const auto & 
ctx_src = ctx_clip->ctx_gguf; - const auto & ctx_data = ctx_clip->ctx; + auto * ctx_clip = clip_model_load(fname_inp, 2); - auto ctx_out = gguf_init_empty(); + const auto & ctx_src = ctx_clip->ctx_gguf; + const auto & ctx_data = ctx_clip->ctx_data; + + auto * ctx_out = gguf_init_empty(); gguf_set_kv(ctx_out, ctx_src); gguf_set_val_u32(ctx_out, "general.quantization_version", GGML_QNT_VERSION); gguf_set_val_u32(ctx_out, "general.file_type", itype); diff --git a/examples/llava/clip.h b/examples/llava/clip.h index f11df85de..458a256a1 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -35,31 +35,14 @@ struct clip_vision_hparams { float eps; }; -/** load mmproj model */ -CLIP_API struct clip_ctx * clip_model_load(const char * fname, const int verbosity); -/** free mmproj model */ +CLIP_API struct clip_ctx * clip_model_load(const char * fname, int verbosity); + CLIP_API void clip_free(struct clip_ctx * ctx); -size_t clip_embd_nbytes(const struct clip_ctx * ctx); -int clip_n_patches(const struct clip_ctx * ctx); -int clip_n_mmproj_embd(const struct clip_ctx * ctx); +CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); -// RGB uint8 image -struct clip_image_u8 { - int nx; - int ny; - uint8_t * data = NULL; - size_t size; -}; - -// RGB float32 image (NHWC) -// Memory layout: RGBRGBRGB... -struct clip_image_f32 { - int nx; - int ny; - float * data = NULL; - size_t size; -}; +CLIP_API int clip_n_patches (const struct clip_ctx * ctx); +CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); struct clip_image_u8_batch { struct clip_image_u8 * data; @@ -71,21 +54,22 @@ struct clip_image_f32_batch { size_t size; }; -struct clip_image_u8 * make_clip_image_u8(); -struct clip_image_f32 * make_clip_image_f32(); -CLIP_API void clip_image_u8_free(clip_image_u8 * img); -CLIP_API void clip_image_f32_free(clip_image_f32 * img); +CLIP_API struct clip_image_u8 * clip_image_u8_init (); +CLIP_API struct clip_image_f32 * clip_image_f32_init(); + +CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); +CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); + CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); + /** interpret bytes as an image file with length bytes_length, and use the result to populate img */ CLIP_API bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img); -bool clip_image_preprocess(const struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, const bool pad2square); -bool clip_image_encode(const struct clip_ctx * ctx, const int n_threads, struct clip_image_f32 * img, float * vec); +CLIP_API bool clip_image_preprocess (struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, bool pad2square); +CLIP_API bool clip_image_encode (struct clip_ctx * ctx, int n_threads, struct clip_image_f32 * img, float * vec); +CLIP_API bool clip_image_batch_encode(struct clip_ctx * ctx, int n_threads, const struct clip_image_f32_batch * imgs, float * vec); -bool clip_image_batch_encode(const struct clip_ctx * ctx, const int n_threads, const struct clip_image_f32_batch * imgs, - float * vec); - -bool clip_model_quantize(const char * fname_inp, const char * fname_out, const int itype); +CLIP_API bool clip_model_quantize(const char * fname_inp, const char * fname_out, int itype); #ifdef __cplusplus } diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 0cae8c4b1..d42e7582e 100644 --- a/examples/llava/llava.cpp 
+++ b/examples/llava/llava.cpp @@ -10,7 +10,7 @@ #include "base64.hpp" static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float * image_embd, int * n_img_pos) { - clip_image_f32 * img_res = make_clip_image_f32(); + clip_image_f32 * img_res = clip_image_f32_init(); if (!clip_image_preprocess(ctx_clip, img, img_res, /*pad2square =*/ true)) { fprintf(stderr, "%s: unable to preprocess image\n", __func__); clip_image_f32_free(img_res); @@ -86,7 +86,7 @@ bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_ } LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { - clip_image_u8 * img = make_clip_image_u8(); + clip_image_u8 * img = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) { clip_image_u8_free(img); fprintf(stderr, "%s: can't load image from bytes, is it a valid image?", __func__); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0aada8e28..52d9b9768 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -82,7 +82,7 @@ static inline bool is_base64(uint8_t c) return (isalnum(c) || (c == '+') || (c == '/')); } -static std::vector base64_decode(std::string const &encoded_string) +static std::vector base64_decode(const std::string & encoded_string) { int i = 0; int j = 0; @@ -209,10 +209,10 @@ struct slot_image int32_t id; bool request_encode_image = false; - float* image_embedding = nullptr; + float * image_embedding = nullptr; int32_t image_tokens = 0; - clip_image_u8 img_data; + clip_image_u8 * img_data; std::string prefix_prompt; // before of this image }; @@ -434,10 +434,12 @@ struct llama_client_slot generated_token_probs.clear(); - for (slot_image &img : images) + for (slot_image & img : images) { free(img.image_embedding); - delete[] img.img_data.data; + if (img.img_data) { + clip_image_u8_free(img.img_data); + } img.prefix_prompt = ""; } @@ -851,24 +853,17 @@ struct llama_server_context { for (const auto &img : *images_data) { - std::string data_b64 = img["data"].get(); + const std::vector image_buffer = base64_decode(img["data"].get()); + slot_image img_sl; img_sl.id = img.count("id") != 0 ? 
img["id"].get() : slot->images.size(); - int width, height, channels; - std::vector image_buffer = base64_decode(data_b64); - data_b64.clear(); - auto data = stbi_load_from_memory(image_buffer.data(), image_buffer.size(), &width, &height, &channels, 3); - if (!data) { + img_sl.img_data = clip_image_u8_init(); + if (!clip_image_load_from_bytes(image_buffer.data(), image_buffer.size(), img_sl.img_data)) + { LOG_TEE("slot %i - failed to load image [id: %i]\n", slot->id, img_sl.id); return false; } - LOG_TEE("slot %i - image loaded [id: %i] resolution (%i x %i)\n", slot->id, img_sl.id, width, height); - img_sl.img_data.nx = width; - img_sl.img_data.ny = height; - img_sl.img_data.size = width * height * 3; - img_sl.img_data.data = new uint8_t[width * height * 3](); - memcpy(img_sl.img_data.data, data, width * height * 3); - stbi_image_free(data); + LOG_TEE("slot %i - loaded image\n", slot->id); img_sl.request_encode_image = true; slot->images.push_back(img_sl); } @@ -1143,8 +1138,8 @@ struct llama_server_context { continue; } - clip_image_f32 img_res; - if (!clip_image_preprocess(clp_ctx, &img.img_data, &img_res, /*pad2square =*/ true)) + clip_image_f32 * img_res = clip_image_f32_init(); + if (!clip_image_preprocess(clp_ctx, img.img_data, img_res, /*pad2square =*/ true)) { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); @@ -1159,11 +1154,12 @@ struct llama_server_context return false; } LOG_TEE("slot %i - encoding image [id: %i]\n", slot.id, img.id); - if (!clip_image_encode(clp_ctx, params.n_threads, &img_res, img.image_embedding)) + if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) { LOG_TEE("Unable to encode image\n"); return false; } + clip_image_f32_free(img_res); img.request_encode_image = false; } From e39106c0554cbd0e9310e08fb3b2a577ea4b6273 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 31 Dec 2023 11:43:31 +0200 Subject: [PATCH 177/811] ggml : add ggml_vdotq_s32 alias (#4715) ggml-ci --- ggml-quants.c | 118 ++++++++++++++++++++++++++------------------------ 1 file changed, 61 insertions(+), 57 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 05ef8f9b7..55a9496d1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -410,13 +410,17 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #if !defined(__ARM_FEATURE_DOTPROD) -inline static int32x4_t vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { +inline static int32x4_t ggml_vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); } +#else + +#define ggml_vdotq_s32(a, b, c) vdotq_s32(a, b, c) + #endif #endif @@ -2481,8 +2485,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); - const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); 
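For reference, a small sketch of how the renamed helper is meant to be called (illustrative only, not part of the patch; it assumes an AArch64 NEON target and that the `ggml_vdotq_s32` definition from the hunk above is in scope):

```c
#include <arm_neon.h>
#include <stdint.h>

// dot product of two 16-element int8 vectors, reduced to a single int32;
// ggml_vdotq_s32 maps to vdotq_s32 when __ARM_FEATURE_DOTPROD is available,
// and to the vmull_s8-based fallback shown above otherwise
static int32_t dot_q8_16(const int8_t * a, const int8_t * b) {
    const int8x16_t va = vld1q_s8(a);
    const int8x16_t vb = vld1q_s8(b);
    const int32x4_t acc = ggml_vdotq_s32(vdupq_n_s32(0), va, vb);
    return vaddvq_s32(acc); // horizontal sum of the four 32-bit lanes
}
```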
@@ -2769,8 +2773,8 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); - const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); @@ -2936,11 +2940,11 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3228,11 +3232,11 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; @@ -3483,12 +3487,12 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), - vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + ggml_vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), + ggml_vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), - vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + ggml_vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), + ggml_vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3598,8 +3602,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri // We use this macro instead of a function call because for some reason // the code runs 2-3% slower, even if the function is declared inline 
#define MULTIPLY_ACCUM_WITH_SCALE(index)\ - isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ - isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; #define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\ q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\ @@ -3973,10 +3977,10 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[2] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 4), m3)); q2bytes.val[3] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 6), m3)); - isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; - isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; - isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; - isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; + isum1 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; + isum2 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; + isum1 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; + isum2 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; sum += d * (isum1 + isum2); } @@ -4256,10 +4260,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 2), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 2), m3b)), vreinterpretq_s8_u8(q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; scale += 4; @@ -4273,10 +4277,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 6), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 6), m3b)), vreinterpretq_s8_u8(q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; + isum += 
vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; scale += 4; @@ -4757,10 +4761,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(vshrq_n_u8(q3bits, 4), m3b), q3h.val[2])); q3bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q3bits, 6), q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; sum += d * isum; @@ -5109,14 +5113,14 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int32x4_t p1 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi1 += vaddvq_s32(p1) * scales[2*j+0]; q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi2 += vaddvq_s32(p2) * scales[2*j+1]; } @@ -5449,13 +5453,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int32x4_t p1 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); const int32_t sumi1 = vaddvq_s32(p1) * scales[0]; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); const int32_t sumi2 = vaddvq_s32(p2) * scales[1]; sumf += d * (sumi1 + sumi2); @@ -5722,8 +5726,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[0], 4), q5h.val[2])); q5bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[1], 4), q5h.val[3])); - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[2], 
q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; + sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; + sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; } sumf += d * sumi - dmin * sumi_mins; @@ -6112,10 +6116,10 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[0], 4)), vreinterpretq_s8_u8(q5h.val[2])); q5bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[1], 4)), vreinterpretq_s8_u8(q5h.val[3])); - int32_t sumi1 = sc[0] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); - int32_t sumi2 = sc[1] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); - int32_t sumi3 = sc[2] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); - int32_t sumi4 = sc[3] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); + int32_t sumi1 = sc[0] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); + int32_t sumi2 = sc[1] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); + int32_t sumi3 = sc[2] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); + int32_t sumi4 = sc[3] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); sumf += d * (sumi1 + sumi2 + sumi3 + sumi4); } @@ -6399,10 +6403,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; @@ -6426,10 +6430,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; } //sum += isum * d_all * y[i].d; @@ -6816,10 +6820,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = 
vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[2])), m32s); q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[3])), m32s); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; sum += isum * d_all * y[i].d; From 1e3900ebacb3a0b385271389686403c97ad76d88 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Fri, 29 Dec 2023 16:15:37 +0000 Subject: [PATCH 178/811] flake.nix: expose full scope in legacyPackages --- .devops/nix/jetson-support.nix | 19 +++++++++++++------ flake.nix | 20 +++++++++++++++++--- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/.devops/nix/jetson-support.nix b/.devops/nix/jetson-support.nix index 08426d2ab..78e2e40e0 100644 --- a/.devops/nix/jetson-support.nix +++ b/.devops/nix/jetson-support.nix @@ -8,12 +8,13 @@ pkgsCuda, ... }: - lib.optionalAttrs (system == "aarch64-linux") { - packages = + { + legacyPackages = let - caps.jetson-xavier = "7.2"; - caps.jetson-orin = "8.7"; - caps.jetson-nano = "5.3"; + caps.llamaPackagesXavier = "7.2"; + caps.llamaPackagesOrin = "8.7"; + caps.llamaPackagesTX2 = "6.2"; + caps.llamaPackagesNano = "5.3"; pkgsFor = cap: @@ -27,6 +28,12 @@ }; }; in - builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps; + builtins.mapAttrs (name: cap: (pkgsFor cap).callPackage ./scope.nix { }) caps; + + packages = lib.optionalAttrs (system == "aarch64-linux") { + jetson-xavier = config.legacyPackages.llamaPackagesXavier.llama-cpp; + jetson-orin = config.legacyPackages.llamaPackagesOrin.llama-cpp; + jetson-nano = config.legacyPackages.llamaPackagesNano.llama-cpp; + }; }; } diff --git a/flake.nix b/flake.nix index 2209070aa..6785b52f4 100644 --- a/flake.nix +++ b/flake.nix @@ -80,16 +80,30 @@ ... }: { + # Unlike `.#packages`, legacyPackages may contain values of + # arbitrary types (including nested attrsets) and may even throw + # exceptions. This attribute isn't recursed into by `nix flake + # show` either. + # + # You can add arbitrary scripts to `.devops/nix/scope.nix` and + # access them as `nix build .#llamaPackages.${scriptName}` using + # the same path you would with an overlay. + legacyPackages = { + llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + }; + # We don't use the overlay here so as to avoid making too many instances of nixpkgs, # cf. 
https://zimbatm.com/notes/1000-instances-of-nixpkgs packages = { - default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + default = config.legacyPackages.llamaPackages.llama-cpp; } // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; - cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; - rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp; + rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; From a5c088d8c698299b973d2709153e5d95295606d9 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Tue, 26 Dec 2023 23:34:40 +0000 Subject: [PATCH 179/811] flake.nix: rocm not yet supported on aarch64, so hide the output --- flake.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 6785b52f4..920a79906 100644 --- a/flake.nix +++ b/flake.nix @@ -74,6 +74,7 @@ { config, lib, + system, pkgs, pkgsCuda, pkgsRocm, @@ -103,10 +104,12 @@ // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp; - rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; + } + // lib.optionalAttrs (system == "x86_64-linux") { + rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; }; }; }; From 356ea17e0f92bfbbf28a4f69261bed48eff68d9c Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Fri, 29 Dec 2023 16:21:50 +0000 Subject: [PATCH 180/811] flake.nix: expose checks --- flake.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/flake.nix b/flake.nix index 920a79906..8d0f095d7 100644 --- a/flake.nix +++ b/flake.nix @@ -111,6 +111,11 @@ // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; }; + + # Packages exposed in `.#checks` will be built by the CI and by + # `nix flake check`. 
Currently we expose all packages, but we could + # make more granular choices + checks = config.packages; }; }; } From 7adedecbe39bd552bc14142f496246d55a43ac4e Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Tue, 26 Dec 2023 19:17:26 +0000 Subject: [PATCH 181/811] workflows: nix-ci: init; build flake outputs --- .github/workflows/build.yml | 1 - .github/workflows/nix-ci.yml | 44 ++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/nix-ci.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a5090e398..0a28a1111 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -515,7 +515,6 @@ jobs: - name: Build Xcode project run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build - # freeBSD-latest: # runs-on: macos-12 # steps: diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml new file mode 100644 index 000000000..f82b2cb3d --- /dev/null +++ b/.github/workflows/nix-ci.yml @@ -0,0 +1,44 @@ +name: Nix CI + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + +jobs: + nix-build: + if: ${{ vars.CACHIX_NAME != '' }} + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, macos-latest ] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --flake + ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" From 1e9ae54cf24d27afe3900d1250634a2a33423db1 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 17:19:11 +0000 Subject: [PATCH 182/811] workflows: nix-ci: add a job for eval --- .github/workflows/nix-ci.yml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index f82b2cb3d..845b93bfb 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -11,6 +11,33 @@ on: paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] jobs: + nix-eval: + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, macos-latest ] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout 
repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: List all flake outputs + run: nix flake show --all-systems + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.$(nix eval --raw --impure --expr builtins.currentSystem)" nix-build: if: ${{ vars.CACHIX_NAME != '' }} strategy: From c5239944bab0ff71915df8f2dc7e42fc2c138ff6 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 16:38:36 +0000 Subject: [PATCH 183/811] workflows: weekly `nix flake update` --- .github/workflows/nix-flake-update.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/nix-flake-update.yml diff --git a/.github/workflows/nix-flake-update.yml b/.github/workflows/nix-flake-update.yml new file mode 100644 index 000000000..fa9360841 --- /dev/null +++ b/.github/workflows/nix-flake-update.yml @@ -0,0 +1,22 @@ +name: update-flake-lock +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00 + +jobs: + lockfile: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@main + - name: Update flake.lock + uses: DeterminateSystems/update-flake-lock@main + with: + pr-title: "nix: update flake.lock" + pr-labels: | + nix + pr-reviewers: philiptaron,SomeoneSerge + token: ${{ secrets.GITHUB_TOKEN }} From 06f2a5d1909a1385b1a16dab4ade68377e121bdd Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 17:36:08 +0000 Subject: [PATCH 184/811] workflows: nix-flakestry: drop tag filters ...and add a job for flakehub.com --- .github/workflows/nix-flakestry.yml | 23 ---------------- .github/workflows/nix-publish-flake.yml | 36 +++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/nix-flakestry.yml create mode 100644 .github/workflows/nix-publish-flake.yml diff --git a/.github/workflows/nix-flakestry.yml b/.github/workflows/nix-flakestry.yml deleted file mode 100644 index 3abfb3509..000000000 --- a/.github/workflows/nix-flakestry.yml +++ /dev/null @@ -1,23 +0,0 @@ -# Make the flake discoverable on https://flakestry.dev -name: "Publish a flake to flakestry" -on: - push: - tags: - - "v?[0-9]+.[0-9]+.[0-9]+" - - "v?[0-9]+.[0-9]+" - workflow_dispatch: - inputs: - tag: - description: "The existing tag to publish" - type: "string" - required: true -jobs: - publish-flake: - runs-on: ubuntu-latest - permissions: - id-token: "write" - contents: "read" - steps: - - uses: flakestry/flakestry-publish@main - with: - version: "${{ inputs.tag || github.ref_name }}" diff --git a/.github/workflows/nix-publish-flake.yml b/.github/workflows/nix-publish-flake.yml new file mode 100644 index 000000000..2c3c1ebda --- /dev/null +++ b/.github/workflows/nix-publish-flake.yml @@ -0,0 +1,36 @@ +# Make the flake discoverable on https://flakestry.dev and https://flakehub.com/flakes +name: "Publish a flake to flakestry & flakehub" 
+on: + push: + tags: + - "*" + workflow_dispatch: + inputs: + tag: + description: "The existing tag to publish" + type: "string" + required: true +jobs: + flakestry-publish: + runs-on: ubuntu-latest + permissions: + id-token: "write" + contents: "read" + steps: + - uses: flakestry/flakestry-publish@main + with: + version: "${{ inputs.tag || github.ref_name }}" + flakehub-publish: + runs-on: "ubuntu-latest" + permissions: + id-token: "write" + contents: "read" + steps: + - uses: "actions/checkout@v4" + with: + ref: "${{ (inputs.tag != null) && format('refs/tags/{0}', inputs.tag) || '' }}" + - uses: "DeterminateSystems/nix-installer-action@main" + - uses: "DeterminateSystems/flakehub-push@main" + with: + visibility: "public" + tag: "${{ inputs.tag }}" From d8361747317c5cb2e00e7fb3b59ff4dce5a176a5 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 18:01:07 +0000 Subject: [PATCH 185/811] workflows: nix-ci: add a qemu job for jetsons --- .github/workflows/nix-ci.yml | 41 ++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index 845b93bfb..a38c6ead4 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -69,3 +69,44 @@ jobs: -- --skip-cached --no-nom --flake ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" + nix-build-aarch64: + if: ${{ vars.CACHIX_NAME != '' }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install QEMU + # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 + run: | + sudo apt-get install -y qemu-user-static qemu-system-aarch64 + sudo usermod -a -G kvm $USER + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-platforms = aarch64-linux + extra-system-features = nixos-test kvm + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.aarch64-linux" + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --systems aarch64-linux + --flake + ".#checks.aarch64-linux" From 198ed7ebfc89b8f2b35a8b1655d57bfb57530c1a Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 18:25:25 +0000 Subject: [PATCH 186/811] flake.nix: suggest the binary caches --- flake.nix | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/flake.nix b/flake.nix index 8d0f095d7..488ed6c59 100644 --- a/flake.nix +++ b/flake.nix @@ -6,6 +6,29 @@ flake-parts.url = "github:hercules-ci/flake-parts"; }; + # Optional binary cache + nixConfig = { + extra-substituters = [ + # Populated by the CI in ggerganov/llama.cpp + "https://llama-cpp.cachix.org" + + # A development cache for nixpkgs imported with `config.cudaSupport = true`. + # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci. 
+ # This lets one skip building e.g. the CUDA-enabled openmpi. + # TODO: Replace once nix-community obtains an official one. + "https://cuda-maintainers.cachix.org" + ]; + + # Verify these are the same keys as published on + # - https://app.cachix.org/cache/llama-cpp + # - https://app.cachix.org/cache/cuda-maintainers + extra-trusted-public-keys = [ + "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" + "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" + ]; + }; + + # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl: # # ```bash From edd1ab7bc34c10a780ee7f9a4499f7689cdad36d Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sun, 31 Dec 2023 17:42:22 +0000 Subject: [PATCH 187/811] flake.lock: update to a commit recently cached by nixpkgs-cuda-ci --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 3fcd1f45d..15a0a1a8e 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1703559957, - "narHash": "sha256-x9PUuMEPGUOMB51zNxrDr2QoHbYWlCS2xhFedm9MC5Q=", + "lastModified": 1703637592, + "narHash": "sha256-8MXjxU0RfFfzl57Zy3OfXCITS0qWDNLzlBAdwxGZwfY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "75dd68c36f458c6593c5bbb48abfd3e59bfed380", + "rev": "cfc3698c31b1fb9cdcf10f36c9643460264d0ca8", "type": "github" }, "original": { From 58ba655af054715c0516ee270ad028ad9e74f357 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 10:57:44 +0200 Subject: [PATCH 188/811] metal : enable shader debugging (cmake option) (#4705) * ggml : disable fast-math for Metal (cmake build only) ggml-ci * metal : fix Metal API debug warnings * cmake : add -fno-inline for Metal build (#4545) * metal : fix API debug warnings * metal : fix compile warnings * metal : use uint64_t for strides * cmake : rename option to LLAMA_METAL_SHADER_DEBUG * metal : fix mat-vec Q8_0 kernel for BS > 1 * metal : normalize mat-vec kernel signatures * cmake : respect LLAMA_QKK_64 option * metal : fix mat-vec Q4_K kernel for QK_K == 64 ggml-ci --- CMakeLists.txt | 34 ++- ci/run.sh | 14 +- ggml-metal.m | 28 ++- ggml-metal.metal | 475 +++++++++++++++++++++---------------- tests/test-backend-ops.cpp | 8 +- 5 files changed, 329 insertions(+), 230 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 545aab267..57ae4c2df 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -95,6 +95,7 @@ option(LLAMA_HIP_UMA "llama: use HIP unified memory arch option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) +option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) option(LLAMA_MPI "llama: use MPI" OFF) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) @@ -154,9 +155,9 @@ if (APPLE AND LLAMA_ACCELERATE) endif() if (LLAMA_METAL) - find_library(FOUNDATION_LIBRARY Foundation REQUIRED) - find_library(METAL_FRAMEWORK Metal REQUIRED) - find_library(METALKIT_FRAMEWORK MetalKit REQUIRED) + find_library(FOUNDATION_LIBRARY Foundation REQUIRED) + find_library(METAL_FRAMEWORK Metal REQUIRED) + find_library(METALKIT_FRAMEWORK MetalKit REQUIRED) message(STATUS "Metal framework found") set(GGML_HEADERS_METAL ggml-metal.h) @@ -173,6 +174,33 @@ if (LLAMA_METAL) # copy ggml-metal.metal to bin directory configure_file(ggml-metal.metal ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal COPYONLY) + if 
(LLAMA_METAL_SHADER_DEBUG) + # custom command to do the following: + # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air + # xcrun -sdk macosx metallib ggml-metal.air -o ggml.metallib + # + # note: this is the only way I found to disable fast-math in Metal. it's ugly, but at least it works + # disabling fast math is needed in order to pass tests/test-backend-ops + # note: adding -fno-inline fixes the tests when using MTL_SHADER_VALIDATION=1 + set(XC_FLAGS -fno-fast-math -fno-inline -g) + if (LLAMA_QKK_64) + set(XC_FLAGS ${XC_FLAGS} -DQK_K=64) + endif() + + add_custom_command( + OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + COMMAND xcrun -sdk macosx metal ${XC_FLAGS} -c ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air + COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + DEPENDS ggml-metal.metal + COMMENT "Compiling Metal kernels" + ) + + add_custom_target( + ggml-metal ALL + DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + ) + endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${FOUNDATION_LIBRARY} ${METAL_FRAMEWORK} diff --git a/ci/run.sh b/ci/run.sh index 2e3343831..47a254f4c 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -30,6 +30,12 @@ sd=`dirname $0` cd $sd/../ SRC=`pwd` +CMAKE_EXTRA="" + +if [ ! -z ${GG_BUILD_METAL} ]; then + CMAKE_EXTRA="${CMAKE_EXTRA} -DLLAMA_METAL_SHADER_DEBUG=ON" +fi + ## helpers # download a file if it does not exist or if it is outdated @@ -81,8 +87,8 @@ function gg_run_ctest_debug { set -e - (time cmake -DCMAKE_BUILD_TYPE=Debug .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log - (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + (time cmake -DCMAKE_BUILD_TYPE=Debug ${CMAKE_EXTRA} .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log (time ctest --output-on-failure -E test-opt ) 2>&1 | tee -a $OUT/${ci}-ctest.log @@ -109,8 +115,8 @@ function gg_run_ctest_release { set -e - (time cmake -DCMAKE_BUILD_TYPE=Release .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log - (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + (time cmake -DCMAKE_BUILD_TYPE=Release ${CMAKE_EXTRA} .. 
) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log if [ -z ${GG_BUILD_LOW_PERF} ]; then (time ctest --output-on-failure ) 2>&1 | tee -a $OUT/${ci}-ctest.log diff --git a/ggml-metal.m b/ggml-metal.m index 51a72ae33..cd9d00456 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -257,13 +257,14 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { bundle = [NSBundle bundleForClass:[GGMLMetalClass class]]; #endif NSError * error = nil; - NSString * libPath = [bundle pathForResource:@"default" ofType:@"metallib"]; + NSString * libPath = [bundle pathForResource:@"ggml" ofType:@"metallib"]; if (libPath != nil) { + // pre-compiled library found NSURL * libURL = [NSURL fileURLWithPath:libPath]; GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [libPath UTF8String]); ctx->library = [ctx->device newLibraryWithURL:libURL error:&error]; } else { - GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); + GGML_METAL_LOG_INFO("%s: ggml.metallib not found, loading from source\n", __func__); NSString * sourcePath; NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; @@ -291,6 +292,13 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { options = [MTLCompileOptions new]; options.preprocessorMacros = @{ @"QK_K" : @(64) }; #endif + // try to disable fast-math + // NOTE: this seems to have no effect whatsoever + // instead, in order to disable fast-math, we have to build ggml.metallib from the command line + // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air + // and go through the "pre-compiled library found" path above + //[options setFastMathEnabled:false]; + ctx->library = [ctx->device newLibraryWithSource:src options:options error:&error]; } @@ -1230,7 +1238,7 @@ void ggml_metal_graph_compute( // not sure how to avoid this // TODO: make a simpler cpy_bytes kernel - const int nth = MIN(1024, ne00); + const int nth = MIN((int) ctx->pipeline_cpy_f32_f32.maxTotalThreadsPerThreadgroup, ne00); [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1285,7 +1293,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - const int nth = MIN(1024, ne0); + const int nth = MIN((int) ctx->pipeline_add.maxTotalThreadsPerThreadgroup, ne00); [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1785,8 +1793,9 @@ void ggml_metal_graph_compute( [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; // TODO: how to make this an array? read Metal docs - for (int j = 0; j < n_as; ++j) { - struct ggml_tensor * src_cur = dst->src[2 + j]; + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; size_t offs_src_cur = 0; id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); @@ -1909,8 +1918,9 @@ void ggml_metal_graph_compute( [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; // TODO: how to make this an array? 
read Metal docs - for (int j = 0; j < n_as; ++j) { - struct ggml_tensor * src_cur = dst->src[2 + j]; + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; size_t offs_src_cur = 0; id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); @@ -2229,7 +2239,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - const int nth = MIN(1024, ne0); + const int nth = MIN((int) ctx->pipeline_upscale_f32.maxTotalThreadsPerThreadgroup, ne0); [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index d5b54e112..1d5b8f6f4 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -59,26 +59,26 @@ kernel void kernel_add( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, constant int64_t & offs, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], @@ -109,26 +109,26 @@ kernel void kernel_mul( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], uint3 ntg[[threads_per_threadgroup]]) { @@ -158,26 +158,26 @@ kernel void kernel_div( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, 
- constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], uint3 ntg[[threads_per_threadgroup]]) { @@ -205,7 +205,7 @@ kernel void kernel_add_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] + src1[tpig % nb]; } @@ -214,7 +214,7 @@ kernel void kernel_mul_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] * src1[tpig % nb]; } @@ -223,7 +223,7 @@ kernel void kernel_div_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] / src1[tpig % nb]; } @@ -307,26 +307,26 @@ kernel void kernel_sum_rows( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tpig[[thread_position_in_grid]]) { int64_t i3 = tpig.z; int64_t i2 = tpig.y; @@ -920,14 +920,21 @@ kernel void kernel_mul_mv_q4_0_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -939,14 
+946,21 @@ kernel void kernel_mul_mv_q4_1_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -958,14 +972,21 @@ kernel void kernel_mul_mv_q5_0_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -977,14 +998,21 @@ kernel void kernel_mul_mv_q5_1_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -1071,12 +1099,19 @@ kernel void kernel_mul_mv_q8_0_f32( constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne10, + constant int64_t & ne11, constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint 
tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -1182,8 +1217,8 @@ kernel void kernel_mul_mv_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f32_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1209,8 +1244,8 @@ kernel void kernel_mul_mv_f16_f16( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1346,8 +1381,8 @@ kernel void kernel_mul_mv_f16_f32_1row( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f16_f32_1row_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1452,8 +1487,8 @@ kernel void kernel_mul_mv_f16_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f16_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1478,8 +1513,8 @@ kernel void kernel_mul_mv_f16_f32_l4( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1543,7 +1578,8 @@ kernel void kernel_alibi_f32( const int64_t i3 = n / (ne2*ne1*ne0); const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; - const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + //const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + const int64_t k = i3*ne3 + i2; float m_k; @@ -2410,22 +2446,6 @@ typedef struct { } block_q6_K; // 210 bytes / block -static inline uchar4 get_scale_min_k4(int j, device const uint8_t * q) { - uchar4 r; - if (j < 4) { - r[0] = q[j+0] & 63; - r[2] = q[j+1] & 63; - r[1] = q[j+4] & 63; - r[3] = q[j+5] & 63; - } else { - r[0] = (q[j+4] & 0xF) | ((q[j-4] >> 6) << 4); - r[2] = (q[j+5] & 0xF) | ((q[j-3] >> 6) << 4); - r[1] = (q[j+4] >> 4) | ((q[j-0] >> 6) << 4); - r[3] = (q[j+5] >> 4) | ((q[j+1] >> 6) << 4); - } - return r; -} - //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -2584,14 +2604,21 @@ kernel void kernel_mul_mv_q2_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant 
int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2841,14 +2868,21 @@ kernel void kernel_mul_mv_q3_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2984,8 +3018,8 @@ void kernel_mul_mv_q4_K_f32_impl( constant uint & r2, constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int ix = tiisg/4; // 0...7 const int it = tiisg%4; // 0...3 @@ -2994,7 +3028,7 @@ void kernel_mul_mv_q4_K_f32_impl( const int r0 = tgpig.x; const int r1 = tgpig.y; const int im = tgpig.z; - const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int first_row = r0 * N_DST; const int ib_row = first_row * nb; const uint i12 = im%ne12; @@ -3060,7 +3094,7 @@ void kernel_mul_mv_q4_K_f32_impl( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0+ im*ne0*ne1 + first_row + row] = all_sum; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -3072,14 +3106,21 @@ kernel void kernel_mul_mv_q4_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3271,14 
+3312,21 @@ kernel void kernel_mul_mv_q5_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3398,14 +3446,21 @@ kernel void kernel_mul_mv_q6_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3523,7 +3578,7 @@ void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg device const int8_t * qs = ((device const int8_t *)xb->qs); const half d = xb->d; - for (int i=0;i<16;i++) { + for (int i = 0; i < 16; i++) { reg[i/4][i%4] = (qs[i + 16*il] * d); } } @@ -3792,12 +3847,12 @@ void kernel_mul_mm_impl(device const uchar * src0, device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -3924,12 +3979,12 @@ kernel void kernel_mul_mm(device const uchar * src0, device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -3965,19 +4020,19 @@ kernel void kernel_mul_mm_id( device const uchar * ids, device const uchar * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + 
constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4070,12 +4125,12 @@ typedef void (mat_mm_t)( device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -4104,19 +4159,19 @@ typedef void (mat_mm_id_t)( device const uchar * ids, device const uchar * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4153,7 +4208,7 @@ kernel void kernel_mul_mv_id_f32_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4169,7 +4224,7 @@ kernel void kernel_mul_mv_id_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4222,7 +4277,7 @@ kernel void kernel_mul_mv_id_f16_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4238,7 +4293,7 @@ kernel void kernel_mul_mv_id_f16_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4291,7 +4346,7 @@ kernel void kernel_mul_mv_id_q8_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4307,7 +4362,7 @@ kernel void kernel_mul_mv_id_q8_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4354,7 +4409,7 @@ kernel void kernel_mul_mv_id_q4_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4370,7 +4425,7 @@ kernel void kernel_mul_mv_id_q4_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & 
ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4417,7 +4472,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4433,7 +4488,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4480,7 +4535,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4496,7 +4551,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4543,7 +4598,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4559,7 +4614,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4606,7 +4661,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4622,7 +4677,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4669,7 +4724,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4685,7 +4740,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4732,7 +4787,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4748,7 +4803,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4795,7 +4850,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4811,7 +4866,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( constant uint64_t & nb12, constant int64_t & ne0, 
constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4858,7 +4913,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4874,7 +4929,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index b115299c0..eff063b2d 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -15,19 +15,18 @@ #include #include - static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float max = 1.0f) { size_t size = ggml_nelements(tensor); std::vector data(size); #if 0 - std::default_random_engine generator(rd()); + static std::default_random_engine generator(1234); std::uniform_real_distribution distribution(min, max); for (size_t i = 0; i < size; i++) { data[i] = distribution(generator); } -#endif +#else auto init_thread = [&](size_t start, size_t end) { std::random_device rd; std::default_random_engine generator(rd()); @@ -49,6 +48,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m for (auto & t : threads) { t.join(); } +#endif if (tensor->type == GGML_TYPE_F32 || tensor->type == GGML_TYPE_I32) { ggml_backend_tensor_set(tensor, data.data(), 0, size * sizeof(float)); @@ -437,7 +437,7 @@ struct test_case { double err = nmse(f1.data(), f2.data(), f1.size()); if (err > ud->max_err) { printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); - //for (int i = 0; i < f1.size(); i++) { + //for (int i = 0; i < (int) f1.size(); i++) { // printf("%5d %9.6f %9.6f, diff = %9.6f\n", i, f1[i], f2[i], f1[i] - f2[i]); //} //printf("\n"); From 775ac8712a7b42cfead2585f42cec0dfd56644ab Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 2 Jan 2024 10:16:55 +0100 Subject: [PATCH 189/811] finetune: fix typo in README.md (#4733) Signed-off-by: Daniel Bevenius --- examples/finetune/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/finetune/README.md b/examples/finetune/README.md index a2a2c1281..a884706c5 100644 --- a/examples/finetune/README.md +++ b/examples/finetune/README.md @@ -61,7 +61,7 @@ For example to apply 40% of the 'shakespeare' LORA adapter, 80% of the 'bible' L --lora lora-open-llama-3b-v2-q8_0-yet-another-one-LATEST.bin ``` -The scale numbers don't need to add up to one, and you can also use numbers greater than 1 to further increase the influence of an adapter. But making the values to big will sometimes result in worse output. Play around to find good values. +The scale numbers don't need to add up to one, and you can also use numbers greater than 1 to further increase the influence of an adapter. But making the values too big will sometimes result in worse output. Play around to find good values. Gradient checkpointing reduces the memory requirements by ~50% but increases the runtime. If you have enough RAM, you can make finetuning a bit faster by disabling checkpointing with `--no-checkpointing`. From 26f3071d714f0b27ad7f021a46a66a1085480258 Mon Sep 17 00:00:00 2001 From: "Nam D. 
Tran" <42194884+namtranase@users.noreply.github.com> Date: Tue, 2 Jan 2024 16:23:38 +0700 Subject: [PATCH 190/811] py : re-enable mmap in convert hf (#4732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * update: awq support llama-7b model * update: change order * update: benchmark results for llama2-7b * update: mistral 7b v1 benchmark * update: support 4 models * fix: Readme * update: ready for PR * update: readme * fix: readme * update: change order import * black * format code * update: work for bot mpt and awqmpt * update: readme * Rename to llm_build_ffn_mpt_awq * Formatted other files * Fixed params count * fix: remove code * update: more detail for mpt * fix: readme * fix: readme * update: change folder architecture * fix: common.cpp * fix: readme * fix: remove ggml_repeat * update: cicd * update: cicd * uppdate: remove use_awq arg * update: readme * llama : adapt plamo to new ffn ggml-ci * fix: update torch version --------- Co-authored-by: Trần Đức Nam Co-authored-by: Le Hoang Anh Co-authored-by: Georgi Gerganov --- awq-py/requirements.txt | 2 +- convert-hf-to-gguf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/awq-py/requirements.txt b/awq-py/requirements.txt index 5fe604329..991896116 100644 --- a/awq-py/requirements.txt +++ b/awq-py/requirements.txt @@ -1,2 +1,2 @@ -torch>=2.0.0 +torch>=2.1.1 transformers>=4.32.0 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 51724c0df..203eaf64b 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -59,7 +59,7 @@ class Model: from safetensors import safe_open ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) else: - ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", weights_only=True)) + ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", mmap=True, weights_only=True)) with ctx as model_part: for name in model_part.keys(): From 5d7002d4372ebf107cfaf46fcd90df27b204f330 Mon Sep 17 00:00:00 2001 From: minarchist Date: Tue, 2 Jan 2024 04:38:15 -0600 Subject: [PATCH 191/811] server : add --override-kv parameter (#4710) * Changes to server to allow metadata override * documentation * flake.nix: expose full scope in legacyPackages * flake.nix: rocm not yet supported on aarch64, so hide the output * flake.nix: expose checks * workflows: nix-ci: init; build flake outputs * workflows: nix-ci: add a job for eval * workflows: weekly `nix flake update` * workflows: nix-flakestry: drop tag filters ...and add a job for flakehub.com * workflows: nix-ci: add a qemu job for jetsons * flake.nix: suggest the binary caches * flake.lock: update to a commit recently cached by nixpkgs-cuda-ci --------- Co-authored-by: John Co-authored-by: Someone Serge --- examples/server/server.cpp | 51 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 52d9b9768..b77d3f079 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2016,6 +2016,10 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf("\n"); + printf(" --override-kv KEY=TYPE:VALUE\n"); + printf(" advanced option to override model metadata by key. 
may be specified multiple times.\n"); + printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf("\n"); } static void server_params_parse(int argc, char **argv, server_params &sparams, @@ -2379,6 +2383,49 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--override-kv") + { + if (++i >= argc) { + invalid_param = true; + break; + } + char * sep = strchr(argv[i], '='); + if (sep == nullptr || sep - argv[i] >= 128) { + fprintf(stderr, "error: Malformed KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + struct llama_model_kv_override kvo; + std::strncpy(kvo.key, argv[i], sep - argv[i]); + kvo.key[sep - argv[i]] = 0; + sep++; + if (strncmp(sep, "int:", 4) == 0) { + sep += 4; + kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.int_value = std::atol(sep); + } else if (strncmp(sep, "float:", 6) == 0) { + sep += 6; + kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.float_value = std::atof(sep); + } else if (strncmp(sep, "bool:", 5) == 0) { + sep += 5; + kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + if (std::strcmp(sep, "true") == 0) { + kvo.bool_value = true; + } else if (std::strcmp(sep, "false") == 0) { + kvo.bool_value = false; + } else { + fprintf(stderr, "error: Invalid boolean value for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + } else { + fprintf(stderr, "error: Invalid type for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + params.kv_overrides.push_back(kvo); + } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); @@ -2386,6 +2433,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, exit(1); } } + if (!params.kv_overrides.empty()) { + params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.back().key[0] = 0; + } if (invalid_param) { From 32866c5edde402f42ff4233bb89dcfcede34fd22 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 13:28:15 +0200 Subject: [PATCH 192/811] editorconfig : fix whitespace and indentation #4710 --- examples/server/server.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index b77d3f079..e45ea809a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2383,8 +2383,8 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } - else if (arg == "--override-kv") - { + else if (arg == "--override-kv") + { if (++i >= argc) { invalid_param = true; break; From 83e633c27efdf0eb0ba54249e784b0ea760b1007 Mon Sep 17 00:00:00 2001 From: postmasters Date: Tue, 2 Jan 2024 03:51:28 -0800 Subject: [PATCH 193/811] llama : differentiate the KV dims in the attention (#4657) * Add n_key_dim and n_value_dim Some models use values that are not derived from `n_embd`. Also remove `n_embd_head` and `n_embd_gqa` because it is not clear which "head" is referred to (key or value). Fix issue #4648. 
* Fix `llm_build_kqv` to use `n_value_gqa` * Rebase * Rename variables * Fix llm_build_kqv to be more generic wrt n_embd_head_k * Update default values for n_embd_head_k and n_embd_head_v Co-authored-by: Georgi Gerganov * Fix llm_load_tensors: the asserts were not backcompat --------- Co-authored-by: Georgi Gerganov --- gguf-py/gguf/constants.py | 2 + gguf-py/gguf/gguf_writer.py | 6 + llama.cpp | 271 +++++++++++++++++++++++++----------- 3 files changed, 201 insertions(+), 78 deletions(-) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index ae62cc575..f0a1c51f8 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -46,6 +46,8 @@ class Keys: HEAD_COUNT_KV = "{arch}.attention.head_count_kv" MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias" CLAMP_KQV = "{arch}.attention.clamp_kqv" + KEY_LENGTH = "{arch}.attention.key_length" + VALUE_LENGTH = "{arch}.attention.value_length" LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 73e021607..d93aaa877 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -333,6 +333,12 @@ class GGUFWriter: def add_head_count_kv(self, count: int) -> None: self.add_uint32(Keys.Attention.HEAD_COUNT_KV.format(arch=self.arch), count) + def add_key_length(self, length: int) -> None: + self.add_uint32(Keys.Attention.KEY_LENGTH.format(arch=self.arch), length) + + def add_value_length(self, length: int) -> None: + self.add_uint32(Keys.Attention.VALUE_LENGTH.format(arch=self.arch), length) + def add_max_alibi_bias(self, bias: float) -> None: self.add_float32(Keys.Attention.MAX_ALIBI_BIAS.format(arch=self.arch), bias) diff --git a/llama.cpp b/llama.cpp index a833d4c15..704464039 100644 --- a/llama.cpp +++ b/llama.cpp @@ -245,6 +245,8 @@ enum llm_kv { LLM_KV_ATTENTION_HEAD_COUNT_KV, LLM_KV_ATTENTION_MAX_ALIBI_BIAS, LLM_KV_ATTENTION_CLAMP_KQV, + LLM_KV_ATTENTION_KEY_LENGTH, + LLM_KV_ATTENTION_VALUE_LENGTH, LLM_KV_ATTENTION_LAYERNORM_EPS, LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, @@ -297,6 +299,8 @@ static std::map LLM_KV_NAMES = { { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, { LLM_KV_ATTENTION_MAX_ALIBI_BIAS, "%s.attention.max_alibi_bias" }, { LLM_KV_ATTENTION_CLAMP_KQV, "%s.attention.clamp_kqv" }, + { LLM_KV_ATTENTION_KEY_LENGTH, "%s.attention.key_length" }, + { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" }, { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" }, { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" }, @@ -1284,6 +1288,8 @@ struct llama_hparams { uint32_t n_head_kv; uint32_t n_layer; uint32_t n_rot; + uint32_t n_embd_head_k; // dimension of keys (d_k). 
d_q is assumed to be the same, but there are n_head q heads, and only n_head_kv k-v heads + uint32_t n_embd_head_v; // dimension of values (d_v) aka n_embd_head uint32_t n_ff; uint32_t n_expert = 0; uint32_t n_expert_used = 0; @@ -1310,6 +1316,8 @@ struct llama_hparams { if (this->n_head_kv != other.n_head_kv) return true; if (this->n_layer != other.n_layer) return true; if (this->n_rot != other.n_rot) return true; + if (this->n_embd_head_k != other.n_embd_head_k) return true; + if (this->n_embd_head_v != other.n_embd_head_v) return true; if (this->n_ff != other.n_ff) return true; if (this->n_expert != other.n_expert) return true; if (this->n_expert_used != other.n_expert_used) return true; @@ -1331,12 +1339,12 @@ struct llama_hparams { return n_head/n_head_kv; } - uint32_t n_embd_head() const { - return n_embd/n_head; + uint32_t n_embd_k_gqa() const { // dimension of key embeddings across all k-v heads + return n_embd_head_k * n_head_kv; } - uint32_t n_embd_gqa() const { - return n_embd/n_gqa(); + uint32_t n_embd_v_gqa() const { // dimension of value embeddings across all k-v heads + return n_embd_head_v * n_head_kv; } }; @@ -1645,8 +1653,9 @@ static bool llama_kv_cache_init( uint32_t n_ctx, int n_gpu_layers, bool offload) { - const uint32_t n_embd = hparams.n_embd_gqa(); - const uint32_t n_layer = hparams.n_layer; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_layer = hparams.n_layer; cache.has_shift = false; @@ -1677,8 +1686,8 @@ static bool llama_kv_cache_init( const int i_gpu_start = (int) n_layer - n_gpu_layers; for (int i = 0; i < (int) n_layer; i++) { - ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd*n_ctx); + ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd_k_gqa*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd_v_gqa*n_ctx); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); @@ -2672,6 +2681,12 @@ static void llm_load_hparams( // gpt-j n_rot = rotary_dim } + hparams.n_embd_head_k = hparams.n_embd / hparams.n_head; + ml.get_key(LLM_KV_ATTENTION_KEY_LENGTH, hparams.n_embd_head_k, false); + + hparams.n_embd_head_v = hparams.n_embd / hparams.n_head; + ml.get_key(LLM_KV_ATTENTION_VALUE_LENGTH, hparams.n_embd_head_v, false); + // arch-specific KVs switch (model.arch) { case LLM_ARCH_LLAMA: @@ -3082,8 +3097,12 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_head = %u\n", __func__, hparams.n_head); LLAMA_LOG_INFO("%s: n_head_kv = %u\n", __func__, hparams.n_head_kv); LLAMA_LOG_INFO("%s: n_layer = %u\n", __func__, hparams.n_layer); - LLAMA_LOG_INFO("%s: n_rot = %u\n", __func__, hparams.n_rot); // a.k.a. 
n_embd_head, n_head_dim + LLAMA_LOG_INFO("%s: n_rot = %u\n", __func__, hparams.n_rot); + LLAMA_LOG_INFO("%s: n_embd_head_k = %u\n", __func__, hparams.n_embd_head_k); + LLAMA_LOG_INFO("%s: n_embd_head_v = %u\n", __func__, hparams.n_embd_head_v); LLAMA_LOG_INFO("%s: n_gqa = %u\n", __func__, hparams.n_gqa()); + LLAMA_LOG_INFO("%s: n_embd_k_gqa = %u\n", __func__, hparams.n_embd_k_gqa()); + LLAMA_LOG_INFO("%s: n_embd_v_gqa = %u\n", __func__, hparams.n_embd_v_gqa()); LLAMA_LOG_INFO("%s: f_norm_eps = %.1e\n", __func__, hparams.f_norm_eps); LLAMA_LOG_INFO("%s: f_norm_rms_eps = %.1e\n", __func__, hparams.f_norm_rms_eps); LLAMA_LOG_INFO("%s: f_clamp_kqv = %.1e\n", __func__, hparams.f_clamp_kqv); @@ -3173,10 +3192,11 @@ static bool llm_load_tensors( // create tensors for the weights { - const int64_t n_embd = hparams.n_embd; - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - const int64_t n_layer = hparams.n_layer; - const int64_t n_vocab = hparams.n_vocab; + const int64_t n_embd = hparams.n_embd; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const int64_t n_layer = hparams.n_layer; + const int64_t n_vocab = hparams.n_vocab; const auto tn = LLM_TN(model.arch); switch (model.arch) { @@ -3202,7 +3222,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3270,7 +3293,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3318,7 +3344,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3368,7 +3397,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3420,7 +3452,11 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + const int i_gpu_start = n_layer - n_gpu_layers; model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { @@ -3469,7 +3505,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, 
tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3520,7 +3559,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3567,7 +3609,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3665,7 +3710,10 @@ static bool llm_load_tensors( model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3714,7 +3762,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3761,7 +3812,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -4000,8 +4054,8 @@ static struct ggml_tensor * llm_build_inp_embd( return inpL; } -// Persimmon: n_rot = n_embd_head/2 -// Other: n_rot = n_embd_head +// Persimmon: n_rot = n_embd_head_k/2 +// Other: n_rot = n_embd_head_k static void llm_build_k_shift( struct ggml_context * ctx, const llama_hparams & hparams, @@ -4014,17 +4068,17 @@ static void llm_build_k_shift( float freq_base, float freq_scale, const llm_build_cb & cb) { - const int64_t n_layer = hparams.n_layer; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - const int64_t n_embd_head = hparams.n_embd_head(); - const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; - const float ext_factor = cparams.yarn_ext_factor; - const float attn_factor = cparams.yarn_attn_factor; - const float beta_fast = cparams.yarn_beta_fast; - const float beta_slow = cparams.yarn_beta_slow; + const int64_t n_layer = hparams.n_layer; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const 
int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; + const float ext_factor = cparams.yarn_ext_factor; + const float attn_factor = cparams.yarn_attn_factor; + const float beta_fast = cparams.yarn_beta_fast; + const float beta_slow = cparams.yarn_beta_slow; - GGML_ASSERT(n_embd_head % n_rot == 0); + GGML_ASSERT(n_embd_head_k % n_rot == 0); struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); cb(K_shift, "K_shift", -1); @@ -4042,9 +4096,9 @@ static void llm_build_k_shift( // we rotate only the first n_rot dimensions ggml_rope_custom_inplace(ctx, ggml_view_3d(ctx, kv.k_l[il], - n_embd_head, n_head_kv, n_ctx, - ggml_row_size(kv.k_l[il]->type, n_embd_head), - ggml_row_size(kv.k_l[il]->type, n_embd_gqa), + n_embd_head_k, n_head_kv, n_ctx, + ggml_row_size(kv.k_l[il]->type, n_embd_head_k), + ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa), 0), K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); @@ -4065,18 +4119,19 @@ static void llm_build_kv_store( int32_t kv_head, const llm_build_cb & cb, int64_t il) { - const int64_t n_embd_gqa = hparams.n_embd_gqa(); + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); // compute the transposed [n_tokens, n_embd] V matrix - struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_gqa, n_tokens)); + struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_v_gqa, n_tokens)); //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not needed cb(v_cur_t, "v_cur_t", il); - struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_gqa, - (ggml_row_size(kv.k_l[il]->type, n_embd_gqa))*kv_head); + struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_k_gqa, + (ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa))*kv_head); cb(k_cache_view, "k_cache_view", il); - struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_gqa, + struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_v_gqa, ( n_ctx)*ggml_element_size(kv.v_l[il]), (kv_head)*ggml_element_size(kv.v_l[il])); cb(v_cache_view, "v_cache_view", il); @@ -4226,20 +4281,20 @@ static struct ggml_tensor * llm_build_kqv( float kq_scale, const llm_build_cb & cb, int il) { - const int64_t n_embd = hparams.n_embd; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); + const int64_t n_head = hparams.n_head; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_head_v = hparams.n_embd_head_v; struct ggml_tensor * q = ggml_permute(ctx, q_cur, 0, 2, 1, 3); cb(q, "q", il); struct ggml_tensor * k = ggml_view_3d(ctx, kv.k_l[il], - n_embd_head, n_kv, n_head_kv, - ggml_row_size(kv.k_l[il]->type, n_embd_gqa), - ggml_row_size(kv.k_l[il]->type, n_embd_head), + n_embd_head_k, n_kv, n_head_kv, + ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa), + ggml_row_size(kv.k_l[il]->type, n_embd_head_k), 0); cb(k, "k", il); @@ -4278,9 +4333,9 @@ static struct ggml_tensor * llm_build_kqv( // split cached v into n_head heads struct ggml_tensor * v = ggml_view_3d(ctx, kv.v_l[il], - n_kv, n_embd_head, n_head_kv, + n_kv, n_embd_head_v, n_head_kv, 
ggml_element_size(kv.v_l[il])*n_ctx, - ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head, + ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head_v, 0); cb(v, "v", il); @@ -4290,7 +4345,7 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); cb(kqv_merged, "kqv_merged", il); - struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd, n_tokens); + struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd_head_k*n_head, n_tokens); cb(cur, "kqv_merged_cont", il); cur = ggml_mul_mat(ctx, wo, cur); @@ -4317,8 +4372,10 @@ struct llm_build_context { const int64_t n_ctx; // user-specified context size (can be different from n_ctx_train) const int64_t n_head; const int64_t n_head_kv; - const int64_t n_embd_head; - const int64_t n_embd_gqa; + const int64_t n_embd_head_k; + const int64_t n_embd_k_gqa; + const int64_t n_embd_head_v; + const int64_t n_embd_v_gqa; const int64_t n_expert; const int64_t n_expert_used; @@ -4360,8 +4417,10 @@ struct llm_build_context { n_ctx (cparams.n_ctx), n_head (hparams.n_head), n_head_kv (hparams.n_head_kv), - n_embd_head (hparams.n_embd_head()), - n_embd_gqa (hparams.n_embd_gqa()), + n_embd_head_k (hparams.n_embd_head_k), + n_embd_k_gqa (hparams.n_embd_k_gqa()), + n_embd_head_v (hparams.n_embd_head_v), + n_embd_v_gqa (hparams.n_embd_v_gqa()), n_expert (hparams.n_expert), n_expert_used (hparams.n_expert_used), freq_base (cparams.rope_freq_base), @@ -4404,6 +4463,8 @@ struct llm_build_context { struct ggml_cgraph * build_llama() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; @@ -4588,6 +4649,9 @@ struct llm_build_context { struct ggml_cgraph * build_baichuan() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4705,6 +4769,11 @@ struct llm_build_context { struct ggml_cgraph * build_falcon() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4824,6 +4893,11 @@ struct llm_build_context { struct ggml_cgraph * build_starcoder() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * pos; struct ggml_tensor * inpL; @@ -4920,7 +4994,12 @@ struct llm_build_context { struct ggml_cgraph * build_persimmon() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); - const int64_t n_rot = n_embd_head / 2; + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + + const int64_t n_rot = n_embd_head_k / 2; struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5129,6 +5208,11 @@ struct llm_build_context { struct 
ggml_cgraph * build_refact() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5217,6 +5301,11 @@ struct llm_build_context { struct ggml_cgraph * build_bloom() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5308,6 +5397,11 @@ struct llm_build_context { struct ggml_cgraph * build_mpt() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5403,6 +5497,9 @@ struct llm_build_context { struct ggml_cgraph * build_stablelm() { struct ggml_cgraph * gf = ggml_new_graph(ctx0); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5513,6 +5610,9 @@ struct llm_build_context { struct ggml_cgraph * build_qwen() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5624,6 +5724,11 @@ struct llm_build_context { struct ggml_cgraph * build_phi2() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * attn_norm_output; struct ggml_tensor * ffn_output; @@ -5736,6 +5841,9 @@ struct llm_build_context { struct ggml_cgraph * build_plamo() { struct ggml_cgraph * gf = ggml_new_graph(ctx0); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5840,6 +5948,11 @@ struct llm_build_context { struct ggml_cgraph * build_gpt2() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * pos; struct ggml_tensor * inpL; @@ -9627,8 +9740,8 @@ struct llama_context * llama_new_context_with_model( const ggml_type type_k = params.type_k; const ggml_type type_v = params.type_v; - GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_k) == 0); - GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_v) == 0); + GGML_ASSERT(hparams.n_embd_head_k % ggml_blck_size(type_k) == 0); + GGML_ASSERT(hparams.n_embd_head_v % ggml_blck_size(type_v) == 0); // reserve memory for context buffers if (!hparams.vocab_only) { @@ -10172,9 +10285,10 @@ static void 
llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const auto n_layer = hparams.n_layer; - const auto n_embd = hparams.n_embd_gqa(); - const auto n_ctx = cparams.n_ctx; + const auto n_layer = hparams.n_layer; + const auto n_embd_k_gqa = hparams.n_embd_k_gqa(); + const auto n_embd_v_gqa = hparams.n_embd_v_gqa(); + const auto n_ctx = cparams.n_ctx; const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); const uint32_t kv_head = kv_self.head; @@ -10196,15 +10310,15 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat std::vector vout2d(n_layer); for (int il = 0; il < (int) n_layer; ++il) { - kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); + vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd, kv_head, - elt_size*n_embd, 0); + n_embd_k_gqa, kv_head, + elt_size*n_embd_k_gqa, 0); ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd, + kv_head, n_embd_v_gqa, elt_size*n_ctx, 0); ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); @@ -10311,9 +10425,10 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const int n_layer = hparams.n_layer; - const int n_embd = hparams.n_embd_gqa(); - const int n_ctx = cparams.n_ctx; + const int n_layer = hparams.n_layer; + const int n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int n_embd_v_gqa = hparams.n_embd_v_gqa(); + const int n_ctx = cparams.n_ctx; size_t kv_buf_size; uint32_t kv_head; @@ -10337,15 +10452,15 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { std::vector vin2d(n_layer); for (int il = 0; il < n_layer; ++il) { - kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); + vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd, kv_head, - elt_size*n_embd, 0); + n_embd_k_gqa, kv_head, + elt_size*n_embd_k_gqa, 0); ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd, + kv_head, n_embd_v_gqa, elt_size*n_ctx, 0); ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); From 0040d42eeb237197054cc7790df5776eacfa608e Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Tue, 2 Jan 2024 06:15:16 -0800 Subject: [PATCH 194/811] llama : replace all API facing `int`'s with `int32_t` (#4577) * replaced all API facing `int`'s with `int32_t` * formatting and missed `int` in `llama_token_to_piece` --- llama.cpp | 50 +++++++++++++++++++++---------------------- llama.h | 63 +++++++++++++++++++++++++++---------------------------- 2 files changed, 56 insertions(+), 57 deletions(-) diff --git a/llama.cpp b/llama.cpp index 704464039..2e34cb395 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8030,7 +8030,7 @@ void llama_sample_softmax(struct llama_context * ctx, llama_token_data_array * c } } 
-void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int k, size_t min_keep) { +void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int32_t k, size_t min_keep) { const int64_t t_start_sample_us = ggml_time_us(); k = std::max(k, (int) min_keep); @@ -8390,7 +8390,7 @@ void llama_sample_classifier_free_guidance( } } -llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int m, float * mu) { +llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int32_t m, float * mu) { GGML_ASSERT(ctx); auto N = float(llama_n_vocab(llama_get_model(ctx))); @@ -9598,7 +9598,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { return result; } -int llama_max_devices(void) { +int32_t llama_max_devices(void) { return LLAMA_MAX_DEVICES; } @@ -9909,15 +9909,15 @@ enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } -int llama_n_vocab(const struct llama_model * model) { +int32_t llama_n_vocab(const struct llama_model * model) { return model->vocab.id_to_token.size(); } -int llama_n_ctx_train(const struct llama_model * model) { +int32_t llama_n_ctx_train(const struct llama_model * model) { return model->hparams.n_ctx_train; } -int llama_n_embd(const struct llama_model * model) { +int32_t llama_n_embd(const struct llama_model * model) { return model->hparams.n_embd; } @@ -9925,7 +9925,7 @@ float llama_rope_freq_scale_train(const struct llama_model * model) { return model->hparams.rope_freq_scale_train; } -int llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size) { +int32_t llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size) { const auto & it = model->gguf_kv.find(key); if (it == model->gguf_kv.end()) { if (buf_size > 0) { @@ -9936,11 +9936,11 @@ int llama_model_meta_val_str(const struct llama_model * model, const char * key, return snprintf(buf, buf_size, "%s", it->second.c_str()); } -int llama_model_meta_count(const struct llama_model * model) { +int32_t llama_model_meta_count(const struct llama_model * model) { return (int)model->gguf_kv.size(); } -int llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { +int32_t llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { if (i < 0 || i >= (int)model->gguf_kv.size()) { if (buf_size > 0) { buf[0] = '\0'; @@ -9952,7 +9952,7 @@ int llama_model_meta_key_by_index(const struct llama_model * model, int i, char return snprintf(buf, buf_size, "%s", it->first.c_str()); } -int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { +int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size) { if (i < 0 || i >= (int)model->gguf_kv.size()) { if (buf_size > 0) { buf[0] = '\0'; @@ -9964,7 +9964,7 @@ int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, c return snprintf(buf, buf_size, "%s", it->second.c_str()); } -int llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { +int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { return snprintf(buf, buf_size, "%s %s %s", 
llama_model_arch_name(model->arch).c_str(), llama_model_type_name(model->type), @@ -9991,7 +9991,7 @@ struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const ch return ggml_get_tensor(model->ctx, name); } -int llama_model_quantize( +uint32_t llama_model_quantize( const char * fname_inp, const char * fname_out, const llama_model_quantize_params * params) { @@ -10004,7 +10004,7 @@ int llama_model_quantize( } } -int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, int n_threads) { +int32_t llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { try { return llama_apply_lora_from_file_internal(ctx->model, path_lora, scale, path_base_model, n_threads); } catch (const std::exception & err) { @@ -10013,7 +10013,7 @@ int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lor } } -int llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, int n_threads) { +int32_t llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { try { return llama_apply_lora_from_file_internal(*model, path_lora, scale, path_base_model, n_threads); } catch (const std::exception & err) { @@ -10111,7 +10111,7 @@ void llama_kv_cache_view_update(const struct llama_context * ctx, struct llama_k } } -int llama_get_kv_cache_token_count(const struct llama_context * ctx) { +int32_t llama_get_kv_cache_token_count(const struct llama_context * ctx) { int result = 0; for (uint32_t i = 0; i < ctx->kv_self.size; i++) { @@ -10121,7 +10121,7 @@ int llama_get_kv_cache_token_count(const struct llama_context * ctx) { return result; } -int llama_get_kv_cache_used_cells(const struct llama_context * ctx) { +int32_t llama_get_kv_cache_used_cells(const struct llama_context * ctx) { return ctx->kv_self.used; } @@ -10603,7 +10603,7 @@ int llama_eval( struct llama_context * ctx, llama_token * tokens, int32_t n_tokens, - int n_past) { + int32_t n_past) { llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); const int ret = llama_decode_internal(*ctx, llama_batch_get_one(tokens, n_tokens, n_past, 0)); @@ -10618,7 +10618,7 @@ int llama_eval_embd( struct llama_context * ctx, float * embd, int32_t n_tokens, - int n_past) { + int32_t n_past) { llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); llama_batch batch = { n_tokens, nullptr, embd, nullptr, nullptr, nullptr, nullptr, n_past, 1, 0, }; @@ -10689,7 +10689,7 @@ void llama_batch_free(struct llama_batch batch) { if (batch.logits) free(batch.logits); } -int llama_decode( +int32_t llama_decode( struct llama_context * ctx, struct llama_batch batch) { const int ret = llama_decode_internal(*ctx, batch); @@ -10737,11 +10737,11 @@ llama_token llama_token_nl(const struct llama_model * model) { return model->vocab.linefeed_id; } -int llama_add_bos_token(const struct llama_model * model) { +int32_t llama_add_bos_token(const struct llama_model * model) { return model->vocab.special_add_bos; } -int llama_add_eos_token(const struct llama_model * model) { +int32_t llama_add_eos_token(const struct llama_model * model) { return model->vocab.special_add_eos; } @@ -10761,12 +10761,12 @@ llama_token llama_token_eot(const struct llama_model * model) { return model->vocab.special_eot_id; } -int llama_tokenize( +int32_t llama_tokenize( const struct 
llama_model * model, const char * text, - int text_len, + int32_t text_len, llama_token * tokens, - int n_max_tokens, + int32_t n_max_tokens, bool add_bos, bool special) { auto res = llama_tokenize_internal(model->vocab, std::string(text, text_len), add_bos, special); @@ -10794,7 +10794,7 @@ static std::string llama_decode_text(const std::string & text) { } // does not write null-terminator to buf -int llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int length) { +int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length) { if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { case LLAMA_VOCAB_TYPE_SPM: { diff --git a/llama.h b/llama.h index af76bae2d..461d4604a 100644 --- a/llama.h +++ b/llama.h @@ -226,7 +226,7 @@ extern "C" { // model quantization parameters typedef struct llama_model_quantize_params { - int nthread; // number of threads to use for quantizing, if <=0 will use std::thread::hardware_concurrency() + int32_t nthread; // number of threads to use for quantizing, if <=0 will use std::thread::hardware_concurrency() enum llama_ftype ftype; // quantize to this llama_ftype bool allow_requantize; // allow quantizing non-f32/f16 tensors bool quantize_output_tensor; // quantize output.weight @@ -310,21 +310,20 @@ extern "C" { LLAMA_API int64_t llama_time_us(void); - LLAMA_API int llama_max_devices (void); + LLAMA_API int32_t llama_max_devices(void); LLAMA_API bool llama_mmap_supported (void); LLAMA_API bool llama_mlock_supported(void); LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); - // TODO: become more consistent with returned int types across the API LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); LLAMA_API enum llama_vocab_type llama_vocab_type(const struct llama_model * model); - LLAMA_API int llama_n_vocab (const struct llama_model * model); - LLAMA_API int llama_n_ctx_train(const struct llama_model * model); - LLAMA_API int llama_n_embd (const struct llama_model * model); + LLAMA_API int32_t llama_n_vocab (const struct llama_model * model); + LLAMA_API int32_t llama_n_ctx_train(const struct llama_model * model); + LLAMA_API int32_t llama_n_embd (const struct llama_model * model); // Get the model's RoPE frequency scaling factor LLAMA_API float llama_rope_freq_scale_train(const struct llama_model * model); @@ -335,19 +334,19 @@ extern "C" { // - GGUF array values are not supported by these functions // Get metadata value as a string by key name - LLAMA_API int llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size); // Get the number of metadata key/value pairs - LLAMA_API int llama_model_meta_count(const struct llama_model * model); + LLAMA_API int32_t llama_model_meta_count(const struct llama_model * model); // Get metadata key name by index - LLAMA_API int llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_meta_key_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size); // Get metadata value as a string by index - LLAMA_API int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size); + 
LLAMA_API int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size); // Get a string describing the model type - LLAMA_API int llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size); // Returns the total size of all the tensors in the model in bytes LLAMA_API uint64_t llama_model_size(const struct llama_model * model); @@ -359,7 +358,7 @@ extern "C" { LLAMA_API struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const char * name); // Returns 0 on success - LLAMA_API int llama_model_quantize( + LLAMA_API uint32_t llama_model_quantize( const char * fname_inp, const char * fname_out, const llama_model_quantize_params * params); @@ -370,20 +369,20 @@ extern "C" { // The model needs to be reloaded before applying a new adapter, otherwise the adapter // will be applied on top of the previous one // Returns 0 on success - LLAMA_API DEPRECATED(int llama_apply_lora_from_file( + LLAMA_API DEPRECATED(int32_t llama_apply_lora_from_file( struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, - int n_threads), + int32_t n_threads), "use llama_model_apply_lora_from_file instead"); - LLAMA_API int llama_model_apply_lora_from_file( + LLAMA_API int32_t llama_model_apply_lora_from_file( const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, - int n_threads); + int32_t n_threads); // // KV cache @@ -439,10 +438,10 @@ extern "C" { // Returns the number of tokens in the KV cache (slow, use only for debug) // If a KV cell has multiple sequences assigned to it, it will be counted multiple times - LLAMA_API int llama_get_kv_cache_token_count(const struct llama_context * ctx); + LLAMA_API int32_t llama_get_kv_cache_token_count(const struct llama_context * ctx); // Returns the number of used KV cells (i.e. have at least one sequence assigned to them) - LLAMA_API int llama_get_kv_cache_used_cells(const struct llama_context * ctx); + LLAMA_API int32_t llama_get_kv_cache_used_cells(const struct llama_context * ctx); // Clear the KV cache LLAMA_API void llama_kv_cache_clear( @@ -533,7 +532,7 @@ extern "C" { struct llama_context * ctx, llama_token * tokens, int32_t n_tokens, - int n_past), + int32_t n_past), "use llama_decode() instead"); // Same as llama_eval, but use float matrix input directly. @@ -542,7 +541,7 @@ extern "C" { struct llama_context * ctx, float * embd, int32_t n_tokens, - int n_past), + int32_t n_past), "use llama_decode() instead"); // Return batch for single sequence of tokens starting at pos_0 @@ -574,7 +573,7 @@ extern "C" { // 0 - success // 1 - could not find a KV slot for the batch (try reducing the size of the batch or increase the context) // < 0 - error - LLAMA_API int llama_decode( + LLAMA_API int32_t llama_decode( struct llama_context * ctx, struct llama_batch batch); @@ -614,10 +613,10 @@ extern "C" { LLAMA_API llama_token llama_token_nl (const struct llama_model * model); // next-line // Returns -1 if unknown, 1 for true or 0 for false. - LLAMA_API int llama_add_bos_token(const struct llama_model * model); + LLAMA_API int32_t llama_add_bos_token(const struct llama_model * model); // Returns -1 if unknown, 1 for true or 0 for false. 
- LLAMA_API int llama_add_eos_token(const struct llama_model * model); + LLAMA_API int32_t llama_add_eos_token(const struct llama_model * model); // codellama infill tokens LLAMA_API llama_token llama_token_prefix(const struct llama_model * model); // Beginning of infill prefix @@ -635,12 +634,12 @@ extern "C" { /// @return Returns a negative number on failure - the number of tokens that would have been returned /// @param special Allow tokenizing special and/or control tokens which otherwise are not exposed and treated as plaintext. /// Does not insert a leading space. - LLAMA_API int llama_tokenize( + LLAMA_API int32_t llama_tokenize( const struct llama_model * model, const char * text, - int text_len, + int32_t text_len, llama_token * tokens, - int n_max_tokens, + int32_t n_max_tokens, bool add_bos, bool special); @@ -648,11 +647,11 @@ extern "C" { // Uses the vocabulary in the provided context. // Does not write null terminator to the buffer. // User code is responsible to remove the leading whitespace of the first non-BOS token when decoding multiple tokens. - LLAMA_API int llama_token_to_piece( + LLAMA_API int32_t llama_token_to_piece( const struct llama_model * model, llama_token token, char * buf, - int length); + int32_t length); // // Grammar @@ -704,7 +703,7 @@ extern "C" { LLAMA_API void llama_sample_top_k( struct llama_context * ctx, llama_token_data_array * candidates, - int k, + int32_t k, size_t min_keep); /// @details Nucleus sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751 @@ -763,7 +762,7 @@ extern "C" { llama_token_data_array * candidates, float tau, float eta, - int m, + int32_t m, float * mu); /// @details Mirostat 2.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words. @@ -836,8 +835,8 @@ extern "C" { llama_beam_search_callback_fn_t callback, void * callback_data, size_t n_beams, - int n_past, - int n_predict); + int32_t n_past, + int32_t n_predict); // Performance information LLAMA_API struct llama_timings llama_get_timings(struct llama_context * ctx); From 540938f8904707dd74cb3be18495f853b312e72f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 16:26:45 +0200 Subject: [PATCH 195/811] llama : llama_model_desc print number of experts --- llama.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 2e34cb395..3bb056dba 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9965,8 +9965,9 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 } int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { - return snprintf(buf, buf_size, "%s %s %s", + return snprintf(buf, buf_size, "%s %s%s %s", llama_model_arch_name(model->arch).c_str(), + model->hparams.n_expert > 0 ? 
(std::to_string(model->hparams.n_expert) + "x").c_str() : "", llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } From 0ef3ca2ac62016c0c545de1c89dc2e3e130f4a99 Mon Sep 17 00:00:00 2001 From: Phil H <5756783+phiharri@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:48:49 +0000 Subject: [PATCH 196/811] server : add token counts to html footer (#4738) * server: add token counts to stats * server: generate hpp --------- Co-authored-by: phiharri --- examples/server/completion.js.hpp | 693 ++--- examples/server/index.html.hpp | 4591 +++++++++++++++-------------- examples/server/index.js.hpp | 3693 +++++++++++------------ examples/server/public/index.html | 4 +- 4 files changed, 4529 insertions(+), 4452 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index f0a071a69..fe5f81228 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -74,355 +74,376 @@ unsigned char completion_js[] = { 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, 0x41, 0x63, 0x63, 0x65, 0x70, 0x74, 0x27, 0x3a, 0x20, 0x27, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, - 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, - 0x79, 0x2e, 0x67, 0x65, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, - 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, - 0x20, 0x54, 0x65, 0x78, 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, - 0x28, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, - 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, - 0x20, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, - 0x61, 0x64, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, - 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, - 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, - 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, - 0x64, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, - 0x6e, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x41, 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, - 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 
0x72, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, - 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, - 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, - 0x64, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x61, 0x72, 0x61, 0x63, 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, + 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x20, + 0x3f, 0x20, 0x7b, 0x27, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x60, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x7d, 0x60, 0x7d, 0x20, + 0x3a, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x3a, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, 0x20, 0x20, 0x7d, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, 0x79, 0x2e, 0x67, 0x65, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x54, 0x65, 0x78, + 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x42, 0x75, 0x66, + 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x61, 0x72, 0x74, + 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, 0x61, 0x64, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, 0x6e, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x41, + 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x65, 0x66, + 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, 0x64, 0x65, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x72, 0x61, 0x63, + 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x73, + 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, + 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x65, 0x6e, 0x64, + 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, + 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x49, + 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x64, + 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x20, 0x77, + 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, + 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, 0x65, 0x6e, 0x20, 0x74, + 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, + 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x20, + 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x65, 0x6e, + 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, + 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 
0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, + 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, 0x65, 0x20, 0x61, 0x20, + 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x61, + 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, 0x65, 0x20, 0x61, 0x6c, + 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, 0x20, 0x74, 0x68, 0x65, + 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, - 0x42, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x2e, 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, - 0x6e, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x53, 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x65, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, - 0x65, 0x78, 0x74, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, - 0x6e, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x49, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, - 0x78, 0x74, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, - 0x6e, 0x64, 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, - 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, - 0x65, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, - 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, - 0x20, 0x69, 0x6e, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, - 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, - 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, - 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, - 0x65, 0x72, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, - 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, - 0x65, 0x72, 0x20, 0x69, 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, - 0x65, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, - 0x61, 0x6b, 0x20, 0x61, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, - 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, - 0x65, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x73, 0x20, 0x61, 
0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, - 0x20, 0x74, 0x68, 0x65, 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, - 0x2f, 0x5e, 0x28, 0x5c, 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, - 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, - 0x68, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, - 0x65, 0x63, 0x28, 0x6c, 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, - 0x6e, 0x63, 0x65, 0x20, 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2e, 0x63, 0x70, 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, - 0x20, 0x6a, 0x75, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, - 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, - 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, 0x2f, 0x5e, 0x28, 0x5c, + 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, + 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x28, 0x6c, + 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x20, + 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 
0x64, 0x65, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x79, 0x69, + 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x66, + 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, 0x61, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x66, 0x72, 0x6f, + 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2c, 0x20, 0x77, 0x65, + 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, + 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, + 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, - 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, - 0x64, 0x20, 0x72, 0x65, 0x73, 0x75, 
0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x69, 0x66, 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, - 0x61, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x2c, 0x20, 0x77, 0x65, 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, - 0x65, 0x61, 0x6b, 0x20, 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x3d, + 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, + 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, + 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, + 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, + 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, + 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 
0x63, 0x72, 0x69, 0x62, 0x65, 0x20, + 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, + 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, + 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, + 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, + 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 
0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, - 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, - 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, - 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, - 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, - 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, - 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, 0x6e, 0x20, 0x73, 0x75, 0x62, 0x63, - 0x72, 0x69, 0x62, 0x65, 0x20, 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, - 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, - 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, - 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 
0x73, 0x27, 0x0a, 0x2f, 0x2f, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, - 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, - 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, - 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, + 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 
0x20, 0x74, 0x68, 0x61, 0x74, + 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, + 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, - 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, - 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, - 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, + 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, + 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 
0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, - 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, - 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, - 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, - 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, - 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x22, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, - 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, - 0x6e, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 
0x61, 0x69, - 0x6c, 0x3a, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, - 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x61, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, - 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, - 0x65, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, - 0x6e, 0x6f, 0x74, 0x20, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, - 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, - 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, - 0x28, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, - 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, - 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, - 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, - 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, - 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, - 0x6d, 0x69, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, - 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, - 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x28, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, - 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, - 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 
0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x20, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, - 0x2f, 0x2a, 0x2a, 0x0a, 0x20, 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, - 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, - 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, - 0x62, 0x61, 0x63, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, - 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, - 0x61, 0x63, 0x6b, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, - 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, - 0x69, 0x6e, 0x66, 0x6f, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, - 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, - 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, - 0x73, 0x6f, 0x20, 0x6f, 0x6e, 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, + 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, + 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 
0x20, 0x3d, - 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, - 0x77, 0x61, 0x69, 0x74, 0x20, 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, - 0x2f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, - 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, - 0x72, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a + 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, + 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, + 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, + 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, + 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, + 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, + 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 
0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; -unsigned int completion_js_len = 5099; +unsigned int completion_js_len = 5346; diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index f22b77e7f..20551520e 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -383,2380 +383,2409 @@ unsigned char index_html[] = { 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, - 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, + 0x39, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x3a, - 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, + 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, + 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, + 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, - 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, - 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, - 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, - 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, 0x31, - 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, - 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, 0x35, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, - 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, - 0x74, 0x61, 
0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, 0x20, - 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, 0x74, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, 0x20, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, - 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x74, 0x72, 0x75, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, - 0x20, 0x53, 0x54, 0x41, 0x52, 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, - 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, - 0x69, 0x6e, 0x20, 0x62, 0x6f, 0x72, 0x77, 0x73, 0x65, 0x72, 0x20, 0x4c, - 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, - 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, - 0x65, 0x79, 0x20, 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, - 0x70, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, - 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, - 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, - 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, - 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, - 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 
0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, + 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, + 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, + 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x74, 0x72, 0x75, 0x65, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, + 0x79, 0x3a, 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, + 0x52, 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, + 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, + 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, - 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, - 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, - 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, - 0x29, 0x3b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, - 0x69, 0x74, 0x65, 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, - 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, - 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, - 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, - 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, - 0x7b, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, - 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 
0x69, 0x66, 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, - 0x61, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, - 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x73, - 0x74, 0x6f, 0x72, 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, - 0x20, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, - 0x66, 0x20, 0x7b, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, - 0x69, 0x6e, 0x67, 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, - 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, - 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, - 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, - 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, - 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, - 0x65, 0x20, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, - 0x79, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, - 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, - 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, - 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, - 0x67, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x6f, - 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, - 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, - 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, - 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, - 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, - 0x7a, 0x69, 0x6e, 0x67, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, - 0x76, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, - 0x22, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, - 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, - 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, - 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x55, 
0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, + 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, + 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, + 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, + 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, + 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, + 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, + 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, + 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, + 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, + 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 
0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, + 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, + 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, + 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, + 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, + 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, + 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 
0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, + 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, + 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, + 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x6c, 0x79, 0x20, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, + 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 
0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x6f, 0x76, 0x65, + 0x72, 0x72, 0x69, 0x64, 0x65, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, + 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x5b, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x6e, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, 0x74, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, 0x76, 0x69, + 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x22, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x46, 0x72, 
0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, + 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, - 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, - 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 
0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, - 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, - 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, - 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, - 0x74, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, - 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, - 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 
0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, - 0x73, 0x74, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, - 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, - 0x28, 0x27, 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, - 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, - 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, - 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, - 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, - 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, + 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, + 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, + 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 
0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, + 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, + 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 
0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, - 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, - 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, - 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, - 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, - 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, - 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, - 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, + 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, + 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, + 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, + 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, + 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, + 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, + 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, + 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, + 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, + 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, + 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, + 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, - 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, - 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, - 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, - 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, - 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, - 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, - 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 
0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, - 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, - 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, - 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, - 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, - 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, - 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, - 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, - 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, - 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, - 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 
0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, - 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, - 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, - 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, - 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, - 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, - 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, - 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 
0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, - 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, - 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, - 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, - 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, - 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, - 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, - 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, - 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, - 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, - 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, - 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, - 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, - 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, - 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, - 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, - 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, - 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, - 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x20, - 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, - 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, - 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, - 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, + 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, + 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, + 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, + 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, + 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, + 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 
0x6e, 0x67, 0x74, + 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, - 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, - 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, - 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, - 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, - 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, - 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, - 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 
0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, - 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, - 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, - 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, - 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, - 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, - 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, - 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, - 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, - 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, - 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, - 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, - 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, - 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, - 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, - 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, - 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, - 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, + 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, + 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, - 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, - 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 
0x3c, 0x2f, 0x73, - 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, - 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, - 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, - 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, - 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, - 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, - 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, + 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, + 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, + 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, + 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, + 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, + 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, + 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, + 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, + 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, + 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, + 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, + 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, + 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 
0x61, 0x6d, 0x61, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, + 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, + 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, - 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, 0x61, 0x6c, - 0x6c, 0x79, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, - 0x5b, 0x5f, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 
0x29, 0x20, - 0x3a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, - 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, - 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, - 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, - 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, - 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 
0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, - 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, + 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, + 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, - 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, + 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, - 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, - 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, - 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, - 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, - 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, - 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, - 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, - 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, - 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, - 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, - 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 
0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, + 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, - 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, - 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, - 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, - 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, - 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, + 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, + 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, + 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, - 
0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, - 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, - 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, - 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, - 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, - 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, - 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, - 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, - 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, - 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, - 0x4c, 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x63, 0x72, - 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, - 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, - 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, - 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, - 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, - 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, - 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, - 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, 0x20, 0x28, - 
0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, - 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, - 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, 0x3e, 0x20, - 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, - 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2b, 0x2f, - 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x73, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, - 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, 0x72, 0x6b, - 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, 0x65, 0x78, - 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x75, 0x73, - 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, - 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, 0x74, 0x72, - 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, 0x3c, 0x2f, - 
0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x73, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, - 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, - 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x60, 0x20, - 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, - 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, - 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, - 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x69, - 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x68, - 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, 0x65, 0x79, - 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, 0x67, 0x20, - 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, - 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, - 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, 0x20, 0x60, - 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, - 
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x65, 0x64, 0x69, 0x74, 0x61, - 0x62, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x7d, 0x20, - 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x7d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, - 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x64, 0x69, 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, - 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, - 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, - 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x73, - 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, - 
0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x4d, 0x61, 0x74, 0x68, - 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, - 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, - 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, - 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, - 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, - 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, - 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, - 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, - 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, - 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, - 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, - 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, - 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, - 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, - 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, - 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, - 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 
0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, - 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, - 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, - 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, - 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, - 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x60, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, - 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, - 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, - 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, - 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, - 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, - 
0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, - 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, - 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, - 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, - 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, - 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, - 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, - 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, - 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, - 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, - 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, - 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, + 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, + 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, + 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, + 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x69, 0x61, + 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, + 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, + 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, + 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, + 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, + 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, + 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, + 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, + 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, + 
0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, + 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, + 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 0x3c, + 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, + 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, + 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x22, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, 0x63, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, + 
0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x6c, 0x79, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, + 0x28, 0x28, 0x5b, 0x5f, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, + 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, + 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, + 0x29, 0x20, 0x3a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, + 0x27, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, + 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, + 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, + 
0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, + 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, + 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, + 0x6e, 0x65, 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, + 0x2c, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, + 0x3d, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, + 0x6c, 0x65, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, + 0x3d, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, - 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, - 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, - 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, - 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, - 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, - 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, - 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, - 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, - 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, - 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, - 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, - 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x20, 0x20, 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, + 0x62, 0x61, 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, + 0x29, 0x2c, 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 
0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, + 0x44, 0x61, 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, + 0x20, 0x26, 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x68, 0x69, 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, + 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, + 
0x22, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6f, 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, + 0x24, 0x7b, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, + 0x73, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, + 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, + 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, + 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, + 0x65, 0x7d, 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, + 0x61, 0x67, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, + 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x3d, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, + 
0x73, 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, - 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, - 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, - 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, + 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, + 0x3e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, + 0x7d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, + 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 
0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, + 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, + 0x74, 0x6f, 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, + 0x65, 0x64, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, + 0x70, 0x20, 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, + 0x66, 0x66, 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, + 0x2b, 0x20, 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, + 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x27, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, + 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, + 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, + 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, + 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, + 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, + 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, + 0x74, 
0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, + 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, + 0x60, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, + 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, + 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, + 0x2f, 0x70, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, + 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, + 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, + 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, + 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, + 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, - 0x6f, 
0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, - 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, + 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, + 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x65, 0x64, 0x69, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x69, 0x73, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, + 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, + 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, + 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, + 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, + 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 
0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x4d, 0x61, + 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, + 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, + 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, + 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, - 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, - 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 
0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, + 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, + 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, + 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, + 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x5b, 0x63, 0x75, 0x72, + 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, 0x3a, 0x20, 0x69, 0x20, + 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, + 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, 0x43, 0x6f, 0x6e, + 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x3a, + 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, + 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, + 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, + 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, + 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, + 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, + 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, + 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, + 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, - 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, + 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, - 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, - 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, - 0x70, 0x64, 
0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, + 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, + 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, + 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, - 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, + 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, + 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 
0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, + 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, + 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, + 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, + 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, - 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, - 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, + 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, + 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, - 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x3e, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, - 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, + 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, + 0x22, 0x20, 0x6f, 0x6e, 
0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, - 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x3c, 0x2f, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, - 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, - 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, + 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, + 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, - 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, - 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, - 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, - 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 
0x68, 0x61, 0x74, 0x22, 0x7d, 0x20, + 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, + 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, - 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, - 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, - 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, - 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x43, 0x68, 
0x61, 0x74, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x20, 0x3a, - 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, - 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, - 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x22, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, - 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x65, - 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x35, 0x2c, 0x20, 0x6d, 0x69, - 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 
0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 
0x65, 0x6c, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, 0x61, 0x74, 0x3c, 0x2f, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, + 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, + 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, + 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, + 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, + 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, + 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, + 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 
0x3a, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, + 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, 0x6e, 0x61, 0x6c, 0x69, + 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x20, 0x73, 0x65, + 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, + 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, + 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, - 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x20, 0x73, 0x65, 0x71, 0x75, - 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, - 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, - 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, - 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, - 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x20, - 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, - 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, - 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 
0x6c, 0x61, 0x73, - 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, - 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, 0x20, 0x73, - 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, - 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, + 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x20, 0x7d, 0x29, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, - 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, - 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, - 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, - 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x22, - 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, - 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, - 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, - 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x4d, - 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x3c, - 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x46, - 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x66, 0x73, - 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, + 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, + 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, + 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x4d, 0x69, 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, + 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x6e, 0x5f, + 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, + 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 
0x65, 0x2e, + 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, - 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, - 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, - 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, + 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, - 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x70, - 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, - 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, - 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x20, 0x70, - 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, - 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, - 
0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, - 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, - 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, - 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, + 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x20, 0x7d, + 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, + 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, + 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, - 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, - 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x6e, - 
0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, + 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, - 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, - 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, - 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, 0x31, 0x3c, - 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, + 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, - 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, - 0x20, 0x4d, 0x69, 
0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, 0x32, + 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, - 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, - 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, - 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, - 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, - 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, - 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, - 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, - 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, - 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, + 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 
0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, + 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, + 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, + 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, + 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, + 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, + 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, + 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, + 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x74, 0x61, 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, + 0x74, 0x61, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, - 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, - 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, - 0x2a, 0x20, 0x28, 0x31, 0x20, 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, 0x77, 0x20, 0x50, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x61, 0x70, 0x69, + 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x3e, 0x41, 0x50, 0x49, 0x20, 0x4b, 0x65, + 0x79, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x5f, + 0x6b, 0x65, 0x79, 0x7d, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x45, 0x6e, 0x74, 0x65, 0x72, + 0x20, 0x41, 0x50, 0x49, 0x20, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x6f, 0x6e, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, + 0x3e, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x67, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, - 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x60, 0x72, 0x67, 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, - 0x2c, 0x24, 0x7b, 0x67, 0x7d, 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, - 0x20, 0x6d, 0x73, 0x67, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, - 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x72, 0x20, 0x62, 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, - 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, - 0x74, 0x68, 0x28, 0x27, 0x62, 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, - 0x27, 0x29, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, - 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 
0x20, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, - 0x62, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, - 0x6f, 0x62, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, - 0x72, 0x6f, 0x62, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, - 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, - 0x3d, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, - 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, - 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, - 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x70, 0x72, 0x6f, 0x62, 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, - 0x28, 0x70, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, - 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, - 0x6f, 0x62, 0x3a, 0x20, 0x24, 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, - 0x7d, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, - 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, - 0x6d, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, - 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, - 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, - 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, - 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, - 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, - 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, - 0x2a, 0x20, 0x31, 0x30, 0x30, 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, - 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, - 0x24, 0x7b, 0x7b, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, - 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, - 0x6c, 0x6f, 0x72, 0x20, 0x7d, 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, - 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, - 0x2e, 0x63, 0x6f, 
0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, - 0x3f, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, - 0x3e, 0x60, 0x20, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, - 0x61, 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, - 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, - 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, - 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, - 0x67, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, - 0x5e, 0x23, 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, - 0x24, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, - 0x24, 0x31, 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, - 0x29, 0x5c, 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, - 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, - 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, - 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, - 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, - 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, - 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, - 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, - 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, - 0x5c, 0x73, 0x5c, 
0x53, 0x5d, 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, - 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, - 0x3c, 0x2f, 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, - 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, - 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, - 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, - 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, - 0x7b, 0x20, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, - 0x20, 0x7d, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, - 0x70, 0x65, 0x72, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, - 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, - 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, - 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x73, 0x65, - 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, - 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, - 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, - 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x70, 0x6f, 0x70, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x20, 
0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, - 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, - 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x74, 0x6f, 0x70, 0x3a, - 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, 0x20, 0x6c, 0x65, 0x66, 0x74, - 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x20, 0x7d, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, - 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, - 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, - 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x63, 0x74, 0x20, - 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x42, - 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x69, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, + 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, + 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, + 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, + 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, + 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, + 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, + 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 
0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, + 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, - 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x20, 0x2b, 0x20, - 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, - 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, - 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x6c, 0x65, - 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, - 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, 0x7d, 0x70, 0x78, 0x60, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, - 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x21, - 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, + 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, + 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, + 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, + 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, + 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, + 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, + 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, - 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x21, 0x70, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, - 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, - 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, - 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, - 0x6f, 0x77, 0x6e, 
0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, - 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x72, 0x65, 0x6d, - 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, - 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, - 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, - 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, - 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x7d, 0x20, 0x72, - 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, - 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x3d, - 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, - 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3c, 0x2f, - 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x20, 0x69, - 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, - 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, - 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, - 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, - 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, + 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, + 0x62, 0x29, 0x20, 
0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, + 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, + 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, + 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, + 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, + 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, + 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, + 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, + 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, + 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, + 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x70, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, - 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, - 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, 0x74, 0x70, - 0x73, 0x3a, 0x2f, 0x2f, 
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x69, 0x74, - 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, - 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x73, 0x74, - 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, 0x73, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, 0x65, 0x64, - 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, 0x63, 0x65, - 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, - 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, 0x43, 0x53, - 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x2a, - 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, - 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x73, - 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, - 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, + 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, + 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, + 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, + 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, + 0x61, 0x63, 0x6b, 0x67, 
0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, + 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, + 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, + 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, + 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, + 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, + 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, + 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, + 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x68, + 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, + 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, + 0x2f, 0x65, 0x6d, 0x3e, 
0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, + 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, + 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, + 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, + 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, + 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, + 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, + 0x64, 0x7d, 0x20, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x7d, 0x20, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, + 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, + 0x5f, 0x74, 0x6f, 0x6b, 
0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, 0x70, + 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, 0x7b, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, + 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, + 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, + 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, + 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, + 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x63, 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, + 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, + 0x6f, 0x6d, 0x20, 0x2b, 
0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, + 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, + 0x63, 0x74, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, + 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, - 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, - 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, + 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x3b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, + 0x6c, 0x65, 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, + 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, + 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x7d, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, + 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, + 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, + 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, + 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, + 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, + 0x6c, 0x6f, 0x70, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, + 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, + 0x2f, 0x6d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, + 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, + 0x6c, 0x2e, 0x6a, 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x2a, 0x20, 0x52, 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, + 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, + 0x65, 0x6e, 0x20, 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, + 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, + 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, + 0x61, 0x79, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, - 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, 0x68, + 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, + 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x29, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, + 0x3a, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, + 0x68, 0x6f, 0x77, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, + 0x6f, 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, + 0x6d, 0x6f, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, + 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, + 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, + 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, + 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 
0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, - 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x20, - 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, - 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, 0x6f, 0x64, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x3d, - 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, - 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x6c, - 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, 0x20, 0x6e, - 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, 0x69, 0x6e, - 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x26, - 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x2f, - 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, - 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, - 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, - 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, + 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, + 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, + 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, + 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, + 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, + 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, 0x64, 0x65, - 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, - 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, - 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, 0x20, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, 0x20, 0x65, - 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, - 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x20, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, - 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, - 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, 0x20, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, - 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, - 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, - 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, - 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, - 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, - 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, - 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, + 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, + 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, + 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, + 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, + 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, + 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, + 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, + 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, + 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, + 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, - 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, - 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, - 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, - 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, - 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, - 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, - 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, - 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, - 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, - 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, - 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, - 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, - 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, - 0x65, 0x70, 0x74, 0x3d, 0x22, 
0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, - 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, - 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, - 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a + 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, + 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, + 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, + 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, + 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, + 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, + 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, + 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, + 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a }; -unsigned int index_html_len = 33103; +unsigned int index_html_len = 33456; diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index c9dc078b7..e09b3c8c5 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -2,1875 +2,1902 @@ unsigned char index_js[] = { 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, - 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x28, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x75, 0x3e, 0x31, 0x29, 0x7b, 0x75, 0x2d, 0x2d, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x5f, 0x29, 0x7b, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 
0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x77, 0x68, 0x69, - 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, - 0x2e, 0x6f, 0x3b, 0x69, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x69, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, - 0x28, 0x21, 0x28, 0x38, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x26, 0x26, 0x61, - 0x28, 0x69, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x28, - 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, 0x3d, - 0x21, 0x30, 0x7d, 0x7d, 0x69, 0x3d, 0x5f, 0x7d, 0x7d, 0x66, 0x3d, 0x30, - 0x3b, 0x75, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, 0x68, - 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x75, - 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, - 0x29, 0x3b, 0x75, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, + 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, + 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, + 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, + 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, + 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, + 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, + 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, + 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, + 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 
0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, - 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, 0x6c, 0x65, - 0x74, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x28, 0x74, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6f, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3b, 0x6f, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, - 0x6c, 0x6c, 0x79, 0x7b, 0x6f, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x75, 0x3d, 0x30, 0x2c, 0x66, 0x3d, 0x30, - 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x73, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x69, - 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, 0x74, - 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, 0x69, - 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, - 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, - 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, - 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, - 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, 0x2e, - 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, 0x2e, - 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, 0x73, - 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 
0x74, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, - 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3d, 0x74, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, - 0x29, 0x7b, 0x6e, 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, - 0x65, 0x3d, 0x6e, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, - 0x65, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, - 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x69, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, - 0x2d, 0x33, 0x33, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, - 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x69, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, + 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, + 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, + 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, + 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, + 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, + 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 
0x28, 0x33, + 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, + 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, + 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, + 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, + 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, + 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, - 0x4f, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, - 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, - 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x79, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, - 0x65, 0x74, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x73, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, - 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, - 0x2c, 0x73, 0x65, 0x74, 0x28, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, - 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, - 0x76, 0x29, 0x21, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 
0x70, 0x75, - 0x74, 0x65, 0x64, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, - 0x61, 0x76, 0x65, 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, - 0x65, 0x63, 0x74, 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, - 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x65, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, - 0x75, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, - 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, - 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, - 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x63, 0x28, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x28, - 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, - 0x28, 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, - 0x7c, 0x7c, 0x21, 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, - 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, - 0x2e, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, - 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, - 0x65, 0x3b, 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, - 0x69, 0x3d, 0x2d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, - 0x73, 0x3d, 0x6e, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x64, 0x28, 0x74, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, - 0x73, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, - 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, - 0x2e, 0x55, 0x28, 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, - 0x65, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, - 0x70, 0x3d, 0x74, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, - 0x3b, 0x65, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, - 0x2e, 0x72, 0x29, 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 
0x64, 0x20, - 0x30, 0x3b, 0x65, 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, - 0x29, 0x7b, 0x63, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x3d, 0x34, 0x7d, 0x28, 0x76, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x63, 0x29, - 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, - 0x3b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, - 0x28, 0x33, 0x32, 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, - 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, - 0x3d, 0x6c, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x61, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x7d, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, - 0x74, 0x72, 0x79, 0x7b, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, - 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, - 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x7c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, - 0x7c, 0x7c, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, - 0x36, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, - 0x3d, 0x74, 0x3b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x76, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x33, 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, - 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 
0x65, 0x2e, - 0x53, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, - 0x74, 0x29, 0x7d, 0x3b, 0x76, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, + 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, + 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x7b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x55, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x74, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, - 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, - 0x2e, 0x53, 0x2e, 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x76, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, - 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, - 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x76, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, - 0x28, 0x29, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, - 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x76, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x73, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x68, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 
0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, - 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, - 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x7b, 0x75, 0x2b, - 0x2b, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, - 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, - 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, - 0x29, 0x7b, 0x74, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, - 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x67, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, - 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, - 0x79, 0x7b, 0x69, 0x3d, 0x5f, 0x3b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x67, 0x28, 0x74, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, - 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, - 0x2e, 0x55, 0x28, 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x6d, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x69, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, + 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, + 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, + 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x66, + 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, + 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, + 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, + 0x74, 0x28, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, + 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, - 0x6f, 0x72, 0x28, 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, - 0x72, 0x64, 0x65, 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, - 0x29, 0x3b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, - 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, - 0x3b, 0x69, 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x6e, 0x28, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6b, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x3d, 0x33, 0x32, 0x7d, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, - 0x79, 0x7b, 0x69, 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 
0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x78, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x6e, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, - 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, - 0x7d, 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x26, 0x3d, 0x2d, 0x39, 0x3b, 0x6d, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, - 0x3b, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x75, 0x2b, 0x2b, - 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x62, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2c, 0x6e, 0x29, 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x6f, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, - 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x64, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, - 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x29, 0x29, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, - 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, - 0x65, 0x77, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, - 0x6e, 0x2e, 0x63, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, - 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, - 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x2e, 0x64, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, - 0x76, 0x61, 0x72, 0x20, 0x78, 0x2c, 0x77, 0x2c, 0x43, 0x2c, 0x45, 0x2c, - 0x55, 0x2c, 0x48, 0x2c, 0x4e, 0x2c, 0x50, 0x2c, 0x24, 0x2c, 0x44, 0x3d, - 0x7b, 0x7d, 0x2c, 0x54, 0x3d, 0x5b, 0x5d, 0x2c, 0x56, 0x3d, 0x2f, 0x61, - 0x63, 0x69, 0x74, 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, - 0x7c, 0x6e, 0x7c, 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, - 0x67, 0x72, 0x69, 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, - 0x7c, 0x6e, 0x74, 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, - 0x7c, 0x7a, 0x6f, 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, - 0x65, 0x72, 0x61, 0x2f, 0x69, 0x2c, 0x41, 0x3d, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, - 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 
0x5b, - 0x65, 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, - 0x6e, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x57, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, - 0x7d, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, - 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, 0x3d, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, - 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, - 0x5d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, - 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, - 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, - 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, - 0x5b, 0x6f, 0x5d, 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, - 0x69, 0x2c, 0x5f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x7b, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, - 0x72, 0x65, 0x66, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, - 0x5f, 0x62, 0x3a, 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x5f, 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x5f, 0x5f, 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, - 0x68, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, + 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 
0x2b, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, + 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, + 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, + 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, + 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, + 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, + 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, + 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, + 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, + 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, + 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, + 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, + 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, + 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 
0x3d, + 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, + 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, + 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, + 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, + 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, + 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, + 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, + 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 
0x74, + 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x66, 0x6f, + 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, + 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, + 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x36, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, + 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, + 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, 0x28, 0x29, 0x29, + 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, + 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, + 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, + 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, 0x29, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x79, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x67, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, 0x75, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x29, 0x7b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x28, + 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, 0x66, 0x7c, 0x3d, + 0x38, 0x3b, 0x62, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 
0x77, + 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x69, + 0x3d, 0x5f, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, + 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, + 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x67, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, 0x29, 0x3b, 0x76, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x69, 0x66, + 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x62, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, 0x33, 0x32, + 0x7d, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x69, + 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, 0x7d, 0x7d, 0x3b, + 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, + 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x39, 0x3b, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x64, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x7d, 0x3b, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 
0x70, + 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, + 0x6f, 0x3b, 0x6f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x3b, 0x53, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x69, 0x66, + 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, + 0x29, 0x62, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x77, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, + 0x53, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x64, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, 0x76, 0x61, 0x72, + 0x20, 0x78, 0x2c, 0x43, 0x2c, 0x45, 0x2c, 0x55, 0x2c, 0x48, 0x2c, 0x50, + 0x2c, 0x4e, 0x2c, 0x24, 0x2c, 0x44, 0x2c, 0x54, 0x3d, 0x7b, 0x7d, 0x2c, + 0x56, 0x3d, 0x5b, 0x5d, 0x2c, 0x41, 0x3d, 0x2f, 0x61, 0x63, 0x69, 0x74, + 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, 0x7c, 0x6e, 0x7c, + 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, 0x67, 0x72, 0x69, + 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, 0x7c, 0x6e, 0x74, + 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, 0x7c, 0x7a, 0x6f, + 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, 0x65, 0x72, 0x61, + 0x2f, 0x69, 0x2c, 0x46, 0x3d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, + 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, + 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, 0x7d, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x22, 0x6b, + 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, + 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, + 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, 0x5d, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, + 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 
0x6e, + 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, + 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x5b, 0x6f, 0x5d, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x7b, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, 0x72, 0x65, 0x66, + 0x3a, 0x5f, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, + 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, + 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, 0x76, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x5f, 0x3f, 0x2b, 0x2b, 0x43, 0x3a, 0x5f, 0x7d, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x5f, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x77, 0x2e, 0x76, 0x6e, 0x6f, - 0x64, 0x65, 0x26, 0x26, 0x77, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, - 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x4c, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x52, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x3d, 0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x6a, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x2e, 0x5f, 0x5f, - 0x6b, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, 0x29, - 0x2b, 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, + 0x69, 0x3f, 0x2b, 0x2b, 0x45, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x69, 0x3a, + 0x2d, 0x31, 0x2c, 0x5f, 0x5f, 0x75, 0x3a, 0x30, 0x7d, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, 0x76, 0x6e, 
+ 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, + 0x28, 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x3d, 0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x71, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x69, 0x2b, + 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, + 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, + 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x71, 0x28, 0x74, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x6a, 0x28, - 0x74, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, - 0x75, 
0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, - 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, - 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, - 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x29, 0x7b, 0x28, - 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x55, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x47, 0x2e, 0x5f, 0x5f, - 0x72, 0x2b, 0x2b, 0x7c, 0x7c, 0x48, 0x21, 0x3d, 0x3d, 0x77, 0x2e, 0x64, - 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x48, 0x3d, 0x77, - 0x2e, 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, - 0x47, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x47, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, - 0x28, 0x50, 0x29, 0x3b, 0x74, 0x3d, 0x55, 0x2e, 0x73, 0x68, 0x69, 0x66, - 0x74, 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, - 0x28, 0x6e, 0x3d, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, - 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x75, 0x3d, 0x28, 0x72, 0x3d, 0x28, 0x65, 0x3d, 0x74, - 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x28, - 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x6f, 0x3d, - 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x72, 0x29, 0x29, 0x2e, 0x5f, 0x5f, 0x76, - 0x3d, 0x72, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, 0x31, 0x2c, 0x69, 0x74, 0x28, - 0x66, 0x2c, 0x72, 0x2c, 0x6f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, 0x2c, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x66, 0x2e, 0x6f, - 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x72, 0x2e, 0x5f, - 0x5f, 0x68, 0x3f, 0x5b, 0x75, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x75, 0x3f, 0x6a, 0x28, - 0x72, 0x29, 0x3a, 0x75, 0x2c, 0x72, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x5f, - 0x29, 0x2c, 0x5f, 0x74, 0x28, 0x69, 0x2c, 0x72, 0x2c, 0x5f, 0x29, 0x2c, - 0x72, 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x75, 0x26, 0x26, 0x42, 0x28, - 0x72, 0x29, 0x29, 0x2c, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3e, 0x6e, 0x26, 0x26, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x50, - 0x29, 0x29, 0x3b, 0x47, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, - 0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 
0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, - 0x2c, 0x79, 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x3d, 0x30, - 0x2c, 0x53, 0x3d, 0x69, 0x26, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, - 0x7c, 0x54, 0x2c, 0x78, 0x3d, 0x53, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x2c, 0x77, 0x3d, 0x78, 0x2c, 0x43, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, - 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, - 0x43, 0x3b, 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x28, 0x70, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x3d, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x70, 0x3d, 0x6e, 0x5b, 0x63, - 0x5d, 0x29, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, - 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x3f, 0x6e, 0x75, - 0x6c, 0x6c, 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, - 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, - 0x6e, 0x74, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x70, 0x3f, 0x4f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, 0x3a, - 0x41, 0x28, 0x70, 0x29, 0x3f, 0x4f, 0x28, 0x52, 0x2c, 0x7b, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x70, 0x7d, 0x2c, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x29, 0x3a, 0x70, 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, - 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x70, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2c, 0x70, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x70, 0x2e, 0x72, - 0x65, 0x66, 0x3f, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x70, 0x29, 0x26, - 0x26, 0x28, 0x70, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2c, 0x70, 0x2e, 0x5f, - 0x5f, 0x62, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x2d, - 0x31, 0x3d, 0x3d, 0x3d, 0x28, 0x6d, 0x3d, 0x58, 0x28, 0x70, 0x2c, 0x53, - 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x77, 0x29, 0x29, 0x3f, 0x61, - 0x3d, 0x44, 0x3a, 0x28, 0x61, 0x3d, 0x53, 0x5b, 0x6d, 0x5d, 0x7c, 0x7c, - 0x44, 0x2c, 0x53, 0x5b, 0x6d, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x77, 0x2d, 0x2d, 0x29, 0x2c, 0x69, 0x74, 0x28, 0x74, 0x2c, - 0x70, 0x2c, 0x61, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x2c, 0x64, 0x3d, 0x70, 0x2e, 0x5f, - 0x5f, 0x65, 0x2c, 0x28, 0x68, 0x3d, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x29, - 0x26, 0x26, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x21, 0x3d, 0x68, 0x26, 0x26, - 0x28, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x61, - 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, - 0x2c, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x68, 0x2c, 0x70, 0x2e, - 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x64, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x64, 0x26, 0x26, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x76, 0x26, 0x26, 0x28, 0x76, 0x3d, 0x64, 0x29, 0x2c, - 0x62, 0x3d, 0x21, 0x28, 0x67, 0x3d, 0x61, 0x3d, 0x3d, 0x3d, 0x44, 0x7c, - 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, - 0x76, 
0x29, 0x26, 0x26, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x2c, 0x67, 0x3f, - 0x2d, 0x31, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x6b, 0x2d, 0x2d, 0x3a, 0x6d, - 0x21, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x28, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, - 0x2b, 0x31, 0x3f, 0x28, 0x6b, 0x2b, 0x2b, 0x2c, 0x62, 0x3d, 0x21, 0x30, - 0x29, 0x3a, 0x6d, 0x3e, 0x79, 0x3f, 0x77, 0x3e, 0x43, 0x2d, 0x79, 0x3f, - 0x28, 0x6b, 0x2b, 0x3d, 0x6d, 0x2d, 0x79, 0x2c, 0x62, 0x3d, 0x21, 0x30, - 0x29, 0x3a, 0x6b, 0x2d, 0x2d, 0x3a, 0x6b, 0x3d, 0x6d, 0x3c, 0x79, 0x26, - 0x26, 0x6d, 0x3d, 0x3d, 0x79, 0x2d, 0x31, 0x3f, 0x6d, 0x2d, 0x79, 0x3a, - 0x30, 0x29, 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x62, 0x3d, 0x62, - 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x63, 0x26, 0x26, 0x21, 0x67, 0x2c, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x61, 0x2e, 0x5f, - 0x5f, 0x6b, 0x21, 0x3d, 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x22, + 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x29, 0x7b, 0x28, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x48, 0x2e, 0x70, 0x75, 0x73, + 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, + 0x2b, 0x2b, 0x7c, 0x7c, 0x50, 0x21, 0x3d, 0x3d, 0x43, 0x2e, 0x64, 0x65, + 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x50, 0x3d, 0x43, 0x2e, + 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, 0x7a, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, + 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, + 0x24, 0x29, 0x3b, 0x74, 0x3d, 0x48, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, + 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, + 0x6e, 0x3d, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x5f, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x28, 0x69, + 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x75, 0x3d, 0x5b, 0x5d, 0x2c, 0x66, 0x3d, 0x5b, + 0x5d, 0x2c, 0x28, 0x72, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, + 0x26, 0x28, 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, + 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, + 0x31, 0x2c, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, + 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, 0x5f, 0x29, 0x2c, 0x5f, 0x74, + 0x28, 0x72, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, + 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x2e, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x2c, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x75, + 0x3f, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x3f, 0x71, 0x28, 0x69, 0x29, + 0x3a, 
0x6f, 0x2c, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x66, 0x29, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x2e, + 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x3d, 0x5f, + 0x2c, 0x69, 0x74, 0x28, 0x75, 0x2c, 0x5f, 0x2c, 0x66, 0x29, 0x2c, 0x5f, + 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x6f, 0x26, 0x26, 0x42, 0x28, 0x5f, + 0x29, 0x29, 0x2c, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x6e, 0x26, 0x26, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x24, 0x29, + 0x29, 0x3b, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, + 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x3d, + 0x5f, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x56, 0x2c, + 0x79, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x4b, + 0x28, 0x65, 0x2c, 0x6e, 0x2c, 0x76, 0x29, 0x2c, 0x66, 0x3d, 0x65, 0x2e, + 0x5f, 0x5f, 0x64, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x79, 0x3b, + 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x61, + 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x29, 0x26, 0x26, + 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x28, 0x68, 0x3d, 0x2d, 0x31, + 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x3f, 0x54, 0x3a, 0x76, + 0x5b, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x7c, 0x7c, 0x54, 0x2c, 0x61, + 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x63, 0x2c, 0x5f, 0x74, 0x28, 0x74, 0x2c, + 0x61, 0x2c, 0x68, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, + 0x5f, 0x65, 0x2c, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x68, 0x2e, + 0x72, 0x65, 0x66, 0x21, 0x3d, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, + 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x68, + 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, + 0x2c, 0x6c, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x61, 0x2e, 0x72, 0x65, + 0x66, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x70, 0x2c, 0x61, + 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x64, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x64, 0x3d, + 0x70, 0x29, 0x2c, 0x36, 0x35, 0x35, 0x33, 0x36, 0x26, 0x61, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x7c, 0x68, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x3d, 0x3d, + 0x61, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x66, 0x3d, 0x51, 0x28, 0x61, 0x2c, + 0x66, 0x2c, 0x74, 0x29, 0x3a, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x66, + 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3a, 0x70, 0x26, 0x26, 0x28, 0x66, + 0x3d, 0x70, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, + 0x6e, 0x67, 0x29, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, + 0x2d, 0x31, 0x39, 0x36, 0x36, 0x30, 0x39, 0x29, 0x3b, 0x65, 0x2e, 0x5f, + 0x5f, 
0x64, 0x3d, 0x66, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x64, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, + 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x3d, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x73, 0x3d, 0x65, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x6c, 0x3d, 0x73, 0x2c, 0x63, + 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, + 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x66, 0x3b, + 0x5f, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x69, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x69, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x29, + 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, + 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x7c, 0x7c, 0x62, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x28, 0x66, 0x3d, 0x70, 0x2e, - 0x5f, 0x5f, 0x64, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x29, 0x3a, 0x66, 0x3d, 0x64, 0x2e, 0x6e, 0x65, - 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x3a, 0x66, 0x3d, - 0x51, 0x28, 0x74, 0x2c, 0x64, 0x2c, 0x66, 0x29, 0x3a, 0x66, 0x3d, 0x4a, - 0x28, 0x70, 0x2c, 0x66, 0x2c, 0x74, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, - 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x29, 0x29, 0x29, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x76, 0x2c, 0x63, - 0x3d, 0x78, 0x3b, 0x63, 0x2d, 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x26, 0x26, 0x28, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, - 0x5f, 0x65, 0x26, 0x26, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, - 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x64, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, - 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, - 0x29, 0x2c, 0x75, 0x74, 0x28, 0x53, 0x5b, 0x63, 0x5d, 0x2c, 0x53, 0x5b, - 0x63, 0x5d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x5f, 0x26, 0x26, - 0x6f, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, - 0x2b, 0x2b, 0x29, 0x28, 0x69, 0x3d, 0x5f, 0x5b, 0x6f, 0x5d, 0x29, 0x26, - 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, 0x3d, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x3f, 0x4a, 0x28, 0x69, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3a, 0x51, 0x28, - 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x29, 0x29, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 
0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x3d, 0x6e, 0x7c, - 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x7c, - 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x7c, 0x7c, 0x28, 0x41, - 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x3a, 0x6e, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x21, 0x3d, 0x3d, - 0x74, 0x3f, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, 0x65, - 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, - 0x3a, 0x6e, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x6e, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, - 0x65, 0x7c, 0x7c, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, - 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6e, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, + 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, 0x6e, 0x74, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, + 0x7c, 0x69, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, + 0x6f, 0x72, 0x3d, 0x3d, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3f, 0x4f, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x29, 0x3a, 0x46, 0x28, 0x69, + 0x29, 0x3f, 0x4f, 0x28, 0x6a, 0x2c, 0x7b, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x3a, 0x69, 0x7d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x26, 0x26, 0x69, + 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, 0x69, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2c, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, + 0x69, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3f, + 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, + 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x69, 0x29, 0x3f, 0x28, 0x69, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x75, 0x3d, 0x59, 0x28, 0x69, + 0x2c, 0x65, 0x2c, 0x72, 0x3d, 0x5f, 0x2b, 0x63, 0x2c, 0x6c, 0x29, 0x2c, + 0x69, 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x75, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x2d, 0x31, 0x21, 0x3d, 0x3d, 0x75, 0x26, 0x26, 0x28, + 0x6c, 0x2d, 0x2d, 0x2c, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x75, 0x5d, 0x29, + 0x26, 0x26, 0x28, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x31, 0x33, + 0x31, 0x30, 0x37, 0x32, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x6f, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6f, + 0x2e, 
0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x75, 0x26, + 0x26, 0x63, 0x2d, 0x2d, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, 0x29, 0x3a, + 0x75, 0x21, 0x3d, 0x3d, 0x72, 0x26, 0x26, 0x28, 0x75, 0x3d, 0x3d, 0x3d, + 0x72, 0x2b, 0x31, 0x3f, 0x63, 0x2b, 0x2b, 0x3a, 0x75, 0x3e, 0x72, 0x3f, + 0x6c, 0x3e, 0x66, 0x2d, 0x72, 0x3f, 0x63, 0x2b, 0x3d, 0x75, 0x2d, 0x72, + 0x3a, 0x63, 0x2d, 0x2d, 0x3a, 0x63, 0x3d, 0x75, 0x3c, 0x72, 0x26, 0x26, + 0x75, 0x3d, 0x3d, 0x72, 0x2d, 0x31, 0x3f, 0x75, 0x2d, 0x72, 0x3a, 0x30, + 0x2c, 0x75, 0x21, 0x3d, 0x3d, 0x5f, 0x2b, 0x63, 0x26, 0x26, 0x28, 0x69, + 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, + 0x29, 0x29, 0x3a, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x2e, 0x6b, 0x65, 0x79, + 0x26, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6f, 0x2e, + 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, 0x29, 0x29, + 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x2c, 0x21, 0x31, 0x29, 0x2c, + 0x65, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6c, 0x2d, + 0x2d, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6c, 0x29, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x73, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, + 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x28, + 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, + 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, + 0x29, 0x29, 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x29, 0x29, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, + 0x69, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x69, 0x3d, 0x30, 0x3b, + 0x5f, 0x26, 0x26, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, + 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, + 0x3d, 0x51, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x29, + 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x21, + 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, + 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x65, 0x2c, 0x6e, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x6e, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x5f, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, 0x3d, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, 0x31, 0x2c, - 0x75, 0x3d, 
0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, - 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x66, - 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x3d, 0x6e, 0x7c, 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, + 0x7c, 0x7c, 0x28, 0x46, 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x58, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, + 0x29, 0x3a, 0x6e, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, + 0x2c, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x59, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x69, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, + 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, + 0x31, 0x2c, 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, + 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x3d, 0x66, 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, + 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, + 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x66, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x3f, 0x31, 0x3a, + 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, 0x3e, 0x3d, 0x30, + 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, 0x30, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, 0x5d, 0x29, 0x26, + 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, + 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, + 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x72, 0x3b, 0x72, 0x2d, 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, + 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x3b, 0x69, - 0x66, 0x28, 0x69, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x66, - 0x3f, 0x31, 0x3a, 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, - 0x3e, 0x3d, 0x30, 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, - 0x30, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, - 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, - 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x72, 0x3b, 0x72, 0x2d, - 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, - 0x5b, 0x75, 
0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, - 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, - 0x75, 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, - 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, - 0x69, 0x6e, 0x20, 0x65, 0x29, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, - 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x20, 0x69, 0x6e, - 0x20, 0x6e, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x5f, - 0x26, 0x26, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, - 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x5b, 0x6f, - 0x5d, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, - 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x65, 0x63, - 0x6b, 0x65, 0x64, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x65, 0x5b, - 0x6f, 0x5d, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, 0x74, - 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x2c, 0x65, - 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, - 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, - 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, - 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, - 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x56, 0x2e, 0x74, 0x65, 0x73, 0x74, - 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, - 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, - 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, - 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, - 0x69, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x69, 0x29, 0x66, 0x6f, 0x72, 0x28, - 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x69, 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, - 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, - 0x65, 0x29, 
0x69, 0x26, 0x26, 0x65, 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, - 0x69, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, - 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, - 0x2c, 0x22, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, 0x74, 0x6f, - 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x69, - 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, - 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, 0x69, 0x63, - 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, - 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, 0x74, 0x2e, - 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, 0x6e, 0x2b, - 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x69, 0x7c, 0x7c, 0x74, 0x2e, - 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, - 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, - 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, - 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, 0x6e, - 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, - 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, - 0x4c, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x5f, - 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, 0x48, 0x7c, 0x3a, 0x68, - 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x24, 0x2f, - 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, - 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x22, 0x21, 0x3d, 0x3d, - 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, 0x65, 0x66, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, 0x6d, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, 0x62, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x64, 0x6f, - 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, - 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, - 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, 0x53, 0x70, 0x61, 0x6e, - 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, - 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x3b, - 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x63, 0x61, 0x74, 0x63, - 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, - 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x22, 0x2d, - 0x22, 0x21, 
0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, 0x3f, 0x74, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, - 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, - 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, - 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x28, 0x77, 0x2e, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x77, 0x2e, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x73, 0x2c, 0x63, - 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, - 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x78, - 0x2c, 0x43, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, - 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x68, 0x26, 0x26, 0x28, 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, - 0x75, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x5d, 0x29, 0x2c, 0x28, 0x73, 0x3d, 0x77, - 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x73, 0x28, 0x6e, 0x29, 0x3b, - 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x45, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x79, 0x3d, - 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, 0x28, 0x73, - 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, - 0x70, 0x65, 0x29, 0x26, 0x26, 0x69, 0x5b, 0x73, 0x2e, 0x5f, 0x5f, 0x63, - 0x5d, 0x2c, 0x67, 0x3d, 0x73, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x73, 0x2e, - 0x5f, 0x5f, 0x3a, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x3f, 0x76, - 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x65, 0x2e, - 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, - 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, - 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, 0x28, 0x6e, - 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x49, - 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x72, 
0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, 0x63, 0x2e, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, 0x2c, 0x6d, - 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, 0x2c, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x6e, - 0x3d, 0x69, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, - 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, - 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x28, 0x63, - 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, 0x67, 0x65, - 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, - 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, - 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, - 0x2c, 0x46, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, 0x2e, 0x67, - 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, - 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, 0x2c, 0x61, - 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x63, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x76, - 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, + 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, 0x75, + 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, 0x31, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, + 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, + 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, + 0x41, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, + 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, + 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, + 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x5f, 0x26, + 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, + 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x2c, + 0x5f, 0x29, 
0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x5f, + 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x5f, 0x26, 0x26, 0x65, + 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, 0x5f, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, + 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, + 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, + 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x28, 0x50, 0x6f, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, + 0x29, 0x24, 0x7c, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x24, 0x31, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, + 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, + 0x29, 0x69, 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, + 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, + 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, + 0x63, 0x65, 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, + 0x74, 0x2e, 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, + 0x6e, 0x2b, 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x5f, 0x3f, 0x65, + 0x2e, 0x75, 0x3d, 0x5f, 0x2e, 0x75, 0x3a, 0x28, 0x65, 0x2e, 0x75, 0x3d, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2c, 0x74, + 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x69, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, + 0x48, 0x7c, 0x3a, 0x68, 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, + 0x65, 0x66, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, + 0x73, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, + 0x72, 0x6d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, + 0x62, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, + 0x26, 0x22, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, + 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, + 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, + 0x72, 0x6f, 0x6c, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, + 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, + 0x6e, 0x5d, 
0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, + 0x22, 0x3a, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, + 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, + 0x26, 0x26, 0x22, 0x2d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, + 0x3f, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, + 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, + 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x74, 0x2e, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x74, 0x3c, 0x3d, 0x6e, 0x2e, 0x75, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x74, 0x3d, 0x44, + 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x28, 0x43, 0x2e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x74, + 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x43, 0x2e, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x63, 0x2c, + 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, 0x2c, + 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x77, 0x2c, + 0x78, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x31, 0x32, 0x38, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x26, + 0x28, 0x66, 0x3d, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x65, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x3d, 0x6e, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x5d, 0x29, 0x2c, 0x28, + 0x6c, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x6c, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x45, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, + 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, + 0x28, 0x6c, 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x54, 0x79, 0x70, 0x65, 0x29, 0x26, 0x26, 0x5f, 0x5b, 0x6c, 0x2e, 0x5f, + 0x5f, 0x63, 0x5d, 0x2c, 0x67, 0x3d, 0x6c, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x6c, 0x2e, 
0x5f, 0x5f, 0x3a, 0x5f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, + 0x3f, 0x76, 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, + 0x5f, 0x5f, 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, + 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, + 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, + 0x20, 0x49, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, + 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, + 0x2c, 0x6d, 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, + 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x6e, 0x3d, 0x5f, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, + 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, + 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, + 0x73, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x29, 0x29, 0x2c, 0x4d, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x63, + 0x73, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, + 0x2c, 0x61, 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, + 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x76, 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, + 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x4d, 0x6f, - 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x5f, - 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 
0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, - 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, - 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x79, - 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, - 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, 0x2c, 0x67, - 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, - 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x63, - 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, 0x7c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, - 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, - 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x29, 0x7d, - 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, 0x2e, 0x5f, - 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x62, 0x2b, - 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, 0x3b, 0x63, - 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, - 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x72, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, 0x65, 0x61, - 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, - 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, - 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x29, - 0x7d, 0x29, 0x29, 
0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x74, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x3d, - 0x77, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, 0x22, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, - 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, - 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, - 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, - 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, - 0x78, 0x3d, 0x30, 0x3b, 0x78, 0x3c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x78, 0x2b, 0x2b, 0x29, 0x63, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, - 0x5f, 0x73, 0x62, 0x5b, 0x78, 0x5d, 0x29, 0x3b, 0x63, 0x2e, 0x5f, 0x73, - 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x64, 0x6f, - 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, - 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, 0x63, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x2b, - 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x46, 0x28, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, 0x2c, 0x63, - 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, 0x7c, 0x7c, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, - 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, - 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x64, 0x3d, - 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, - 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x7a, 0x28, 0x74, 0x2c, - 0x41, 0x28, 0x43, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x26, - 0x26, 0x73, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, 0x52, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x73, 0x2e, 0x6b, 0x65, 0x79, - 0x3f, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x73, 0x29, 0x3f, 0x43, 0x3a, 0x5b, - 0x43, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, - 0x2c, 0x72, 0x2c, 
0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x2c, 0x63, 0x2e, - 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, + 0x26, 0x79, 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, + 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, + 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, + 0x2c, 0x67, 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, + 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, + 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, + 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, + 0x5f, 0x76, 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, + 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, + 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, + 0x62, 0x2b, 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, + 0x3b, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, - 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x2c, 0x76, 0x26, - 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, + 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 
0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, + 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, + 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, + 0x64, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, + 0x3d, 0x74, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, + 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, + 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, + 0x6c, 0x3d, 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x29, 0x2c, 0x77, 0x3d, 0x30, 0x3b, 0x77, 0x3c, 0x63, 0x2e, 0x5f, 0x73, + 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x77, 0x2b, 0x2b, + 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x77, 0x5d, 0x29, 0x3b, 0x63, 0x2e, + 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x64, 0x6f, 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x6c, 0x3d, 0x63, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, + 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, + 0x26, 0x2b, 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, + 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, + 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x5f, 0x29, + 0x2c, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, + 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, + 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, + 0x64, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, + 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x4a, 0x28, + 0x74, 0x2c, 0x46, 0x28, 0x78, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x6c, 0x26, 0x26, 0x6c, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, + 0x6a, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6c, 0x2e, 0x6b, + 0x65, 0x79, 0x3f, 
0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x6c, 0x29, 0x3f, 0x78, + 0x3a, 0x5b, 0x78, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x2c, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, + 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, 0x2d, 0x31, 0x36, 0x31, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x26, 0x26, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, + 0x2c, 0x76, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, + 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x6f, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, + 0x75, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x66, 0x3f, 0x31, + 0x36, 0x30, 0x3a, 0x33, 0x32, 0x2c, 0x6f, 0x5b, 0x6f, 0x2e, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x6f, 0x74, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, - 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x3b, 0x28, 0x73, 0x3d, - 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x73, - 0x28, 0x6e, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, - 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x28, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, - 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x75, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x21, 0x21, 0x66, 0x2c, 0x6f, 0x5b, 0x6f, - 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, - 0x69, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x69, 0x5d, 0x2c, 0x65, 0x5b, - 0x2b, 0x2b, 0x69, 0x5d, 0x29, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x26, - 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, - 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, - 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x2e, 0x63, 0x61, 
0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, - 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, - 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x6f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x73, 0x2c, 0x63, 0x2c, 0x68, - 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x61, 0x3d, 0x6e, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x6e, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x2c, 0x64, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x5f, + 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x3b, 0x28, 0x6c, 0x3d, + 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x6c, + 0x28, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x6e, + 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x30, 0x3b, + 0x5f, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, + 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, 0x5f, 0x5d, 0x2c, 0x65, + 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, + 0x29, 0x3b, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x43, 0x2e, 0x5f, + 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3d, 0x6e, 0x2e, + 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, + 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x73, 0x2c, 0x6c, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, + 0x2c, 0x64, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2c, 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, + 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x28, 0x69, 0x3d, 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, - 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x64, 0x3c, 0x6f, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x64, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, - 0x28, 0x6c, 0x3d, 0x6f, 0x5b, 0x64, 0x5d, 0x29, 0x26, 0x26, 0x22, 0x73, - 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22, - 0x69, 0x6e, 0x20, 0x6c, 0x3d, 0x3d, 0x21, 0x21, 0x70, 0x26, 0x26, 0x28, - 0x70, 0x3f, 0x6c, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x61, 0x6d, - 0x65, 0x3d, 0x3d, 0x3d, 0x70, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x6c, 0x2e, - 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x7b, 0x74, - 0x3d, 0x6c, 0x2c, 0x6f, 0x5b, 
0x64, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, - 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x70, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x4e, 0x6f, 0x64, - 0x65, 0x28, 0x61, 0x29, 0x3b, 0x74, 0x3d, 0x5f, 0x3f, 0x64, 0x6f, 0x63, + 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x6f, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, + 0x69, 0x66, 0x28, 0x28, 0x61, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x26, + 0x26, 0x22, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x3d, 0x3d, 0x21, 0x21, 0x6d, + 0x26, 0x26, 0x28, 0x6d, 0x3f, 0x61, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x3d, 0x3d, 0x6d, 0x3a, 0x33, 0x3d, 0x3d, + 0x3d, 0x61, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x7b, 0x74, 0x3d, 0x61, 0x2c, 0x6f, 0x5b, 0x73, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x79, 0x29, 0x3b, 0x74, 0x3d, 0x69, 0x3f, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, + 0x28, 0x22, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, + 0x2e, 0x77, 0x33, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, + 0x2f, 0x73, 0x76, 0x67, 0x22, 0x2c, 0x6d, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, 0x28, 0x22, 0x68, - 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x77, 0x33, - 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, 0x2f, 0x73, 0x76, - 0x67, 0x22, 0x2c, 0x70, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x70, 0x2c, 0x61, 0x2e, 0x69, 0x73, 0x26, - 0x26, 0x61, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, - 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x3d, 0x70, 0x29, 0x68, 0x3d, 0x3d, 0x3d, 0x61, 0x7c, 0x7c, 0x75, - 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x3d, 0x3d, 0x61, - 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x61, 0x29, - 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x6f, 0x3d, 0x6f, - 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x2c, 0x73, - 0x3d, 0x28, 0x68, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, - 0x7c, 0x44, 0x29, 0x2e, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, - 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, - 0x54, 0x4d, 0x4c, 0x2c, 0x63, 0x3d, 0x61, 0x2e, 0x64, 0x61, 0x6e, 0x67, - 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, - 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x2c, 0x21, 0x75, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 
0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, - 0x6f, 0x72, 0x28, 0x68, 0x3d, 0x7b, 0x7d, 0x2c, 0x64, 0x3d, 0x30, 0x3b, - 0x64, 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, - 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x64, 0x2b, - 0x2b, 0x29, 0x68, 0x5b, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, - 0x75, 0x74, 0x65, 0x73, 0x5b, 0x64, 0x5d, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x5d, 0x3d, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, - 0x65, 0x73, 0x5b, 0x64, 0x5d, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, - 0x28, 0x63, 0x7c, 0x7c, 0x73, 0x29, 0x26, 0x26, 0x28, 0x63, 0x26, 0x26, - 0x28, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, - 0x3d, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x7c, 0x7c, - 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x74, - 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x29, 0x7c, - 0x7c, 0x28, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, - 0x4c, 0x3d, 0x63, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, - 0x6c, 0x7c, 0x7c, 0x22, 0x22, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x59, - 0x28, 0x74, 0x2c, 0x61, 0x2c, 0x68, 0x2c, 0x5f, 0x2c, 0x75, 0x29, 0x2c, - 0x63, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x3b, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x7a, 0x28, 0x74, 0x2c, 0x41, - 0x28, 0x64, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x29, 0x3f, 0x64, 0x3a, 0x5b, - 0x64, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x26, 0x26, - 0x22, 0x66, 0x6f, 0x72, 0x65, 0x69, 0x67, 0x6e, 0x4f, 0x62, 0x6a, 0x65, - 0x63, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x70, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, - 0x6f, 0x3f, 0x6f, 0x5b, 0x30, 0x5d, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, - 0x26, 0x26, 0x6a, 0x28, 0x65, 0x2c, 0x30, 0x29, 0x2c, 0x75, 0x2c, 0x66, - 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, - 0x72, 0x28, 0x64, 0x3d, 0x6f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x64, 0x2d, 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x6f, 0x5b, 0x64, 0x5d, 0x26, 0x26, 0x4d, 0x28, 0x6f, 0x5b, 0x64, 0x5d, - 0x29, 0x3b, 0x75, 0x7c, 0x7c, 0x28, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x69, 0x6e, 0x20, 0x61, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x28, 0x64, 0x3d, 0x61, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x26, 0x26, 0x28, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7c, 0x7c, 0x22, 0x70, 0x72, 0x6f, 0x67, - 0x72, 0x65, 0x73, 0x73, 0x22, 0x3d, 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x21, - 0x64, 0x7c, 0x7c, 0x22, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x68, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, 0x2c, - 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x64, 0x2c, 0x68, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x21, 0x31, 0x29, 0x2c, 0x22, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x28, 0x64, - 0x3d, 0x61, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x29, 0x26, - 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, - 0x65, 0x64, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x22, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x2c, 0x64, 0x2c, 0x68, 0x2e, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x21, 0x31, 0x29, 0x29, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x74, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x7d, 0x63, 0x61, - 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, 0x2e, 0x5f, 0x5f, 0x65, - 0x28, 0x74, 0x2c, 0x65, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x3b, 0x69, 0x66, - 0x28, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, - 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x74, 0x29, - 0x2c, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, - 0x28, 0x69, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x26, 0x26, - 0x69, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x21, 0x3d, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, 0x72, 0x74, 0x28, 0x69, 0x2c, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, - 0x75, 0x6e, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, - 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x69, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, - 0x69, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x74, - 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x69, 0x66, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, - 0x6f, 0x72, 0x28, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x69, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, 0x69, 0x5b, - 0x5f, 0x5d, 0x26, 0x26, 0x75, 0x74, 0x28, 0x69, 0x5b, 0x5f, 0x5d, 0x2c, - 0x6e, 0x2c, 0x65, 0x7c, 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x3b, 0x65, 0x7c, 0x7c, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, - 0x4d, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x6f, 0x72, 0x28, 0x74, 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x2c, - 0x6f, 0x2c, 0x72, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x77, 0x2e, - 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x2c, 0x5f, 0x3d, 0x28, 0x69, - 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x3f, 0x6e, - 0x75, 0x6c, 0x6c, 0x3a, 0x65, 
0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, - 0x7c, 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, - 0x2c, 0x72, 0x3d, 0x5b, 0x5d, 0x2c, 0x69, 0x74, 0x28, 0x6e, 0x2c, 0x74, - 0x3d, 0x28, 0x21, 0x69, 0x26, 0x26, 0x65, 0x7c, 0x7c, 0x6e, 0x29, 0x2e, - 0x5f, 0x5f, 0x6b, 0x3d, 0x57, 0x28, 0x52, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x5b, 0x74, 0x5d, 0x29, 0x2c, 0x5f, 0x7c, 0x7c, 0x44, 0x2c, 0x44, - 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, - 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x2c, 0x21, 0x69, 0x26, 0x26, 0x65, 0x3f, 0x5b, 0x65, - 0x5d, 0x3a, 0x5f, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x6e, 0x2e, 0x66, - 0x69, 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x78, 0x2e, - 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, - 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x6f, 0x2c, 0x21, 0x69, 0x26, 0x26, 0x65, 0x3f, 0x65, 0x3a, 0x5f, 0x3f, - 0x5f, 0x2e, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, - 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x2c, 0x69, 0x2c, 0x72, 0x29, 0x2c, - 0x5f, 0x74, 0x28, 0x6f, 0x2c, 0x74, 0x2c, 0x72, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x73, 0x74, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x73, 0x74, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, + 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x6d, 0x2c, 0x79, 0x2e, + 0x69, 0x73, 0x26, 0x26, 0x79, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, + 0x6c, 0x2c, 0x75, 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x76, 0x3d, 0x3d, 0x3d, 0x79, + 0x7c, 0x7c, 0x75, 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, + 0x3d, 0x3d, 0x79, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x3d, 0x79, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, + 0x6f, 0x3d, 0x6f, 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, + 0x29, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, + 0x7c, 0x54, 0x2c, 0x21, 0x75, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x3d, 0x7b, 0x7d, 0x2c, + 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, + 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x76, 0x5b, 0x28, 0x61, 0x3d, 0x74, + 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x5b, + 0x73, 0x5d, 0x29, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3d, 0x61, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x20, + 0x69, 0x6e, 0x20, 0x76, 0x29, 0x61, 0x3d, 0x76, 0x5b, 0x73, 0x5d, 0x2c, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x73, 0x7c, 0x7c, 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x63, 0x3d, 0x61, + 0x3a, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x73, 0x7c, 0x7c, + 0x73, 0x20, 0x69, 0x6e, 0x20, 0x79, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, + 0x2c, 0x73, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x2c, 0x69, 0x29, + 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x79, + 0x29, 0x61, 0x3d, 0x79, 0x5b, 0x73, 0x5d, 0x2c, 0x22, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 
0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x68, 0x3d, + 0x61, 0x3a, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, + 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, + 0x4d, 0x4c, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x6c, 0x3d, 0x61, 0x3a, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x70, 0x3d, + 0x61, 0x3a, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x3d, + 0x3d, 0x73, 0x3f, 0x64, 0x3d, 0x61, 0x3a, 0x22, 0x6b, 0x65, 0x79, 0x22, + 0x3d, 0x3d, 0x3d, 0x73, 0x7c, 0x7c, 0x75, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x7c, 0x7c, 0x76, 0x5b, 0x73, 0x5d, 0x3d, + 0x3d, 0x3d, 0x61, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x73, 0x2c, + 0x61, 0x2c, 0x76, 0x5b, 0x73, 0x5d, 0x2c, 0x69, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x6c, 0x29, 0x75, 0x7c, 0x7c, 0x63, 0x26, 0x26, 0x28, 0x6c, 0x2e, + 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, + 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x7c, 0x7c, 0x6c, 0x2e, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, + 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x29, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x69, + 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x6c, 0x2e, 0x5f, + 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, + 0x3d, 0x5b, 0x5d, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, + 0x63, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, + 0x54, 0x4d, 0x4c, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x4a, 0x28, 0x74, 0x2c, + 0x46, 0x28, 0x68, 0x29, 0x3f, 0x68, 0x3a, 0x5b, 0x68, 0x5d, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, + 0x65, 0x69, 0x67, 0x6e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6d, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x6f, 0x3f, 0x6f, 0x5b, + 0x30, 0x5d, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x26, 0x26, 0x71, 0x28, + 0x65, 0x2c, 0x30, 0x29, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x2c, 0x6e, 0x75, + 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, + 0x6f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2d, 0x2d, + 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, + 0x26, 0x26, 0x57, 0x28, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x3b, 0x75, 0x7c, + 0x7c, 0x28, 0x73, 0x3d, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x70, 0x26, 0x26, + 0x28, 0x70, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x73, 0x5d, 0x7c, 0x7c, 0x22, + 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x22, 0x3d, 0x3d, 0x3d, + 0x6d, 0x26, 0x26, 0x21, 0x70, 0x7c, 0x7c, 0x22, 0x6f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x70, 0x21, 0x3d, + 0x3d, 0x76, 0x5b, 0x73, 0x5d, 0x29, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, + 0x2c, 0x73, 0x2c, 0x70, 0x2c, 0x76, 0x5b, 0x73, 0x5d, 0x2c, 0x21, 0x31, + 0x29, 0x2c, 0x73, 0x3d, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x22, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x64, + 0x26, 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x73, 0x5d, 0x26, 0x26, + 0x74, 0x74, 0x28, 0x74, 0x2c, 0x73, 0x2c, 0x64, 0x2c, 0x76, 0x5b, 0x73, + 0x5d, 0x2c, 0x21, 0x31, 0x29, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x72, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x74, + 0x72, 0x79, 0x7b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 
0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, + 0x74, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x3d, 0x6e, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, + 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x65, 0x29, + 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x46, - 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x29, 0x2c, 0x6e, 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, - 0x6f, 0x3f, 0x69, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, - 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, - 0x3a, 0x75, 0x5b, 0x6f, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, - 0x3a, 0x6e, 0x5b, 0x6f, 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, - 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, - 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, - 0x65, 0x29, 0x2c, 0x4f, 0x28, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, - 0x75, 0x2c, 0x69, 0x7c, 0x7c, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x5f, - 0x7c, 0x7c, 0x74, 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, - 0x3d, 0x7b, 0x5f, 0x5f, 0x63, 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, - 0x43, 0x22, 0x2b, 0x24, 0x2b, 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, - 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x28, 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x64, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, - 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, - 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7c, 0x7c, 0x28, 0x65, 0x3d, 0x5b, 0x5d, - 0x2c, 0x28, 0x69, 0x3d, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, + 0x20, 0x5f, 0x2c, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x43, 0x2e, 0x75, 0x6e, + 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x43, 0x2e, 0x75, 0x6e, 0x6d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x28, 0x5f, 0x3d, 0x74, + 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 
0x26, 0x26, 0x5f, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, + 0x7c, 0x7c, 0x72, 0x74, 0x28, 0x5f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x6e, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x5f, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x74, + 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, + 0x74, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, + 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, + 0x5f, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x50, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x5f, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x69, 0x3d, + 0x30, 0x3b, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, 0x75, + 0x74, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x7c, 0x7c, + 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, + 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x29, 0x3b, 0x65, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, 0x57, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x65, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x66, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x74, + 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x73, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x43, + 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x2c, 0x69, 0x3d, 0x28, 0x5f, 0x3d, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x65, 0x29, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x65, + 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, 0x2c, 0x72, 0x3d, 0x5b, 0x5d, + 0x2c, 0x5f, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x3d, 0x28, 0x21, 0x5f, 0x26, + 0x26, 0x65, 0x7c, 0x7c, 0x6e, 0x29, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x4c, + 0x28, 0x6a, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5b, 0x74, 0x5d, 0x29, + 0x2c, 0x69, 0x7c, 0x7c, 0x54, 0x2c, 0x54, 0x2c, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, + 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x21, + 0x5f, 0x26, 0x26, 0x65, 0x3f, 0x5b, 0x65, 0x5d, 0x3a, 0x69, 0x3f, 0x6e, + 0x75, 0x6c, 0x6c, 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x6e, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6f, 0x2c, 0x21, 0x5f, 0x26, + 0x26, 0x65, 0x3f, 0x65, 0x3a, 
0x69, 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x65, + 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, + 0x64, 0x2c, 0x5f, 0x2c, 0x72, 0x29, 0x2c, 0x69, 0x74, 0x28, 0x6f, 0x2c, + 0x74, 0x2c, 0x72, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x73, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x6c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x74, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, + 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x2c, 0x6e, 0x29, + 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, + 0x3f, 0x69, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x75, 0x5b, 0x6f, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, + 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x5b, 0x6f, 0x5d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x72, 0x67, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x4f, 0x28, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x75, 0x2c, 0x5f, 0x7c, 0x7c, + 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x7c, 0x7c, 0x74, 0x2e, 0x72, + 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x7b, 0x5f, 0x5f, 0x63, + 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, 0x43, 0x22, 0x2b, 0x44, 0x2b, + 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, 0x43, 0x6f, 0x6e, 0x73, 0x75, + 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x28, + 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, 0x5f, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x68, 0x69, 0x73, 
0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x71, 0x28, - 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x73, 0x75, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x65, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, - 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, - 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, - 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, - 0x65, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, - 0x74, 0x29, 0x2c, 0x31, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, - 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, - 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x78, 0x74, 0x54, 0x79, 0x70, 0x65, 0x3d, 0x65, 0x7d, 0x78, 0x3d, 0x54, - 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x2c, 0x77, 0x3d, 0x7b, 0x5f, 0x5f, - 0x65, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, - 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x28, 0x5f, 0x3d, - 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x26, 0x26, 0x21, 0x5f, 0x2e, 0x5f, - 0x5f, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, - 0x5f, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, - 0x72, 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, - 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x28, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, - 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x5f, - 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x26, 0x26, 0x28, 0x5f, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, - 0x43, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x2c, 0x69, 0x7c, 0x7c, 0x7b, - 0x7d, 0x29, 0x2c, 0x72, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, - 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x5f, - 0x5f, 0x45, 0x3d, 0x5f, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, - 0x29, 0x7b, 0x74, 0x3d, 0x6e, 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, - 0x74, 0x7d, 0x7d, 0x2c, 0x43, 0x3d, 0x30, 0x2c, 0x45, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 
0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, - 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, - 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, - 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x65, 0x3d, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x26, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x21, 0x3d, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3f, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3a, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x29, 0x2c, 0x22, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x28, 0x74, 0x3d, 0x74, - 0x28, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x65, 0x29, 0x2c, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x29, 0x2c, 0x74, 0x26, - 0x26, 0x46, 0x28, 0x65, 0x2c, 0x74, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x76, 0x26, 0x26, 0x28, 0x6e, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x73, 0x62, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x2c, - 0x71, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x49, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x66, 0x6f, - 0x72, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x74, 0x26, 0x26, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x74, 0x29, 0x2c, 0x71, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, - 0x29, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x52, 0x2c, - 0x55, 0x3d, 0x5b, 0x5d, 0x2c, 0x4e, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x3f, 0x50, 0x72, - 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, - 0x64, 0x28, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x72, 0x65, - 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x29, 0x29, 0x3a, 0x73, 0x65, 0x74, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x2c, 0x50, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x76, - 0x2e, 0x5f, 0x5f, 0x62, 0x2d, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, - 0x5f, 0x62, 0x7d, 0x2c, 0x47, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x2c, - 0x24, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x61, 0x74, 0x2c, 0x70, - 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, 0x74, 0x2c, 0x79, 0x74, 0x3d, 0x30, - 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x67, 0x74, 0x3d, 0x5b, 0x5d, - 0x2c, 0x62, 0x74, 0x3d, 0x77, 0x2e, 0x5f, 0x5f, 0x62, 0x2c, 0x6b, 0x74, - 0x3d, 0x77, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x74, 0x3d, 0x77, 0x2e, - 0x64, 0x69, 0x66, 0x66, 0x65, 
0x64, 0x2c, 0x78, 0x74, 0x3d, 0x77, 0x2e, - 0x5f, 0x5f, 0x63, 0x2c, 0x77, 0x74, 0x3d, 0x77, 0x2e, 0x75, 0x6e, 0x6d, - 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x2e, - 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x68, 0x28, 0x70, - 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, 0x6e, 0x29, 0x2c, 0x79, - 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x70, 0x74, - 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, 0x2c, 0x5f, 0x5f, 0x68, - 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, 0x67, 0x74, 0x7d, 0x29, - 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x31, 0x2c, - 0x55, 0x74, 0x28, 0x42, 0x74, 0x2c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x43, 0x74, - 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, 0x3b, 0x69, 0x66, 0x28, - 0x69, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, - 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x5b, 0x65, 0x3f, 0x65, - 0x28, 0x6e, 0x29, 0x3a, 0x42, 0x74, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x69, - 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x5b, 0x30, - 0x5d, 0x3a, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x2c, 0x65, 0x3d, - 0x69, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x3b, 0x6e, 0x21, 0x3d, - 0x3d, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x5b, - 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, 0x5d, 0x5d, 0x2c, 0x69, - 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, 0x2c, 0x69, 0x2e, 0x5f, - 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, 0x74, 0x2e, 0x75, 0x29, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, - 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, - 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x65, - 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, 0x29, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, - 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x21, 0x31, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, 0x20, 0x5f, 0x2e, 0x66, 0x6f, 0x72, - 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x21, 0x30, - 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, 0x21, 0x72, 0x26, 0x26, - 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, - 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, - 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, 0x70, 0x74, 0x2e, 0x75, - 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x70, 0x74, - 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x2c, 0x72, - 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3b, - 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x6f, 0x3b, - 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, 0x69, 0x7d, 0x72, 0x26, - 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x2c, 0x70, 0x74, 0x2e, - 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x5f, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, - 0x7c, 0x7c, 0x69, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, - 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x77, 0x2e, 0x5f, 0x5f, 0x73, 0x26, - 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, - 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, - 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, - 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4e, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, - 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, 0x29, 0x3b, 0x21, 0x77, - 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, - 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, - 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, - 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x7c, 0x7c, 0x28, 0x65, 0x3d, 
0x5b, 0x5d, 0x2c, 0x28, 0x5f, 0x3d, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, + 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, - 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x4e, 0x74, 0x28, 0x28, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, - 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, - 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, - 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, 0x28, 0x74, 0x29, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x44, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, - 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, - 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, - 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, - 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x38, 0x2c, - 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, - 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, 0x65, 0x3d, 0x43, 0x74, - 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, - 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, - 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, - 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x2e, - 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x26, 0x26, 0x77, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, - 0x67, 0x56, 0x61, 0x6c, 0x75, 
0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, - 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, - 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x70, - 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x70, 0x74, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, - 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, - 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x69, 0x29, - 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, - 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, - 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, - 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, - 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, - 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, - 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, - 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, - 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, - 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x74, 0x28, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, - 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, - 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, - 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, - 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, - 0x68, 0x28, 0x49, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, - 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x75, - 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x77, 0x2e, 0x5f, - 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x62, - 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x77, 0x2e, - 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, 0x6b, 0x74, 0x28, 0x74, - 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x3d, 0x28, 0x70, 0x74, 0x3d, 
0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, - 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, 0x64, 0x74, 0x3d, 0x3d, - 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, - 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, - 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, - 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, - 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, - 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x49, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, - 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, - 0x70, 0x74, 0x7d, 0x2c, 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, - 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, - 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x6d, 0x74, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, 0x26, 0x76, 0x74, 0x3d, - 0x3d, 0x3d, 0x77, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, - 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, - 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, 0x77, 0x2e, 0x72, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, 0x4c, 0x74, - 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, + 0x6e, 0x20, 0x5f, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, + 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x47, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, + 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x75, 0x62, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x65, + 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, + 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x65, 0x2e, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, 0x29, 0x2c, 0x31, 0x29, + 0x2c, 0x6e, 0x26, 0x26, 0x6e, 
0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, + 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, + 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, 0x70, + 0x65, 0x3d, 0x65, 0x7d, 0x78, 0x3d, 0x56, 0x2e, 0x73, 0x6c, 0x69, 0x63, + 0x65, 0x2c, 0x43, 0x3d, 0x7b, 0x5f, 0x5f, 0x65, 0x3a, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x29, 0x69, 0x66, 0x28, 0x28, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, + 0x29, 0x26, 0x26, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x29, 0x74, 0x72, 0x79, + 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, 0x69, 0x2e, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x29, 0x26, 0x26, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, + 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, + 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x26, 0x26, 0x28, 0x69, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x6f, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x64, 0x29, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x69, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, + 0x63, 0x68, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x74, 0x2c, 0x5f, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x2c, 0x72, 0x3d, + 0x69, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x72, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x69, 0x7d, + 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x6e, + 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x7d, 0x2c, 0x45, + 0x3d, 0x30, 0x2c, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, + 0x63, 0x74, 0x6f, 0x72, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x65, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x73, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x3f, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x4d, 0x28, 0x7b, + 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, + 0x28, 0x74, 0x3d, 0x74, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x65, 0x29, + 0x2c, 0x74, 0x68, 0x69, 0x73, 
0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x4d, 0x28, 0x65, 0x2c, 0x74, 0x29, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x6e, 0x26, 0x26, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x6e, 0x29, 0x2c, 0x47, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, + 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, + 0x2c, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x47, 0x28, 0x74, + 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x3d, 0x6a, 0x2c, 0x48, 0x3d, 0x5b, 0x5d, 0x2c, 0x4e, 0x3d, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, + 0x65, 0x3f, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x68, 0x65, 0x6e, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, + 0x65, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x29, 0x29, + 0x3a, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x2c, + 0x24, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x2d, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x7d, 0x2c, 0x7a, 0x2e, 0x5f, 0x5f, + 0x72, 0x3d, 0x30, 0x2c, 0x44, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x61, 0x74, 0x2c, 0x70, 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, 0x74, 0x2c, + 0x79, 0x74, 0x3d, 0x30, 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x67, + 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x62, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, + 0x62, 0x2c, 0x6b, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, + 0x74, 0x3d, 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x2c, 0x77, + 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x2c, 0x78, 0x74, 0x3d, 0x43, + 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x43, 0x2e, 0x5f, + 0x5f, 0x68, 0x28, 0x70, 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, + 0x6e, 0x29, 0x2c, 0x79, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x65, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, + 0x2c, 0x5f, 0x5f, 0x68, 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, + 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, + 0x67, 0x74, 0x7d, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x31, 0x2c, 0x55, 
0x74, 0x28, 0x71, 0x74, 0x2c, 0x74, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x5f, + 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, 0x5f, 0x3d, + 0x5b, 0x65, 0x3f, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x71, 0x74, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x5f, 0x2e, 0x5f, + 0x5f, 0x4e, 0x5b, 0x30, 0x5d, 0x3a, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, + 0x5d, 0x2c, 0x65, 0x3d, 0x5f, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, + 0x3b, 0x6e, 0x21, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, + 0x5f, 0x4e, 0x3d, 0x5b, 0x65, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, + 0x5d, 0x5d, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, + 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, + 0x74, 0x2e, 0x75, 0x29, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x5f, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x5f, 0x2e, 0x5f, + 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, + 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x69, 0x2e, 0x65, 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, + 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, + 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, + 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, + 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, + 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, + 0x21, 0x72, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, + 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, + 0x70, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x6f, 0x3d, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x2c, 0x72, 0x3d, 
0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x3b, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x2c, 0x69, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, + 0x5f, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, + 0x2c, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x3d, 0x69, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, + 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x5f, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x43, 0x2e, + 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, + 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, + 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, + 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x50, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, + 0x29, 0x3b, 0x21, 0x43, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, + 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, + 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, + 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x50, + 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, + 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x29, 
0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, + 0x28, 0x74, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x44, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, + 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, + 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x49, 0x74, + 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, + 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, + 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, + 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x38, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, + 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, + 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, + 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, + 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x43, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x75, 0x73, 0x65, + 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x28, 0x6e, + 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, + 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, + 0x3d, 0x74, 0x2c, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, + 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, + 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x5f, 0x29, + 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, + 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, + 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, + 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, + 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x70, 0x74, 
0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, + 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, + 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, + 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, + 0x5d, 0x29, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, + 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, + 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x57, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, + 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, + 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, + 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, + 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, + 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, + 0x61, 0x74, 0x63, 0x68, 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x43, 0x2e, 0x5f, + 0x5f, 0x65, 0x28, 0x75, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, + 0x7d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x62, 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, + 0x7d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, + 0x6b, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, + 0x64, 0x74, 0x3d, 0x3d, 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, + 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, + 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, + 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, + 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, + 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, 0x7d, 0x2c, 0x43, 0x2e, 0x64, 0x69, + 0x66, 0x66, 0x65, 0x64, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, + 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, + 0x26, 0x28, 0x6e, 0x2e, 0x5f, 
0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, + 0x3d, 0x6d, 0x74, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, + 0x26, 0x76, 0x74, 0x3d, 0x3d, 0x3d, 0x43, 0x2e, 0x72, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x46, 0x72, 0x61, 0x6d, 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, + 0x43, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, + 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, + 0x7c, 0x7c, 0x4f, 0x74, 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, + 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, + 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x21, 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, + 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, + 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, + 0x74, 0x3d, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x43, + 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, + 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, + 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x6a, 0x74, + 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x6c, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x68, 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, + 0x5d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6c, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x77, 0x74, 0x26, + 0x26, 0x77, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x43, 0x2e, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x78, 0x74, 0x26, 0x26, + 0x78, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, + 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, + 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x48, - 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x21, - 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, - 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 
0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x63, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, - 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, - 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7d, - 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, - 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, - 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, - 0x5f, 0x5f, 0x65, 0x28, 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, - 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x78, 0x74, 0x26, 0x26, 0x78, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, - 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x77, 0x74, 0x26, 0x26, 0x77, 0x74, 0x28, 0x74, - 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, - 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, - 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, - 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x6e, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, - 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, - 0x20, 0x4f, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4f, - 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, - 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, - 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, - 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, - 0x29, 0x3b, 0x4f, 0x74, 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2c, 0x65, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 
0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, + 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, + 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, + 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, + 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4c, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, + 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, + 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, + 0x5f, 0x29, 0x2c, 0x4c, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, + 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x5f, 0x3d, + 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, + 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4c, 0x74, 0x26, 0x26, 0x28, 0x6e, + 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, + 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x52, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, + 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, + 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x49, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, + 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, - 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 
0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, - 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, - 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, - 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, - 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x42, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, - 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x77, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x77, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, - 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, 0x3b, 0x7a, 0x74, 0x3d, - 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, 0x28, 0x7b, 0x64, 0x61, - 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, - 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, - 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, - 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, - 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x21, - 0x45, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x29, 0x26, - 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, - 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3a, 0x74, 0x2e, 0x6e, - 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, - 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x3b, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, - 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x7d, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x30, 0x3a, - 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, 0x22, 0x3a, 0x74, 0x7c, - 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 
0x2c, 0x5b, 0x5d, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, - 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, 0x74, 0x22, 0x3b, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, + 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x43, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, + 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x43, 0x5b, 0x74, + 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, + 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x69, 0x66, 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, + 0x3b, 0x7a, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, + 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, + 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, + 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, + 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x55, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, + 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, + 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, + 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, + 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, + 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, + 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, + 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 
0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, + 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, + 0x65, 0x73, 0x28, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, + 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, + 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, 0x79, 0x70, 0x65, 0x3a, - 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x4b, - 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3a, 0x7b, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, - 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x68, 0x69, 0x73, - 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, - 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, 0x7d, 0x7d, 0x29, 0x3b, - 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, + 0x7d, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, + 0x6f, 0x66, 0x20, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, + 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, 0x5d, 0x3d, + 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x5f, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x7b, + 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, + 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 
0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, + 0x3b, 0x77, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, + 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, 0x73, 0x65, + 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, + 0x47, 0x74, 0x3d, 0x5f, 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, + 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, + 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, + 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, + 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, + 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, + 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, + 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, + 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, + 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, + 0x29, 0x7b, 0x6f, 0x3d, 0x51, 0x74, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x72, + 0x2c, 0x5f, 0x29, 0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x5f, 0x29, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x6f, 0x3d, 0x61, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, + 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, + 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, + 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, + 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x42, 0x74, 0x28, 0x22, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x63, - 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, 0x6c, 0x65, 0x74, 0x20, - 0x5f, 0x3d, 0x65, 0x5b, 0x69, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x20, - 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x63, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, - 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, 0x74, 0x5b, 0x69, 0x5d, - 0x3d, 0x5f, 0x3b, 0x65, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x63, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x29, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, 0x3d, 0x69, 0x2e, 0x5f, - 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, - 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x53, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x6e, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, 0x3b, 0x6e, 0x2e, 0x63, - 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, - 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x47, 0x74, 0x3d, 0x69, - 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, - 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, - 0x3b, 0x71, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, - 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 
0x3e, 0x7b, 0x4a, 0x74, 0x28, - 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x65, - 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x2c, 0x69, 0x3d, - 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x69, 0x66, 0x28, 0x74, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x65, 0x2e, 0x55, 0x3b, - 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x69, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x21, - 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x29, 0x7b, 0x69, 0x2e, - 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6e, 0x3d, - 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, 0x7d, 0x66, 0x6f, 0x72, - 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x2c, - 0x72, 0x3d, 0x74, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, 0x29, 0x7b, 0x6f, 0x3d, - 0x51, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, - 0x6e, 0x5b, 0x5f, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x69, 0x29, 0x7d, 0x7d, 0x7d, 0x74, - 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x6e, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, - 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6f, 0x3d, - 0x68, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, - 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x6f, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x2c, 0x64, 0x3a, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, - 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x69, 0x5b, 0x6e, - 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x74, 0x5b, 0x6e, - 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, - 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, - 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, - 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x7d, - 0x7d, 0x29, 0x7d, 0x7d, 0x71, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, - 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, - 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, - 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 
0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x55, 0x3b, 0x69, - 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, - 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, - 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, - 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, - 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, 0x2e, 0x64, 0x28, 0x29, - 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x71, 0x74, - 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x3c, - 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x6e, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, - 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, - 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, 0x7c, 0x7c, 0x34, 0x26, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x33, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x69, - 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, - 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x5b, 0x69, 0x5d, 0x21, 0x3d, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, - 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, - 0x3e, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x74, 0x28, 0x74, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x50, 0x74, 0x28, - 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, - 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, 0x20, 0x44, 0x74, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, - 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x50, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x48, 0x74, 0x28, 0x28, - 0x29, 0x3d, 0x3e, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, - 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3b, 0x6e, 0x5b, 0x30, - 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x2c, 0x75, 0x3d, 0x6e, - 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x72, - 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, - 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x3b, 0x33, - 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x30, 0x5d, 0x3d, 0x75, 0x3a, - 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, - 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x2c, 0x75, 0x29, - 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, 0x69, 0x5b, 0x31, 0x5d, - 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, - 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, 0x3a, 0x36, 0x3d, 0x3d, - 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, - 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, 0x22, 0x3a, 0x72, 0x3f, - 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x75, - 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, 0x65, 0x2c, 0x5b, 0x22, - 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, 0x29, 0x2c, 0x69, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, 0x2c, 0x75, 0x5b, 0x30, 0x5d, - 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, 0x3a, 0x28, 0x6e, 0x5b, - 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, - 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x75, - 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, - 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x6e, 0x6e, 0x2e, 0x67, - 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, 0x6e, 0x3d, 0x6e, 0x65, - 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, 0x2e, 0x73, 0x65, 0x74, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x28, 0x6e, - 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x2e, 0x67, - 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x73, 0x65, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, - 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 
0x2c, 0x72, 0x3d, 0x5b, 0x30, - 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, - 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x24, - 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, 0x3f, 0x72, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x3a, 0x33, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x5f, 0x29, - 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x33, 0x2c, 0x74, - 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x32, 0x3d, 0x3d, - 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, 0x22, 0x3d, 0x3d, 0x3d, - 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, - 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, 0x30, 0x2c, 0x5f, 0x29, - 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, 0x28, 0x5f, 0x7c, 0x7c, - 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x26, 0x26, - 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x30, 0x2c, - 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x2c, 0x74, 0x26, - 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x74, - 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x29, 0x2c, - 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, - 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, - 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, - 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, 0x29, 0x29, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x3d, 0x30, 0x3b, 0x6c, - 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, - 0x6c, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x3c, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x3d, - 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, - 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x2d, 0x2d, 0x22, - 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, - 0x3a, 0x5f, 0x3d, 0x6e, 0x2b, 0x5f, 0x5b, 0x30, 0x5d, 0x3a, 0x6f, 0x3f, - 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, 0x22, 0x22, 0x3a, 0x5f, - 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, - 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, - 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, - 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, 0x69, 0x26, 0x26, 0x28, 0x22, - 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x35, 0x2c, - 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x22, 0x2f, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x69, 0x3c, 0x35, 0x7c, - 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, - 0x6c, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x33, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, - 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, 0x28, 0x72, 0x3d, 0x72, 0x5b, - 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 
0x68, 0x28, 0x32, 0x2c, 0x30, - 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, 0x29, 0x3a, 0x22, 0x20, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x74, 0x22, 0x3d, 0x3d, - 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, - 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, - 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, - 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x21, - 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x69, 0x3d, - 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x7d, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x7d, 0x28, - 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, 0x72, 0x67, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, 0x29, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, 0x3a, 0x6e, 0x5b, 0x30, - 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, 0x3d, 0x65, 0x6e, 0x2e, - 0x62, 0x69, 0x6e, 0x64, 0x28, 0x57, 0x29, 0x3b, 0x65, 0x78, 0x70, 0x6f, - 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x52, 0x20, 0x61, 0x73, 0x20, 0x46, - 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x63, 0x20, 0x61, 0x73, - 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x65, 0x20, 0x61, 0x73, - 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x2c, 0x79, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, - 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, - 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x53, 0x20, - 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x57, 0x20, - 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, 0x61, 0x73, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, 0x20, 0x68, 0x79, - 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x45, 0x20, 0x61, 0x73, 0x20, 0x69, - 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2c, 0x6c, 0x74, 0x20, 0x61, 0x73, 0x20, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x2c, 0x68, 0x20, 0x61, 0x73, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x2c, 0x4b, 0x20, 0x61, 0x73, 0x20, 0x74, 0x6f, 0x43, - 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2c, 0x72, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x64, - 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x61, - 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, - 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, 0x20, - 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, - 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, 0x20, 0x61, 0x73, 0x20, - 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, 0x75, 0x6e, - 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 
0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, - 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, 0x45, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, 0x72, 0x2c, - 0x50, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, - 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a + 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 0x28, + 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, + 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, + 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x69, + 0x66, 0x28, 0x5f, 0x3c, 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x5f, + 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, + 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, + 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, + 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x5b, + 0x5f, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x5f, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 
0x72, + 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, + 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x5f, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, + 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x61, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, + 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6d, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, + 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, + 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, + 0x48, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, + 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, + 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, + 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, + 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x30, + 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, + 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, + 0x73, 0x69, 0x67, 0x6e, 0x28, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, + 0x5f, 0x5b, 0x31, 0x5d, 0x3d, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, + 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x31, 0x5d, 0x5b, + 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, + 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, + 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, + 0x29, 0x2c, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x29, 0x2c, + 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, + 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3d, 0x69, 0x29, 0x29, 0x3a, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, + 0x20, 0x5f, 0x7d, 0x2c, 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, + 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x65, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x6e, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, + 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, + 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, + 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, + 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x3d, + 0x31, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, + 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x69, 0x3d, 0x69, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, + 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, + 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, + 0x69, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x74, + 0x7c, 0x7c, 0x69, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, + 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x69, 0x26, 0x26, 0x21, 0x74, 0x3f, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, + 0x30, 0x2c, 0x69, 0x29, 0x3a, 0x5f, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, + 0x28, 0x69, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, + 0x5f, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x5f, 0x2c, 0x30, 0x2c, 0x69, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, 0x36, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x5f, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, + 0x36, 0x29, 0x29, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, + 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, + 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, + 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x73, + 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x5f, + 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x5f, 0x3d, 0x33, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x5f, 0x3f, + 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x3e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x5f, 0x3d, 0x31, 0x2c, 0x69, + 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x69, 0x3d, 0x6e, 0x2b, 0x69, 0x5b, 
0x30, + 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, + 0x22, 0x22, 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, + 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x31, 0x29, 0x3a, 0x5f, + 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x5f, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x69, 0x2c, 0x69, 0x3d, 0x22, 0x22, + 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, + 0x5f, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, + 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x5f, 0x3d, 0x72, 0x2c, 0x28, + 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x5f, 0x29, 0x2c, 0x5f, 0x3d, 0x30, 0x29, + 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, + 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, + 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x28, 0x5f, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, + 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, + 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, + 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, + 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, + 0x3d, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x4c, 0x29, 0x3b, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x6a, 0x20, + 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, + 0x68, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, + 0x5f, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6d, 0x20, 0x61, 0x73, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x52, + 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x66, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x55, 0x20, + 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x43, 0x20, 0x61, 0x73, 0x20, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x61, 0x20, 0x61, 0x73, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x20, 0x61, 
0x73, + 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x2c, 0x75, 0x20, 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x64, 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x50, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, + 0x75, 0x74, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, + 0x63, 0x65, 0x72, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a }; -unsigned int index_js_len = 22472; +unsigned int index_js_len = 22800; diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 07d779d20..b059c75f2 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -427,7 +427,7 @@ } if (data.timings) { - llamaStats.value = data.timings; + llamaStats.value = data; } } @@ -880,7 +880,7 @@ } return html` - ${llamaStats.value.predicted_per_token_ms.toFixed()}ms per token, ${llamaStats.value.predicted_per_second.toFixed(2)} tokens per second + ${llamaStats.value.tokens_predicted} predicted, ${llamaStats.value.tokens_cached} cached, ${llamaStats.value.timings.predicted_per_token_ms.toFixed()}ms per token, ${llamaStats.value.timings.predicted_per_second.toFixed(2)} tokens per second ` } From f3f62f0d835d559e80714bbeb05d03125574e3dd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 21:07:47 +0200 Subject: [PATCH 197/811] metal : optimize ggml_mul_mat_id (faster Mixtral PP) (#4725) * ggml : disable fast-math for Metal (cmake build only) ggml-ci * metal : fix Metal API debug warnings * cmake : add -fno-inline for Metal build (#4545) * metal : fix API debug warnings * metal : fix compile warnings * metal : use uint64_t for strides * cmake : rename option to LLAMA_METAL_SHADER_DEBUG * metal : fix mat-vec Q8_0 kernel for BS > 1 * metal : normalize mat-vec kernel signatures * cmake : respect LLAMA_QKK_64 option * metal : fix mat-vec Q4_K kernel for QK_K == 64 * metal : optimizing ggml_mul_mat_id (wip) * metal : minor fix * metal : opt mul_mm_id --- ggml-metal.m | 31 ++++--- ggml-metal.metal | 205 +++++++++++++++++++++++++++++++++++++++-------- 2 files 
changed, 190 insertions(+), 46 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index cd9d00456..7a369b55e 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1657,6 +1657,10 @@ void ggml_metal_graph_compute( } }; + if (ggml_is_quantized(src0t)) { + GGML_ASSERT(ne00 >= nth0*nth1); + } + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1715,6 +1719,9 @@ void ggml_metal_graph_compute( // TODO: make this more general GGML_ASSERT(n_as <= 8); + // max size of the src1ids array in the kernel stack + GGML_ASSERT(ne11 <= 512); + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; const int64_t ne20 = src2 ? src2->ne[0] : 0; @@ -1732,9 +1739,6 @@ void ggml_metal_graph_compute( GGML_ASSERT(!ggml_is_transposed(src2)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(ne20 % 32 == 0); - // !!!!!!!!! TODO: this assert is probably required but not sure! - //GGML_ASSERT(ne20 >= 64); GGML_ASSERT(src1t == GGML_TYPE_F32); const uint r2 = ne12/ne22; @@ -1742,22 +1746,22 @@ void ggml_metal_graph_compute( // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = 1; + int ne11_mm_min = n_as; const int idx = ((int32_t *) dst->op_params)[0]; // batch size GGML_ASSERT(ne01 == ne11); - const int64_t _ne1 = 1; // kernel_mul_mm_impl needs a reference in constant memory - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel // !!! // TODO: for now, always use mat-vec kernels until we figure out how to improve the // indirect matrix multiplication // !!! - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && _ne1 > ne11_mm_min) { + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + ne20 % 32 == 0 && ne20 >= 64 && + ne11 > ne11_mm_min) { switch (src2->type) { case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; @@ -1787,7 +1791,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:14]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; [encoder setBytes:&r2 length:sizeof(r2) atIndex:16]; [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; @@ -1805,8 +1809,7 @@ void ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - // TODO: processing one row at a time (ne11 -> 1) is not efficient - [encoder dispatchThreadgroups:MTLSizeMake( (_ne1 + 31)/32, (ne21 + 63)/64, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; } else { int nth0 = 32; int nth1 = 1; @@ -1889,11 +1892,17 @@ void ggml_metal_graph_compute( } break; default: { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); GGML_ASSERT(false && "not implemented"); } }; + if (ggml_is_quantized(src2t)) { + GGML_ASSERT(ne20 >= nth0*nth1); + } + + const int64_t _ne1 = 1; // kernels needs a reference in constant memory + 
[encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; diff --git a/ggml-metal.metal b/ggml-metal.metal index 1d5b8f6f4..9aa7b502a 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -846,7 +846,7 @@ inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thre #define N_SIMDGROUP 2 // number of SIMD groups in a thread group //Note: This is a template, but strictly speaking it only applies to // quantizations where the block size is 32. It also does not -// giard against the number of rows not being divisible by +// guard against the number of rows not being divisible by // N_DST, so this is another explicit assumption of the implementation. template void mul_vec_q_n_f32_impl( @@ -3973,6 +3973,131 @@ void kernel_mul_mm_impl(device const uchar * src0, } } +// same as kernel_mul_mm_impl, but src1 and dst are accessed via indices stored in src1ids +template +void kernel_mul_mm_id_impl( + device const uchar * src0, + device const uchar * src1, + thread short * src1ids, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + int64_t ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup half * sa = (threadgroup half *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + const uint im = tgpig.z; + + if (r1 * BLOCK_SIZE_N >= ne1) return; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_half8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + + short il = (tiitg % THREAD_PER_ROW); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + uint offset0 = (i12/r2)*nb02 + (i13/r3)*(nb02*ne02); + ushort offset1 = il/nl; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01 + offset0) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * im + + nb11 * src1ids[r1 * BLOCK_SIZE_N + thread_col] + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + half4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup half * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i],lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i],lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + { + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0) + im*ne1*ne0; + if (sgitg == 0) { + for (int i = 0; i < n_rows; i++) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + *(C + i + src1ids[j + r1*BLOCK_SIZE_N] * ne0) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + template kernel void kernel_mul_mm(device const uchar * src0, device const uchar * src1, @@ -4019,7 +4144,7 @@ template( - src0[id], - src1 + bid*nb11, - (device float *) (dst + bid*nb1), + for (int64_t i1 = 0; i1 < ne1; i1++) { + if (((device int32_t *) (ids + i1*nbi1))[idx] == id) { + src1ids[_ne1++] = i1; + } + } + + kernel_mul_mm_id_impl( + src0s[id], + src1, + src1ids, + dst, ne00, ne02, nb01, @@ -4069,7 +4204,7 @@ kernel void kernel_mul_mm_id( nb11, nb12, ne0, - ne1, + _ne1, r2, r3, shared_memory, @@ -4158,7 +4293,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4471,7 +4606,7 @@ kernel void 
kernel_mul_mv_id_q4_0_f32( kernel void kernel_mul_mv_id_q4_1_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4515,7 +4650,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4534,7 +4669,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( kernel void kernel_mul_mv_id_q5_0_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4578,7 +4713,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4597,7 +4732,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( kernel void kernel_mul_mv_id_q5_1_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4641,7 +4776,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4660,7 +4795,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( kernel void kernel_mul_mv_id_q2_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4704,7 +4839,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( kernel_mul_mv_q2_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4723,7 +4858,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( kernel void kernel_mul_mv_id_q3_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4767,7 +4902,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( kernel_mul_mv_q3_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4786,7 +4921,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( kernel void kernel_mul_mv_id_q4_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4830,7 +4965,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( kernel_mul_mv_q4_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4849,7 +4984,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( kernel void kernel_mul_mv_id_q5_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4893,7 +5028,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( kernel_mul_mv_q5_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4912,7 +5047,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( kernel void kernel_mul_mv_id_q6_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & 
nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4956,7 +5091,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( kernel_mul_mv_q6_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, From f2eb19bd8bc9f5730d6e05d7a52a9e19bf5ac099 Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Wed, 3 Jan 2024 03:43:19 -0500 Subject: [PATCH 198/811] server : throw an error when `slot unavailable` (#4741) --- examples/server/public/completion.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index 6e2b99565..baaec1d60 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -95,6 +95,15 @@ export async function* llama(prompt, params = {}, config = {}) { break; } } + if (result.error) { + result.error = JSON.parse(result.error); + if (result.error.content.includes('slot unavailable')) { + // Throw an error to be caught by upstream callers + throw new Error('slot unavailable'); + } else { + console.error(`llama.cpp error: ${result.error.content}`); + } + } if (result.error) { result.error = JSON.parse(result.error); console.error(`llama.cpp error: ${result.error.content}`); From 5f66ebca9c41a17385341da4b553a8eb5f07edee Mon Sep 17 00:00:00 2001 From: Guillaume Wenzek Date: Fri, 29 Dec 2023 18:07:03 +0100 Subject: [PATCH 199/811] ggml : extend ggml_get_rows, ggml_repeat, ggml_concat (ggml/639) * add more int ops * ggml_compute_forward_dup_bytes * add tests * PR comments * tests : minor indentations --------- Co-authored-by: Georgi Gerganov --- ggml.c | 166 ++++++++++++++++++++++++++++++++++++- tests/test-backend-ops.cpp | 42 ++++++++-- 2 files changed, 198 insertions(+), 10 deletions(-) diff --git a/ggml.c b/ggml.c index bcec200f6..b124f14cc 100644 --- a/ggml.c +++ b/ggml.c @@ -4766,8 +4766,11 @@ struct ggml_tensor * ggml_get_rows( } // TODO: implement non F32 return - //struct ggml_tensor * result = ggml_new_tensor_2d(ctx, a->type, a->ne[0], b->ne[0]); - struct ggml_tensor * result = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); + enum ggml_type type = GGML_TYPE_F32; + if (a->type == GGML_TYPE_I32) { + type = a->type; + } + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, type, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); result->op = GGML_OP_GET_ROWS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -6938,14 +6941,165 @@ static void ggml_compute_forward_dup_f32( } } +// A simplified version of ggml_compute_forward_dup that doesn't do float upcasting, and just plain old memcpy. 
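// It is reached from ggml_compute_forward_dup whenever src0->type == dst->type
// (see the updated dispatch further below), so no type conversion is needed.
// Four cases are handled, from fastest to slowest:
//   0. both tensors fully contiguous -> forwarded to ggml_compute_forward_dup_same_cont
//   1. rows laid out identically     -> one memcpy of ne00*type_size bytes per row
//   2. only dst contiguous           -> rows (or single elements) gathered into dst
//   3. general strided case          -> element-by-element memcpy driven by dst counters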
+static void ggml_compute_forward_dup_bytes( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + GGML_ASSERT(src0->type == dst->type); + + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + return; + } + + if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { + ggml_compute_forward_dup_same_cont(params, src0, dst); + return; + } + + GGML_TENSOR_UNARY_OP_LOCALS; + + const size_t type_size = ggml_type_size(src0->type); + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == type_size && nb0 == type_size) { + // copy by rows + const size_t rs = ne00 * type_size; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + if (ggml_is_contiguous(dst)) { + size_t id = 0; + char * dst_ptr = (char *) dst->data; + const size_t rs = ne00 * type_size; + + if (nb00 == type_size) { + // src0 is contigous on first dimension, copy by rows + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = (char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, type_size); + + id += type_size; + } + } + id += rs * (ne01 - ir1); + } + } + } + + return; + } + + // dst counters + + int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + memcpy(dst_ptr, src0_ptr, type_size); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } +} + static void ggml_compute_forward_dup( const struct ggml_compute_params * params, const struct ggml_tensor * src0, struct ggml_tensor * dst) { - if 
(ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + if (src0->type == dst->type) { + ggml_compute_forward_dup_bytes(params, src0, dst); return; } + switch (src0->type) { case GGML_TYPE_F16: { @@ -8404,10 +8558,12 @@ static void ggml_compute_forward_repeat( struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F16: + case GGML_TYPE_I16: { ggml_compute_forward_repeat_f16(params, src0, dst); } break; case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_repeat_f32(params, src0, dst); } break; @@ -8550,6 +8706,7 @@ static void ggml_compute_forward_concat( struct ggml_tensor* dst) { switch (src0->type) { case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_concat_f32(params, src0, src1, dst); } break; @@ -10674,6 +10831,7 @@ static void ggml_compute_forward_get_rows( ggml_compute_forward_get_rows_f16(params, src0, src1, dst); } break; case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_get_rows_f32(params, src0, src1, dst); } break; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index eff063b2d..44412cb94 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -58,6 +58,9 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m int64_t hist[16]; ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist); ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size()); + } else if (tensor->type == GGML_TYPE_I8 || tensor->type == GGML_TYPE_I16 || tensor->type == GGML_TYPE_I32) { + // This is going to create some weird integers though. + ggml_backend_tensor_set(tensor, data.data(), 0, ggml_nbytes(tensor)); } else { GGML_ASSERT(false); } @@ -87,8 +90,13 @@ static std::vector tensor_to_float(const ggml_tensor * t) { tv.push_back(*(float *) &buf[i]); } else if (t->type == GGML_TYPE_I32) { tv.push_back((float)*(int32_t *) &buf[i]); + } else if (t->type == GGML_TYPE_I16) { + tv.push_back((float)*(int16_t *) &buf[i]); + } else if (t->type == GGML_TYPE_I8) { + tv.push_back((float)*(int8_t *) &buf[i]); } else if (quantized) { - tt.to_float(&buf[i], vq.data(), bs); + std::vector vq(ggml_blck_size(t->type)); + tt.to_float(&buf[i], vq.data(), ggml_blck_size(t->type)); tv.insert(tv.end(), vq.begin(), vq.end()); } else { GGML_ASSERT(false); @@ -661,17 +669,26 @@ struct test_repeat : public test_case { struct test_dup : public test_case { const ggml_type type; const std::array ne; + const std::array permute; + bool _use_permute; std::string vars() override { - return VARS_TO_STR2(type, ne); + std::string v = VARS_TO_STR2(type, ne); + if (_use_permute) v += "," + VAR_TO_STR(permute); + return v; } test_dup(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 1}) - : type(type), ne(ne) {} + std::array ne = {10, 10, 10, 1}, + std::array permute = {0, 0, 0, 0}) + : type(type), ne(ne), permute(permute), + _use_permute(permute[0] + permute[1] + permute[2] + permute[3] > 0) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * src = ggml_new_tensor(ctx, type, 4, ne.data()); + if (_use_permute) { + src = ggml_permute(ctx, src, permute[0], permute[1], permute[2], permute[3]); + } ggml_tensor * out = ggml_dup(ctx, src); return out; } @@ -1450,14 +1467,26 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } } } + for (int b : {1, 7}) { + for (bool v : {false, true}) { + test_cases.emplace_back(new test_get_rows(GGML_TYPE_I32, 256, 5, 4, b, 
v)); + } + } test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {2, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 2, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 2, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 2})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_I32, {10, 10, 10, 10}, {2, 1, 1, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_I16, {10, 10, 10, 10}, {1, 1, 1, 2})); - test_cases.emplace_back(new test_dup()); + test_cases.emplace_back(new test_dup(GGML_TYPE_F32)); + test_cases.emplace_back(new test_dup(GGML_TYPE_F16)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I32)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16, {10, 8, 3, 1}, {0, 2, 1, 3})); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16, {10, 8, 3, 1}, {1, 2, 0, 3})); for (ggml_type type : all_types) { test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, type, {256, 10, 10, 1})); @@ -1565,7 +1594,8 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_alibi()); test_cases.emplace_back(new test_im2col()); - test_cases.emplace_back(new test_concat()); + test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); + test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); From ab62fc3e5520f5a143c36cb23c269f11aa4dafd6 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:25:54 +0200 Subject: [PATCH 200/811] scripts : fix sync order + metal sed --- scripts/sync-ggml-am.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 91478f177..248cf1023 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -27,7 +27,7 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML git log --oneline $lc..HEAD -git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits +git log --oneline $lc..HEAD --reverse | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits if [ ! 
-s $SRC_LLAMA/ggml-commits ]; then rm -v $SRC_LLAMA/ggml-commits @@ -87,7 +87,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-impl.h -> ggml-impl.h # src/ggml-metal.h -> ggml-metal.h # src/ggml-metal.m -> ggml-metal.m - # src/ggml-metal.metal -> ggml-metal.metal # src/ggml-mpi.h -> ggml-mpi.h # src/ggml-mpi.c -> ggml-mpi.c # src/ggml-opencl.cpp -> ggml-opencl.cpp @@ -114,7 +113,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ - -e 's/src\/ggml-metal\.metal/ggml-metal.metal/g' \ -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ -e 's/src\/ggml-mpi\.c/ggml-mpi.c/g' \ -e 's/src\/ggml-opencl\.cpp/ggml-opencl.cpp/g' \ From 289313716ff7ccf6aee284f686a0fe8cbc7714af Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:35:46 +0200 Subject: [PATCH 201/811] metal : add kernel_get_rows_i32 ggml-ci --- ggml-metal.m | 4 ++++ ggml-metal.metal | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 7a369b55e..7aa92c14c 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -87,6 +87,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q4_K); GGML_METAL_DECL_KERNEL(get_rows_q5_K); GGML_METAL_DECL_KERNEL(get_rows_q6_K); + GGML_METAL_DECL_KERNEL(get_rows_i32); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -377,6 +378,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q4_K); GGML_METAL_ADD_KERNEL(get_rows_q5_K); GGML_METAL_ADD_KERNEL(get_rows_q6_K); + GGML_METAL_ADD_KERNEL(get_rows_i32); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -499,6 +501,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q4_K); GGML_METAL_DEL_KERNEL(get_rows_q5_K); GGML_METAL_DEL_KERNEL(get_rows_q6_K); + GGML_METAL_DEL_KERNEL(get_rows_i32); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -1978,6 +1981,7 @@ void ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_K]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; + case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 9aa7b502a..a7d3f9efa 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -3829,6 +3829,35 @@ kernel void kernel_get_rows_f16( } } +kernel void kernel_get_rows_i32( + device const void * src0, + device const char * src1, + device int32_t * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((device int32_t *) ((device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + ((device int32_t *) ((device char *) dst + i11*nb2 + 
i10*nb1))[ind] = + ((device int32_t *) ((device char *) src0 + r*nb01 + i02*nb02))[ind]; + } +} + + #define BLOCK_SIZE_M 64 // 8 simdgroup matrices from matrix A #define BLOCK_SIZE_N 32 // 4 simdgroup matrices from matrix B #define BLOCK_SIZE_K 32 From 75e3fd85814c367b55aea11e7bb38cb7b82c6aa0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:37:44 +0200 Subject: [PATCH 202/811] sync : ggml ggml-ci --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 5b6a440f7..2105a8df2 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -df098ea908764cba4a4889a1cbe7b026b2d31a14 +5b6f3aeba051be8926cb921b8ba529ff990608bf From d55356d3baa58a6c3a9171cb67a67094b9aa9dff Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 13:01:44 +0200 Subject: [PATCH 203/811] cuda : mark I16 and I32 ops as unsupported ggml-ci --- ggml-cuda.cu | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 8c2712308..2e759d43e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10039,14 +10039,22 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten } return false; } break; + case GGML_OP_DUP: + case GGML_OP_REPEAT: + case GGML_OP_CONCAT: + { + ggml_type src0_type = op->src[0]->type; + if (src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16) { + return true; + } + return false; + } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NORM: - case GGML_OP_REPEAT: - case GGML_OP_DUP: case GGML_OP_ADD: case GGML_OP_MUL: case GGML_OP_DIV: @@ -10063,7 +10071,6 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: case GGML_OP_ACC: - case GGML_OP_CONCAT: case GGML_OP_GROUP_NORM: case GGML_OP_UPSCALE: case GGML_OP_PAD: From 7bed7eba359b0fa8e575345dc5467a46b4ba509f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 14:18:46 +0200 Subject: [PATCH 204/811] cuda : simplify expression Co-authored-by: slaren --- ggml-cuda.cu | 5 +---- scripts/sync-ggml.last | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2e759d43e..52d3cc6a6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10044,10 +10044,7 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_CONCAT: { ggml_type src0_type = op->src[0]->type; - if (src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16) { - return true; - } - return false; + return src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16; } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 2105a8df2..354246a26 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -5b6f3aeba051be8926cb921b8ba529ff990608bf +3fd01e00e40583ccd4b393a7c6502d6a4455a1d5 From ece9a45e8ffb73ad461c792720c2fec28b0137bc Mon Sep 17 00:00:00 2001 From: Ashraful Islam Date: Wed, 3 Jan 2024 11:30:02 -0600 Subject: [PATCH 205/811] swift : update Package.swift to use ggml as dependency (#4691) * updates the package.swift to use ggml as dependency * changes the ggml package url src to ggerganov --- Package.swift | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/Package.swift b/Package.swift index 18d610d69..e33a4ff46 100644 --- a/Package.swift +++ 
b/Package.swift @@ -13,21 +13,17 @@ let package = Package( products: [ .library(name: "llama", targets: ["llama"]), ], + dependencies: [ + .package(url: "https://github.com/ggerganov/ggml.git", .branch("master")) + ], targets: [ .target( name: "llama", + dependencies: ["ggml"], path: ".", exclude: [], sources: [ - "ggml.c", "llama.cpp", - "ggml-alloc.c", - "ggml-backend.c", - "ggml-quants.c", - "ggml-metal.m", - ], - resources: [ - .process("ggml-metal.metal") ], publicHeadersPath: "spm-headers", cSettings: [ From cb1e2818e0e12ec99f7236ec5d4f3ffd8bcc2f4a Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 3 Jan 2024 18:53:40 +0100 Subject: [PATCH 206/811] train : fix typo in overlapping-samples help msg (#4758) This commit fixes a typo in the help message for the --overlapping-samples option. Signed-off-by: Daniel Bevenius --- common/train.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/train.cpp b/common/train.cpp index dcf9614e4..e6f2f7a2f 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -1107,7 +1107,7 @@ void print_common_train_usage(int /*argc*/, char ** /*argv*/, const struct train fprintf(stderr, " --sample-start STR Sets the starting point for samples after the specified pattern. If empty use every token position as sample start. (default '%s')\n", params->sample_start.c_str()); fprintf(stderr, " --include-sample-start Include the sample start in the samples. (default off)\n"); fprintf(stderr, " --escape process sample start escapes sequences (\\n, \\r, \\t, \\', \\\", \\\\)\n"); - fprintf(stderr, " --overlapping-samples Samples my overlap, will include sample-start of second and following samples. When off, samples will end at begin of next sample. (default off)\n"); + fprintf(stderr, " --overlapping-samples Samples may overlap, will include sample-start of second and following samples. When off, samples will end at begin of next sample. (default off)\n"); fprintf(stderr, " --fill-with-next-samples Samples shorter than context length will be followed by the next (shuffled) samples. (default off)\n"); fprintf(stderr, " --separate-with-eos When fill-with-next-samples, insert end-of-sequence token between samples.%s\n", params->separate_with_eos ? " (default)" : ""); fprintf(stderr, " --separate-with-bos When fill-with-next-samples, insert begin-of-sequence token between samples.%s\n", params->separate_with_bos ? 
" (default)" : ""); From 46cea79e1f32499bb24b9fab12123cd386e96728 Mon Sep 17 00:00:00 2001 From: singularity <12184989+singularity-s0@users.noreply.github.com> Date: Thu, 4 Jan 2024 15:58:16 +0800 Subject: [PATCH 207/811] llama.swiftui : fix build of ggml.metallib (#4754) * metal: fix metal backend init failure in swiftui * metal: build ggml.metallib instead of copy src * llama.swift : remove debug flags from metallib build --------- Co-authored-by: Georgi Gerganov --- .../llama.swiftui.xcodeproj/project.pbxproj | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 2e6159928..7bf4489a2 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -9,7 +9,6 @@ /* Begin PBXBuildFile section */ 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; @@ -24,8 +23,25 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; /* End PBXBuildFile section */ +/* Begin PBXBuildRule section */ + F1FE20DB2B465C2100B45541 /* PBXBuildRule */ = { + isa = PBXBuildRule; + compilerSpec = com.apple.compilers.proxy.script; + fileType = sourcecode.metal; + inputFiles = ( + ); + isEditable = 1; + outputFiles = ( + "${DERIVED_FILES_DIR}/ggml-metal.air", + "${DERIVED_FILES_DIR}/ggml.metallib", + ); + script = "# metal\nxcrun metal -c \"${INPUT_FILE_PATH}\" -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\nxcrun metallib -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE%-metal}.metallib\" \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\n"; + }; +/* End PBXBuildRule section */ + /* Begin PBXFileReference section */ 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; 542376072B0D9BFB008E6A1C /* 
ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; @@ -190,6 +206,7 @@ 8A1C83712AC328BD0096AF73 /* Resources */, ); buildRules = ( + F1FE20DB2B465C2100B45541 /* PBXBuildRule */, ); dependencies = ( ); @@ -241,7 +258,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, + F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */, 8A3F84242AC4C891005E2EE8 /* models in Resources */, 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, From dc891b7f7a23158d54f9383790b92c79cc5906c1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 4 Jan 2024 10:12:26 +0200 Subject: [PATCH 208/811] ggml : include stdlib.h before intrin.h (#4736) --- ggml-impl.h | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-impl.h b/ggml-impl.h index 1f5610a86..2faced080 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -5,6 +5,7 @@ // GGML internal header #include +#include // load `stdlib.h` before other headers to work around MinGW bug: https://sourceforge.net/p/mingw-w64/bugs/192/ #include #include #include // memcpy From e5804313a1edaf00726ed0b96ecced07accbf50c Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Thu, 4 Jan 2024 03:17:09 -0500 Subject: [PATCH 209/811] server : fix options in README.md (#4765) * fix examples/server/README.md * minor : fix whitespace --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 718a7e064..243e66991 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -168,6 +168,12 @@ node index.js `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) + + `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) + + `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) + *Result JSON:* Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. @@ -198,12 +204,6 @@ node index.js `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens predicted`) exceeded the context size (`n_ctx`) - `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) - - `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) - - `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. 
[See more](#change-system-prompt-on-runtime) - - **POST** `/tokenize`: Tokenize a given text. *Options:* From 3c0b585561d74a56977cf3a3844535ecc9e37972 Mon Sep 17 00:00:00 2001 From: singularity <12184989+singularity-s0@users.noreply.github.com> Date: Thu, 4 Jan 2024 16:22:38 +0800 Subject: [PATCH 210/811] llama.swiftui : support loading custom model from file picker (#4767) * swiftui: support load model from file picker * swiftui: remove trailing whitespace --- .../llama.swiftui.xcodeproj/project.pbxproj | 4 ++ .../llama.swiftui/UI/ContentView.swift | 2 + .../llama.swiftui/UI/LoadCustomButton.swift | 44 +++++++++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 7bf4489a2..a70750a22 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -23,6 +23,7 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; /* End PBXBuildFile section */ @@ -68,6 +69,7 @@ 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoadCustomButton.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -182,6 +184,7 @@ children = ( 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, 8A1C83782AC328BD0096AF73 /* ContentView.swift */, + F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */, ); path = UI; sourceTree = ""; @@ -274,6 +277,7 @@ files = ( 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */, 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 147e0c63b..7c81ea256 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -103,6 +103,8 @@ struct ContentView: View { ContentView.cleanupModelCaches() 
llamaState.cacheCleared = true } + + LoadCustomButton(llamaState: llamaState) } .padding(.top, 4) .font(.system(size: 12)) diff --git a/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift b/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift new file mode 100644 index 000000000..4315dbe4f --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift @@ -0,0 +1,44 @@ +import SwiftUI +import UniformTypeIdentifiers + +struct LoadCustomButton: View { + @ObservedObject private var llamaState: LlamaState + @State private var showFileImporter = false + + init(llamaState: LlamaState) { + self.llamaState = llamaState + } + + var body: some View { + VStack { + Button(action: { + showFileImporter = true + }) { + Text("Load Custom Model") + } + } + .fileImporter( + isPresented: $showFileImporter, + allowedContentTypes: [UTType(filenameExtension: "gguf", conformingTo: .data)!], + allowsMultipleSelection: false + ) { result in + switch result { + case .success(let files): + files.forEach { file in + let gotAccess = file.startAccessingSecurityScopedResource() + if !gotAccess { return } + + do { + try llamaState.loadModel(modelUrl: file.absoluteURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + + file.stopAccessingSecurityScopedResource() + } + case .failure(let error): + print(error) + } + } + } +} From a91928014fcf51fe297823fcff0788de4f14206e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 4 Jan 2024 09:43:23 +0100 Subject: [PATCH 211/811] Print backend name on test-backend-ops failure (#4751) --- tests/test-backend-ops.cpp | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 44412cb94..b79de7a7d 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -392,15 +392,21 @@ struct test_case { struct callback_userdata { bool ok; double max_err; + ggml_backend_t backend1; + ggml_backend_t backend2; }; callback_userdata ud { true, max_nmse_err(), + backend1, + backend2 }; auto callback = [](int index, ggml_tensor * t1, ggml_tensor * t2, void * user_data) -> bool { callback_userdata * ud = (callback_userdata *) user_data; + const char * bn1 = ggml_backend_name(ud->backend1); + const char * bn2 = ggml_backend_name(ud->backend2); if (t1->op == GGML_OP_NONE) { // sentinels must be unchanged @@ -422,7 +428,7 @@ struct test_case { for (size_t i = 0; i < f1.size(); i++) { // check for nans if (std::isnan(f1[i]) || std::isnan(f2[i])) { - printf("[%s] NaN at index %zu (%f %f) ", ggml_op_desc(t1), i, f1[i], f2[i]); + printf("[%s] NaN at index %zu (%s=%f %s=%f) ", ggml_op_desc(t1), i, bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } @@ -430,12 +436,12 @@ struct test_case { if (isinf_or_max(f1[i]) || isinf_or_max(f2[i])) { if (isinf_or_max(f1[i]) && isinf_or_max(f2[i])) { if (std::signbit(f1[i]) != std::signbit(f2[i])) { - printf("[%s] inf sign mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); + printf("[%s] inf sign mismatch: %s=%f %s=%f ", ggml_op_desc(t1), bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } } else { - printf("[%s] inf mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); + printf("[%s] inf mismatch: %s=%f %s=%f ", ggml_op_desc(t1), bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } From 012cf349aec8ffb47c9def5dc018240fa3721e8b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 4 Jan 2024 19:56:33 +0200 Subject: [PATCH 212/811] server : send token probs for "stream == 
false" (#4714) --- examples/server/server.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index e45ea809a..d1469fb08 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1265,7 +1265,7 @@ struct llama_server_context { std::vector probs_output = {}; const std::vector to_send_toks = llama_tokenize(ctx, tkn.text_to_send, false); - size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); + size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); size_t probs_stop_pos = std::min(slot.sent_token_probs_index + to_send_toks.size(), slot.generated_token_probs.size()); if (probs_pos < probs_stop_pos) { @@ -1325,7 +1325,7 @@ struct llama_server_context { probs = std::vector( slot.generated_token_probs.begin(), - slot.generated_token_probs.begin() + slot.sent_token_probs_index); + slot.generated_token_probs.end()); } res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs); } From b3a7c20b5c035250257d2b62851c379b159c899a Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 4 Jan 2024 20:45:37 +0100 Subject: [PATCH 213/811] finetune : remove unused includes (#4756) This commit removes unused includes from finetune.cpp. Signed-off-by: Daniel Bevenius --- examples/finetune/finetune.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index e0520f64c..eaca42fc1 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -3,15 +3,9 @@ #include "llama.h" #include "common.h" #include "train.h" -#include #include -#include -#include #include -#include #include -#include -#include #include #include From 3681f22443d917e7328456b69c276d6927dafeec Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:11:10 +0200 Subject: [PATCH 214/811] examples : add few-shot translation example (#4783) --- examples/base-translate.sh | 56 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100755 examples/base-translate.sh diff --git a/examples/base-translate.sh b/examples/base-translate.sh new file mode 100755 index 000000000..50fba025c --- /dev/null +++ b/examples/base-translate.sh @@ -0,0 +1,56 @@ +#!/bin/bash +# +# Few-shot translation example. +# Requires a base model (i.e. no fine-tuned or instruct models). 
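#
# The script expects two arguments: the path to the base model (a GGUF file) and
# the text to translate. For example (the model path below is only illustrative):
#
#   ./examples/base-translate.sh ./models/llama-2-7b/ggml-model-q4_0.gguf "good morning"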
+# +# Usage: +# +# cd llama.cpp +# make -j +# +# ./examples/base-translate.sh "" +# + +if [ $# -ne 2 ]; then + echo "Usage: ./base-translate.sh \"\"" + exit 1 +fi + +ftmp="__llama.cpp_example_tmp__.txt" +trap "rm -f $ftmp" EXIT + +echo "Translate from English to French: + +=== + +sea otter, peppermint, plush girafe: + +sea otter => loutre de mer +peppermint => menthe poivrée +plush girafe => girafe peluche + +=== + +violin + +violin => violon + +=== + +phone, computer, mouse, keyboard: + +phone => téléphone +computer => ordinateur +mouse => souris +keyboard => clavier + +=== +" > $ftmp + +echo "$2 +" >> $ftmp + +model=$1 + +# generate the most likely continuation, run on the CPU until the string "===" is found +./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -ngl 0 -r "===" From c1d7cb28d3fed97fbc95fc3c43f0c5e2113e546c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:18:21 +0200 Subject: [PATCH 215/811] ggml : do not sched_yield when calling BLAS (#4761) * ggml : do not sched_yield when calling BLAS ggml-ci * ggml : fix do_yield logic ggml-ci * ggml : simplify do_yield logic ggml-ci --- ggml.c | 41 ++++++++++++++--------------------------- 1 file changed, 14 insertions(+), 27 deletions(-) diff --git a/ggml.c b/ggml.c index b124f14cc..62f0f18ef 100644 --- a/ggml.c +++ b/ggml.c @@ -9704,10 +9704,10 @@ static void ggml_compute_forward_group_norm( #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) // helper function to determine if it is better to use BLAS or not // for large matrices, BLAS is faster -static bool ggml_compute_forward_mul_mat_use_blas( - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { +static bool ggml_compute_forward_mul_mat_use_blas(struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + //const int64_t ne00 = src0->ne[0]; //const int64_t ne01 = src0->ne[1]; @@ -9787,7 +9787,7 @@ static void ggml_compute_forward_mul_mat( #endif #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(src0, src1, dst)) { + if (ggml_compute_forward_mul_mat_use_blas(dst)) { if (params->ith != 0) { return; } @@ -16301,24 +16301,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { //n_tasks = MIN(n_threads, MAX(1, nr0/128)); //printf("nr0 = %8d, nr1 = %8d, nr0*nr1 = %8d, n_tasks%d\n", nr0, nr1, nr0*nr1, n_tasks); - -#if defined(GGML_USE_CUBLAS) - if (ggml_cuda_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#elif defined(GGML_USE_CLBLAST) - if (ggml_cl_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#endif -#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#endif } break; case GGML_OP_MUL_MAT_ID: { @@ -16491,6 +16473,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { state->shared->node_n += 1; return (thread_ret_t) GGML_EXIT_ABORTED; } + if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { // all other threads are finished and spinning // do finalize and init here so we don't have synchronize again @@ -16556,14 +16539,18 @@ static thread_ret_t 
ggml_graph_compute_thread(void * data) { } else { // wait for other threads to finish const int last = node_n; + + const bool do_yield = last < 0 || cgraph->nodes[last]->op == GGML_OP_MUL_MAT; + while (true) { // TODO: this sched_yield can have significant impact on the performance - either positive or negative // depending on the workload and the operating system. // since it is not clear what is the best approach, it should potentially become user-configurable // ref: https://github.com/ggerganov/ggml/issues/291 -#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - sched_yield(); -#endif + // UPD: adding the do_yield flag seems to resolve the issue universally + if (do_yield) { + sched_yield(); + } node_n = atomic_load(&state->shared->node_n); if (node_n != last) break; @@ -16642,7 +16629,7 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else #endif #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { + if (ggml_compute_forward_mul_mat_use_blas(node)) { if (node->src[0]->type != GGML_TYPE_F32) { // here we need memory just for single 2D matrix from src0 cur = ggml_type_size(GGML_TYPE_F32)*(node->src[0]->ne[0]*node->src[0]->ne[1]); From 1bf681f90ef4cf37b36e6d604d3e30fc57eda650 Mon Sep 17 00:00:00 2001 From: Finn Voorhees Date: Wed, 3 Jan 2024 08:39:43 -0500 Subject: [PATCH 216/811] ggml : add error handling to graph_compute (whisper/1714) --- ggml-backend-impl.h | 2 +- ggml-backend.c | 10 +++++++--- ggml-backend.h | 2 +- ggml-cuda.cu | 4 +++- ggml-metal.h | 2 +- ggml-metal.m | 9 +++++---- 6 files changed, 18 insertions(+), 11 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 05859935a..ca21b4743 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -90,7 +90,7 @@ extern "C" { void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); // compute graph without a plan - void (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); + bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); diff --git a/ggml-backend.c b/ggml-backend.c index 2c3752067..53e741cb8 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -195,11 +195,14 @@ void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_ ggml_backend_synchronize(backend); } -void ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { - backend->iface.graph_compute(backend, cgraph); +bool ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + if (!backend->iface.graph_compute(backend, cgraph)) { + return false; + } // TODO: optional sync ggml_backend_synchronize(backend); + return true; } bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -597,7 +600,7 @@ static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_bac GGML_UNUSED(backend); } -static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); @@ -611,6 +614,7 @@ static void 
ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c cplan.work_data = cpu_ctx->work_data; ggml_graph_compute(cgraph, &cplan); + return true; } static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { diff --git a/ggml-backend.h b/ggml-backend.h index a9d2fddd7..85ff67b0e 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -58,7 +58,7 @@ extern "C" { GGML_API void ggml_backend_graph_plan_free (ggml_backend_t backend, ggml_backend_graph_plan_t plan); GGML_API void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan); - GGML_API void ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API bool ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); GGML_API bool ggml_backend_supports_op (ggml_backend_t backend, const struct ggml_tensor * op); // tensor copy between different backends diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 52d3cc6a6..10c21615e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9910,7 +9910,7 @@ static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_ba UNUSED(plan); } -static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -9967,6 +9967,8 @@ static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph } UNUSED(backend); + + return true; } static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { diff --git a/ggml-metal.h b/ggml-metal.h index b5e02b668..c4b7325da 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -87,7 +87,7 @@ int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx); // same as ggml_graph_compute but uses Metal // creates gf->n_threads command buffers in parallel -void ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); +bool ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); // // backend API diff --git a/ggml-metal.m b/ggml-metal.m index 7aa92c14c..55cc1a872 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -977,7 +977,7 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { return false; } } -void ggml_metal_graph_compute( +bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @autoreleasepool { @@ -2405,10 +2405,11 @@ void ggml_metal_graph_compute( MTLCommandBufferStatus status = (MTLCommandBufferStatus) [ctx->command_buffers[i] status]; if (status != MTLCommandBufferStatusCompleted) { GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); - GGML_ASSERT(false); + return false; } } + return true; } } @@ -2688,10 +2689,10 @@ static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggm UNUSED(backend); } -static void ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; - ggml_metal_graph_compute(metal_ctx, cgraph); + return ggml_metal_graph_compute(metal_ctx, cgraph); } static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { From 
d061bf9405cc5fd50792fb2dbdff9c9ea53d6bf9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:36:04 +0200 Subject: [PATCH 217/811] ggml : fix q2_k bpw in comments (ggml/680) --- ggml-quants.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-quants.h b/ggml-quants.h index 70c12c274..62c1df6cb 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -70,7 +70,7 @@ static_assert(sizeof(block_q8_1) == 2*sizeof(float) + QK8_1, "wrong q8_1 block s // 2-bit quantization // weight is represented as x = a * q + b // 16 blocks of 16 elements each -// Effectively 2.5625 bits per weight +// Effectively 2.625 bits per weight typedef struct { uint8_t scales[QK_K/16]; // scales and mins, quantized with 4 bits uint8_t qs[QK_K/4]; // quants From 91d38876dfa10332359ac671b62353aeceb448d3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 16:30:52 +0200 Subject: [PATCH 218/811] metal : switch back to default.metallib (ggml/681) ggml-ci --- CMakeLists.txt | 10 ++++++---- .../llama.swiftui.xcodeproj/project.pbxproj | 19 +------------------ ggml-metal.m | 6 +++--- scripts/sync-ggml.last | 2 +- 4 files changed, 11 insertions(+), 26 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 57ae4c2df..ce237cf45 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -177,27 +177,29 @@ if (LLAMA_METAL) if (LLAMA_METAL_SHADER_DEBUG) # custom command to do the following: # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air - # xcrun -sdk macosx metallib ggml-metal.air -o ggml.metallib + # xcrun -sdk macosx metallib ggml-metal.air -o default.metallib # # note: this is the only way I found to disable fast-math in Metal. it's ugly, but at least it works # disabling fast math is needed in order to pass tests/test-backend-ops # note: adding -fno-inline fixes the tests when using MTL_SHADER_VALIDATION=1 + # note: unfortunately, we have to call it default.metallib instead of ggml.metallib + # ref: https://github.com/ggerganov/whisper.cpp/issues/1720 set(XC_FLAGS -fno-fast-math -fno-inline -g) if (LLAMA_QKK_64) set(XC_FLAGS ${XC_FLAGS} -DQK_K=64) endif() add_custom_command( - OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib COMMAND xcrun -sdk macosx metal ${XC_FLAGS} -c ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air - COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib DEPENDS ggml-metal.metal COMMENT "Compiling Metal kernels" ) add_custom_target( ggml-metal ALL - DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib ) endif() diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index a70750a22..14b93f26c 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -23,26 +23,10 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* 
LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; - F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; /* End PBXBuildFile section */ -/* Begin PBXBuildRule section */ - F1FE20DB2B465C2100B45541 /* PBXBuildRule */ = { - isa = PBXBuildRule; - compilerSpec = com.apple.compilers.proxy.script; - fileType = sourcecode.metal; - inputFiles = ( - ); - isEditable = 1; - outputFiles = ( - "${DERIVED_FILES_DIR}/ggml-metal.air", - "${DERIVED_FILES_DIR}/ggml.metallib", - ); - script = "# metal\nxcrun metal -c \"${INPUT_FILE_PATH}\" -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\nxcrun metallib -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE%-metal}.metallib\" \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\n"; - }; -/* End PBXBuildRule section */ - /* Begin PBXFileReference section */ 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; @@ -209,7 +193,6 @@ 8A1C83712AC328BD0096AF73 /* Resources */, ); buildRules = ( - F1FE20DB2B465C2100B45541 /* PBXBuildRule */, ); dependencies = ( ); diff --git a/ggml-metal.m b/ggml-metal.m index 55cc1a872..fbbdcd8c4 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -258,14 +258,14 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { bundle = [NSBundle bundleForClass:[GGMLMetalClass class]]; #endif NSError * error = nil; - NSString * libPath = [bundle pathForResource:@"ggml" ofType:@"metallib"]; + NSString * libPath = [bundle pathForResource:@"default" ofType:@"metallib"]; if (libPath != nil) { // pre-compiled library found NSURL * libURL = [NSURL fileURLWithPath:libPath]; GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [libPath UTF8String]); ctx->library = [ctx->device newLibraryWithURL:libURL error:&error]; } else { - GGML_METAL_LOG_INFO("%s: ggml.metallib not found, loading from source\n", __func__); + GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); NSString * sourcePath; NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; @@ -295,7 +295,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { #endif // try to disable fast-math // NOTE: this seems to have no effect whatsoever - // instead, in order to disable fast-math, we have to build ggml.metallib from the command line + // instead, in order to disable fast-math, we have to build default.metallib from the command line // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air // and go through the "pre-compiled library found" path above //[options setFastMathEnabled:false]; diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 354246a26..fe7f3202f 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ 
-3fd01e00e40583ccd4b393a7c6502d6a4455a1d5 +f96711108d55bdbbd277e6be07204dce6a94fb93 From be36bb946a6336238e92706464de6a30495fe825 Mon Sep 17 00:00:00 2001 From: Ikko Eltociear Ashimine Date: Sat, 6 Jan 2024 01:02:44 +0900 Subject: [PATCH 219/811] flake.nix : fix typo (#4700) betwen -> between --- .devops/nix/package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index 5f2a7c9f4..43bdbd755 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -9,7 +9,7 @@ git, python3, mpi, - openblas, # TODO: Use the generic `blas` so users could switch betwen alternative implementations + openblas, # TODO: Use the generic `blas` so users could switch between alternative implementations cudaPackages, darwin, rocmPackages, From eec22a1c6378d9a013943cbddb4330c0da621442 Mon Sep 17 00:00:00 2001 From: a-n-n-a-l-e-e <150648636+a-n-n-a-l-e-e@users.noreply.github.com> Date: Fri, 5 Jan 2024 08:04:40 -0800 Subject: [PATCH 220/811] cmake : check for openblas64 (#4134) openblas v0.3.22 64-bit pkg-config file is named openblas64.pc https://github.com/OpenMathLib/OpenBLAS/issues/3790 --- CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index ce237cf45..668669c6d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -230,7 +230,11 @@ if (LLAMA_BLAS) if (${LLAMA_BLAS_VENDOR} MATCHES "Generic") pkg_check_modules(DepBLAS REQUIRED blas) elseif (${LLAMA_BLAS_VENDOR} MATCHES "OpenBLAS") - pkg_check_modules(DepBLAS REQUIRED openblas) + # As of openblas v0.3.22, the 64-bit is named openblas64.pc + pkg_check_modules(DepBLAS openblas64) + if (NOT DepBLAS_FOUND) + pkg_check_modules(DepBLAS REQUIRED openblas) + endif() elseif (${LLAMA_BLAS_VENDOR} MATCHES "FLAME") pkg_check_modules(DepBLAS REQUIRED blis) elseif (${LLAMA_BLAS_VENDOR} MATCHES "ATLAS") From 96e80dabc6e73ff68b09b68947b1fc25883c5094 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 6 Jan 2024 11:40:24 +0200 Subject: [PATCH 221/811] examples : improve base-translate.sh script (#4783) --- examples/base-translate.sh | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/examples/base-translate.sh b/examples/base-translate.sh index 50fba025c..00dedd0df 100755 --- a/examples/base-translate.sh +++ b/examples/base-translate.sh @@ -8,14 +8,19 @@ # cd llama.cpp # make -j # -# ./examples/base-translate.sh "" +# ./examples/base-translate.sh "" [extra-main-args] # -if [ $# -ne 2 ]; then - echo "Usage: ./base-translate.sh \"\"" +if [ $# -lt 2 ]; then + echo "Usage: ./base-translate.sh \"\" [extra-main-args]" exit 1 fi +eargs="" +if [ $# -gt 2 ]; then + eargs="${@:3}" +fi + ftmp="__llama.cpp_example_tmp__.txt" trap "rm -f $ftmp" EXIT @@ -52,5 +57,5 @@ echo "$2 model=$1 -# generate the most likely continuation, run on the CPU until the string "===" is found -./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -ngl 0 -r "===" +# generate the most likely continuation until the string "===" is found +./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -r "===" $eargs From c75ca5d96f902564cbbbdd7f5cade80d53c288bb Mon Sep 17 00:00:00 2001 From: Daniel Illescas Romero Date: Sat, 6 Jan 2024 16:12:59 +0100 Subject: [PATCH 222/811] llama.swiftui : use correct pointer for llama_token_eos (#4797) --- examples/llama.swiftui/llama.cpp.swift/LibLlama.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 66244382f..8696b493c 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -161,7 +161,7 @@ actor LlamaContext { new_token_id = llama_sample_token_greedy(context, &candidates_p) } - if new_token_id == llama_token_eos(context) || n_cur == n_len { + if new_token_id == llama_token_eos(model) || n_cur == n_len { print("\n") let new_token_str = String(cString: temporary_invalid_cchars + [0]) temporary_invalid_cchars.removeAll() From 67984921a70a7e680a24494aeb7575a66e90685d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 08:45:26 +0200 Subject: [PATCH 223/811] server : fix n_predict check (#4798) --- examples/server/server.cpp | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d1469fb08..6c7fcd176 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -447,8 +447,14 @@ struct llama_client_slot } bool has_budget(gpt_params &global_params) { + if (params.n_predict == -1 && global_params.n_predict == -1) + { + return true; // limitless + } + n_remaining = -1; - if(params.n_predict != -1) + + if (params.n_predict != -1) { n_remaining = params.n_predict - n_decoded; } @@ -456,7 +462,8 @@ struct llama_client_slot { n_remaining = global_params.n_predict - n_decoded; } - return n_remaining > 0 || n_remaining == -1; // no budget || limitless + + return n_remaining > 0; // no budget } bool available() const { @@ -1102,7 +1109,7 @@ struct llama_server_context } // check the limits - if (slot.n_decoded > 2 && slot.has_next_token && !slot.has_budget(params)) + if (slot.n_decoded > 0 && slot.has_next_token && !slot.has_budget(params)) { slot.stopped_limit = true; slot.has_next_token = false; @@ -1703,7 +1710,6 @@ struct llama_server_context llama_batch_add(batch, slot.sampled, system_tokens.size() + slot.n_past, { slot.id }, true); - slot.n_decoded += 1; slot.n_past += 1; } @@ -1921,6 +1927,7 @@ struct llama_server_context llama_sampling_accept(slot.ctx_sampling, ctx, id, true); + slot.n_decoded += 1; if (slot.n_decoded == 1) { slot.t_start_genereration = ggml_time_us(); From 63ee677efd92060b14894b984597c62e3742b8da Mon Sep 17 00:00:00 2001 From: Konstantin Zhuravlyov Date: Sun, 7 Jan 2024 01:52:42 -0500 Subject: [PATCH 224/811] ggml : use __builtin_amdgcn_sudot4 in __dp4a for gfx11 (#4787) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 10c21615e..54b266be4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -183,7 +183,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { #if defined(__gfx906__) || defined(__gfx908__) || defined(__gfx90a__) || defined(__gfx1030__) c = __builtin_amdgcn_sdot4(a, b, c, false); -#elif defined(__gfx1100__) +#elif defined(RDNA3) c = __builtin_amdgcn_sudot4( true, a, true, b, c, false); #elif defined(__gfx1010__) || defined(__gfx900__) int tmp1; From 3418c03ecc149fd657527ebb06776239b60a3f3b Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 7 Jan 2024 08:46:55 +0100 Subject: [PATCH 225/811] llama.swiftui : add visionOS target (#4805) --- .../llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git 
a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 14b93f26c..9b1a9787b 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -420,11 +420,13 @@ MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; PRODUCT_NAME = "$(TARGET_NAME)"; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; + TARGETED_DEVICE_FAMILY = "1,2,7"; }; name = Debug; }; @@ -453,10 +455,12 @@ MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; PRODUCT_NAME = "$(TARGET_NAME)"; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; + TARGETED_DEVICE_FAMILY = "1,2,7"; }; name = Release; }; From d117d4dc5dadb46831036bfa4d6e5e8c86babaf1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 09:50:31 +0200 Subject: [PATCH 226/811] llama : print tensor meta for debugging --- llama.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 3bb056dba..06db40303 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2180,7 +2180,11 @@ struct llama_model_loader { type_max = type; } - // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); + // TODO: make runtime configurable +#if 0 + struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); +#endif } switch (type_max) { From 72d8407b3696dd1293bd233b6db392be108bc377 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 7 Jan 2024 09:20:50 +0100 Subject: [PATCH 227/811] llama.swiftui : use llama.cpp as SPM package (#4804) --- .../llama.cpp.swift/LibLlama.swift | 5 +- .../llama.cpp.swift/bridging-header.h | 5 -- .../llama.swiftui.xcodeproj/project.pbxproj | 80 +++---------------- .../AccentColor.colorset/Contents.json | 11 --- .../Preview Assets.xcassets/Contents.json | 6 -- 5 files changed, 13 insertions(+), 94 deletions(-) delete mode 100644 examples/llama.swiftui/llama.cpp.swift/bridging-header.h delete mode 100644 examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json delete mode 100644 examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 8696b493c..fc79fd346 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -1,8 +1,5 @@ import Foundation - -// To use this in your own project, add llama.cpp as a swift package dependency -// and uncomment this import line. 
-// import llama +import llama enum LlamaError: Error { case couldNotInitializeContext diff --git a/examples/llama.swiftui/llama.cpp.swift/bridging-header.h b/examples/llama.swiftui/llama.cpp.swift/bridging-header.h deleted file mode 100644 index 6cd72c979..000000000 --- a/examples/llama.swiftui/llama.cpp.swift/bridging-header.h +++ /dev/null @@ -1,5 +0,0 @@ -// -// Use this file to import your target's public headers that you would like to expose to Swift. -// - -#import "llama.h" diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 9b1a9787b..a8848a49f 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -7,52 +7,31 @@ objects = { /* Begin PBXBuildFile section */ - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; - F1FE20DC2B465C4500B45541 /* 
ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + DF810E132B4A5BA200301144 /* llama in Frameworks */ = {isa = PBXBuildFile; productRef = DF810E122B4A5BA200301144 /* llama */; }; F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; - 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; - 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; - 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; - 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; - 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; - 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; - 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; - 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = 
""; }; 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + DF2D2FE72B4A59BE00FCB72D /* llama.cpp */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = llama.cpp; path = ../..; sourceTree = ""; }; F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoadCustomButton.swift; sourceTree = ""; }; /* End PBXFileReference section */ @@ -61,6 +40,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + DF810E132B4A5BA200301144 /* llama in Frameworks */, 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, ); @@ -69,30 +49,10 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { - isa = PBXGroup; - children = ( - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, - 542376092B0D9C40008E6A1C /* ggml-backend.h */, - 542376062B0D9BEA008E6A1C /* ggml-quants.h */, - 542376072B0D9BFB008E6A1C /* ggml-quants.c */, - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, - 549479C62AC9E0F200E0F78B /* ggml-metal.h */, - 549479C52AC9E0F200E0F78B /* ggml-metal.m */, - 542EA09B2AC8723900A8AEE9 /* ggml.c */, - 542EA09C2AC8723900A8AEE9 /* ggml.h */, - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, - 542EA0A12AC8729100A8AEE9 /* llama.cpp */, - 542EA0A22AC8729100A8AEE9 /* llama.h */, - ); - name = llama.cpp; - sourceTree = ""; - }; 8A1C836A2AC328BD0096AF73 = { isa = PBXGroup; children = ( - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, + DF2D2FE72B4A59BE00FCB72D /* llama.cpp */, 8A907F312AC7134E006146EA /* llama.cpp.swift */, 8A3F84232AC4C891005E2EE8 /* models */, 8A1C83752AC328BD0096AF73 /* llama.swiftui */, @@ -117,19 +77,10 @@ 8A9F7C4A2AC332BF008AE1EA /* UI */, 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, - 8A1C837C2AC328BE0096AF73 /* Preview Content */, ); path = llama.swiftui; sourceTree 
= ""; }; - 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; 8A39BE082AC7601000BFEB40 /* Frameworks */ = { isa = PBXGroup; children = ( @@ -157,7 +108,6 @@ 8A907F312AC7134E006146EA /* llama.cpp.swift */ = { isa = PBXGroup; children = ( - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, 8A907F322AC7134E006146EA /* LibLlama.swift */, ); path = llama.cpp.swift; @@ -198,6 +148,7 @@ ); name = llama.swiftui; packageProductDependencies = ( + DF810E122B4A5BA200301144 /* llama */, ); productName = llama.swiftui; productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; @@ -244,9 +195,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */, 8A3F84242AC4C891005E2EE8 /* models in Resources */, - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -258,18 +207,12 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */, - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -399,11 +342,9 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; DEVELOPMENT_TEAM = STLSG3FG8Q; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -423,7 +364,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2,7"; @@ -434,11 +374,9 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; DEVELOPMENT_TEAM = STLSG3FG8Q; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -458,7 +396,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2,7"; }; @@ -486,6 +423,13 @@ defaultConfigurationName = Release; }; /* End XCConfigurationList section */ + +/* Begin XCSwiftPackageProductDependency section */ + 
DF810E122B4A5BA200301144 /* llama */ = { + isa = XCSwiftPackageProductDependency; + productName = llama; + }; +/* End XCSwiftPackageProductDependency section */ }; rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; } diff --git a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json deleted file mode 100644 index eb8789700..000000000 --- a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "colors" : [ - { - "idiom" : "universal" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json b/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json deleted file mode 100644 index 73c00596a..000000000 --- a/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "info" : { - "author" : "xcode", - "version" : 1 - } -} From 3c36213df850a2353e95572b3636797c79b7c815 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 11:21:53 +0200 Subject: [PATCH 228/811] llama : remove redundant GQA check (#4796) --- llama.cpp | 8 -------- 1 file changed, 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 06db40303..021e79a8f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4776,7 +4776,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4900,7 +4899,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * pos; @@ -5001,7 +4999,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); const int64_t n_rot = n_embd_head_k / 2; @@ -5215,7 +5212,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5308,7 +5304,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5404,7 +5399,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5731,7 +5725,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * attn_norm_output; @@ -5955,7 
+5948,6 @@ struct llm_build_context {
         const int64_t n_embd_head = hparams.n_embd_head_v;
         const int64_t n_embd_gqa  = hparams.n_embd_v_gqa();
         GGML_ASSERT(n_embd_head == hparams.n_embd_head_k);
-        GGML_ASSERT(n_embd_gqa == n_embd);

         struct ggml_tensor * cur;
         struct ggml_tensor * pos;
From 9dede37d812604897496dd9d276ae9fbe13d1042 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 7 Jan 2024 14:29:36 +0200
Subject: [PATCH 229/811] llama : remove unused vars (#4796)

---
 llama.cpp | 2 --
 1 file changed, 2 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 021e79a8f..91aa3f8e7 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -4997,7 +4997,6 @@ struct llm_build_context {
         struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false);

         const int64_t n_embd_head = hparams.n_embd_head_v;
-        const int64_t n_embd_gqa  = hparams.n_embd_v_gqa();
         GGML_ASSERT(n_embd_head == hparams.n_embd_head_k);

         const int64_t n_rot = n_embd_head_k / 2;
@@ -5210,7 +5209,6 @@ struct llm_build_context {
         struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false);

         const int64_t n_embd_head = hparams.n_embd_head_v;
-        const int64_t n_embd_gqa  = hparams.n_embd_v_gqa();
         GGML_ASSERT(n_embd_head == hparams.n_embd_head_k);

         struct ggml_tensor * cur;
From d5a410e8556191672465f7ff58682ea2474038b0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?=
Date: Sun, 7 Jan 2024 17:24:08 +0100
Subject: [PATCH 230/811] CUDA: fixed redundant value dequantization (#4809)

---
 ggml-cuda.cu | 35 +++++++++++++++++++++++------------
 1 file changed, 23 insertions(+), 12 deletions(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 54b266be4..2df64b111 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -1872,14 +1872,6 @@ static __device__ void convert_f16(const void * vx, const int ib, const int iqs, dfloat2 & v){
     v.y = x[ib + iqs + 1];
 }

-static __device__ void convert_f32(const void * vx, const int ib, const int iqs, dfloat2 & v){
-    const float * x = (const float *) vx;
-
-    // automatic half -> float type cast if dfloat == float
-    v.x = x[ib + iqs + 0];
-    v.y = x[ib + iqs + 1];
-}
-
 static __global__ void quantize_q8_1(const float * __restrict__ x, void * __restrict__ vy, const int kx, const int kx_padded) {
     const int ix = blockDim.x*blockIdx.x + threadIdx.x;

@@ -1983,7 +1975,7 @@ static __global__ void k_get_rows_float(

 template <int qk, int qr, dequantize_kernel_t dequantize_kernel, typename dst_t>
 static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __restrict__ y, const int k) {
-    const int i = blockDim.x*blockIdx.x + 2*threadIdx.x;
+    const int i = 2*(blockDim.x*blockIdx.x + threadIdx.x);

     if (i >= k) {
         return;
     }
@@ -2002,6 +1994,19 @@ static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __
     y[iybs + iqs + y_offset] = v.y;
 }

+template <typename src_t, typename dst_t>
+static __global__ void convert_unary(const void * __restrict__ vx, dst_t * __restrict__ y, const int k) {
+    const int i = blockDim.x*blockIdx.x + threadIdx.x;
+
+    if (i >= k) {
+        return;
+    }
+
+    const src_t * x = (src_t *) vx;
+
+    y[i] = x[i];
+}
+
 // VDR = vec dot ratio, how many contiguous integers each thread processes when the vec dot kernel is called
 // MMVQ = mul_mat_vec_q, MMQ = mul_mat_q

@@ -5609,7 +5614,7 @@ static void quantize_row_q8_1_cuda(const float * x, void * vy, const int kx, con

 template <int qk, int qr, dequantize_kernel_t dequantize_kernel, typename dst_t>
 static void dequantize_block_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) {
-    const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE;
+    const int num_blocks = (k + 2*CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / (2*CUDA_DEQUANTIZE_BLOCK_SIZE);
     dequantize_block<qk, qr, dequantize_kernel><<<num_blocks, CUDA_DEQUANTIZE_BLOCK_SIZE, 0, stream>>>(vx, y, k);
 }

@@ -5659,6 +5664,12 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu
 #endif
 }

+template <typename src_t, typename dst_t>
+static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) {
+    const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE;
+    convert_unary<src_t><<<num_blocks, CUDA_DEQUANTIZE_BLOCK_SIZE, 0, stream>>>(vx, y, k);
+}
+
 static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) {
     switch (type) {
         case GGML_TYPE_Q4_0:
@@ -5682,7 +5693,7 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) {
         case GGML_TYPE_Q6_K:
             return dequantize_row_q6_K_cuda;
         case GGML_TYPE_F32:
-            return dequantize_block_cuda<1, 1, convert_f32>;
+            return convert_unary_cuda<float>;
         default:
             return nullptr;
     }
@@ -5711,7 +5722,7 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) {
         case GGML_TYPE_Q6_K:
             return dequantize_row_q6_K_cuda;
         case GGML_TYPE_F16:
-            return dequantize_block_cuda<1, 1, convert_f16>;
+            return convert_unary_cuda<half>;
         default:
             return nullptr;
     }
From 226460cc0d5b185bc6685fb76f418fd9418d7add Mon Sep 17 00:00:00 2001
From: slaren
Date: Sun, 7 Jan 2024 17:59:01 +0100
Subject: [PATCH 231/811] llama-bench : add no-kv-offload parameter (#4812)

---
 examples/llama-bench/llama-bench.cpp | 34 +++++++++++++++++++++++++---
 1 file changed, 31 insertions(+), 3 deletions(-)

diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp
index 6617c050d..7f7186cde 100644
--- a/examples/llama-bench/llama-bench.cpp
+++ b/examples/llama-bench/llama-bench.cpp
@@ -138,6 +138,7 @@ struct cmd_params {
     std::vector<int> n_threads;
     std::vector<int> n_gpu_layers;
     std::vector<int> main_gpu;
+    std::vector<bool> no_kv_offload;
     std::vector<bool> mul_mat_q;
     std::vector<std::array<float, LLAMA_MAX_DEVICES>> tensor_split;
     int reps;
@@ -155,6 +156,7 @@ static const cmd_params cmd_params_defaults = {
     /* n_threads     */ {get_num_physical_cores()},
     /* n_gpu_layers  */ {99},
     /* main_gpu      */ {0},
+    /* no_kv_offload */ {false},
     /* mul_mat_q     */ {true},
     /* tensor_split  */ {{}},
     /* reps          */ 5,
@@ -176,6 +178,7 @@ static void print_usage(int /* argc */, char ** argv) {
     printf("  -t, --threads <n>             (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str());
     printf("  -ngl, --n-gpu-layers <n>      (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str());
     printf("  -mg, --main-gpu <i>           (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str());
+    printf("  -nkvo, --no-kv-offload <0|1>  (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str());
     printf("  -mmq, --mul-mat-q <0|1>       (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str());
     printf("  -ts, --tensor_split <ts0/ts1/..>\n");
     printf("  -r, --repetitions <n>         (default: %d)\n", cmd_params_defaults.reps);
@@ -309,6 +312,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) {
                 break;
             }
             params.main_gpu = split<int>(argv[i], split_delim);
+        } else if (arg == "-nkvo" || arg == "--no-kv-offload") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            auto p = split<bool>(argv[i], split_delim);
+            params.no_kv_offload.insert(params.no_kv_offload.end(), p.begin(), p.end());
         } else if (arg == "-mmq" || arg == "--mul-mat-q") {
             if (++i >= argc) {
                 invalid_param = true;
                 break;
             }
@@ -383,6 +393,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) {
     if (params.type_v.empty())      { params.type_v = cmd_params_defaults.type_v; }
     if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; }
     if (params.main_gpu.empty())    { params.main_gpu = cmd_params_defaults.main_gpu; }
+    if
(params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } if (params.n_threads.empty()) { params.n_threads = cmd_params_defaults.n_threads; } @@ -400,6 +411,7 @@ struct cmd_params_instance { int n_threads; int n_gpu_layers; int main_gpu; + bool no_kv_offload; bool mul_mat_q; std::array tensor_split; @@ -428,6 +440,7 @@ struct cmd_params_instance { cparams.type_k = type_k; cparams.type_v = type_v; cparams.mul_mat_q = mul_mat_q; + cparams.offload_kqv = !no_kv_offload; return cparams; } @@ -444,6 +457,7 @@ static std::vector get_cmd_params_instances_int(const cmd_p for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) + for (const auto & nkvo : params.no_kv_offload) for (const auto & nt : params.n_threads) { cmd_params_instance instance = { /* .model = */ m, @@ -455,6 +469,7 @@ static std::vector get_cmd_params_instances_int(const cmd_p /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -476,6 +491,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) + for (const auto & nkvo : params.no_kv_offload) for (const auto & nt : params.n_threads) { for (const auto & n_prompt : params.n_prompt) { if (n_prompt == 0) { @@ -491,6 +507,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -511,6 +528,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -559,6 +577,7 @@ struct test { ggml_type type_v; int n_gpu_layers; int main_gpu; + bool no_kv_offload; bool mul_mat_q; std::array tensor_split; int n_prompt; @@ -579,6 +598,7 @@ struct test { type_v = inst.type_v; n_gpu_layers = inst.n_gpu_layers; main_gpu = inst.main_gpu; + no_kv_offload = inst.no_kv_offload; mul_mat_q = inst.mul_mat_q; tensor_split = inst.tensor_split; n_prompt = inst.n_prompt; @@ -640,7 +660,8 @@ struct test { "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "mul_mat_q", "tensor_split", + "n_gpu_layers", "main_gpu", "no_kv_offload", + "mul_mat_q", "tensor_split", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -659,7 +680,7 @@ struct test { return INT; } if (field == "cuda" || field == "opencl" || field == "metal" || field == "gpu_blas" || field == "blas" || - field == "f16_kv" || field == "mul_mat_q") { + field == "f16_kv" || field == "no_kv_offload" || field == "mul_mat_q") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -690,7 +711,8 @@ struct test { cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), - std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(mul_mat_q), 
tensor_split_str, + std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(no_kv_offload), + std::to_string(mul_mat_q), tensor_split_str, std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -851,6 +873,9 @@ struct markdown_printer : public printer { if (field == "mul_mat_q") { return "mmq"; } + if (field == "no_kv_offload") { + return "nkvo"; + } if (field == "tensor_split") { return "ts"; } @@ -885,6 +910,9 @@ struct markdown_printer : public printer { if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { fields.push_back("mul_mat_q"); } + if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { + fields.push_back("no_kv_offload"); + } if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { fields.push_back("tensor_split"); } From b7e7982953f80a656e03feb5cfb17a17a173eb26 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Sun, 7 Jan 2024 21:24:11 +0100 Subject: [PATCH 232/811] readme : add lgrammel/modelfusion JS/TS client for llama.cpp (#4814) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ca6d14e17..2f6e6ffee 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,7 @@ as the main playground for developing new features for the [ggml](https://github - Python: [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python) - Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) - Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp) +- JS/TS (llama.cpp server client): [lgrammel/modelfusion](https://modelfusion.dev/integration/model-provider/llamacpp) - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb) - Rust: [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp) - C#/.NET: [SciSharp/LLamaSharp](https://github.com/SciSharp/LLamaSharp) From b0034d93ce2949ce7d9c098ca02e56f66cd484e2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 11:14:04 +0200 Subject: [PATCH 233/811] examples : add passkey test (#3856) * examples : add passkey test * passkey : better prints * passkey : select pass key pos from CLI * passkey : simplify n_past logic * make : add passkey target * passkey : add "self-extend"-like context extension (#4810) * llama : "self-extend"-like context extension * passkey : add comment * passkey : add readme --- .gitignore | 1 + Makefile | 5 +- examples/CMakeLists.txt | 1 + examples/batched/batched.cpp | 1 + examples/passkey/CMakeLists.txt | 5 + examples/passkey/README.md | 12 ++ examples/passkey/passkey.cpp | 296 ++++++++++++++++++++++++++++++++ llama.cpp | 34 ++++ llama.h | 7 + 9 files changed, 361 insertions(+), 1 deletion(-) create mode 100644 examples/passkey/CMakeLists.txt create mode 100644 examples/passkey/README.md create mode 100644 examples/passkey/passkey.cpp diff --git a/.gitignore b/.gitignore index def74a1e9..cf1b692e9 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ models-mnt /lookup /main /metal +/passkey /perplexity /q8dot /quantize diff --git a/Makefile b/Makefile index 28c6d79bc..4c7e175bf 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama 
beam-search \ - speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup tests/test-c.o + speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup passkey tests/test-c.o # Binaries only useful for tests TEST_TARGETS = \ @@ -665,6 +665,9 @@ lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + ifdef LLAMA_METAL metal: examples/metal/metal.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 4cc13d6e9..0c71cbdf7 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -31,6 +31,7 @@ else() add_subdirectory(quantize-stats) add_subdirectory(save-load-state) add_subdirectory(simple) + add_subdirectory(passkey) add_subdirectory(speculative) add_subdirectory(lookahead) add_subdirectory(lookup) diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index 22a4265df..b1775e0b0 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -69,6 +69,7 @@ int main(int argc, char ** argv) { std::vector tokens_list; tokens_list = ::llama_tokenize(model, params.prompt, true); + const int n_kv_req = tokens_list.size() + (n_len - tokens_list.size())*n_parallel; // initialize the context diff --git a/examples/passkey/CMakeLists.txt b/examples/passkey/CMakeLists.txt new file mode 100644 index 000000000..3161bf3ef --- /dev/null +++ b/examples/passkey/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET passkey) +add_executable(${TARGET} passkey.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/passkey/README.md b/examples/passkey/README.md new file mode 100644 index 000000000..4a22bb559 --- /dev/null +++ b/examples/passkey/README.md @@ -0,0 +1,12 @@ +# llama.cpp/example/passkey + +See the following PRs for more info: + +- https://github.com/ggerganov/llama.cpp/pull/3856 +- https://github.com/ggerganov/llama.cpp/pull/4810 + +### Usage + +```bash +make -j && ./passkey ./models/llama-7b-v2/ggml-model-f16.gguf 250 +``` diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp new file mode 100644 index 000000000..5c0022832 --- /dev/null +++ b/examples/passkey/passkey.cpp @@ -0,0 +1,296 @@ +#include "common.h" +#include "llama.h" + +#include +#include +#include +#include + +int main(int argc, char ** argv) { + gpt_params params; + + if (argc == 1 || argv[1][0] == '-') { + printf("usage: %s MODEL_PATH N_JUNK N_GRP I_POS SEED\n" , argv[0]); + return 1 ; + } + + int seed = -1; + + int n_junk = 250; // number of times to repeat the junk text + int n_keep = 32; // number of tokens in the prompt prefix + int n_grp = 1; // if more than 1 - perform LongLM SelfExtend + int i_pos = -1; // position of the passkey in the junk text + + if (argc >= 2) { + params.model = argv[1]; + } + + if (argc >= 3) { + n_junk = std::stoi(argv[2]); + } + + if (argc >= 4) { + n_grp = std::stoi(argv[3]); + } + + if (argc >= 5) { + i_pos = std::stoi(argv[4]); + } + + if (argc >= 6) { + seed = std::stoi(argv[5]); + } + + if (seed == -1) { + seed = time(NULL); + } + + srand(seed); + + if (i_pos == -1) { + i_pos = 
rand() % n_junk; + } + + const std::string prompt_prefix = "There is an important info hidden inside a lot of irrelevant text. Find it and memorize them. I will quiz you about the important information there."; + const std::string prompt_suffix = " What is the pass key? The pass key is"; + + // generate junk text + params.prompt = prompt_prefix; + + const int passkey = rand() % 50000 + 1; + + for (int i = 0; i < n_junk; i++) { + if (i % n_junk == i_pos) { + params.prompt += " The pass key is " + std::to_string(passkey) + ". Remember it. " + std::to_string(passkey) + " is the pass key."; + } + + params.prompt += " The grass is green. The sky is blue. The sun is yellow. Here we go. There and back again."; + } + + params.prompt += prompt_suffix; + + // init LLM + + llama_backend_init(params.numa); + + // initialize the model + + llama_model_params model_params = llama_model_default_params(); + + model_params.n_gpu_layers = 99; // offload all layers to the GPU + + llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params); + + if (model == NULL) { + fprintf(stderr , "%s: error: unable to load model\n" , __func__); + return 1; + } + + // initialize the context + + llama_context_params ctx_params = llama_context_default_params(); + + ctx_params.seed = seed; + ctx_params.n_ctx = llama_n_ctx_train(model)*n_grp + n_keep; + ctx_params.n_batch = 512; + ctx_params.n_threads = params.n_threads; + ctx_params.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; + + GGML_ASSERT(ctx_params.n_batch % n_grp == 0 && "n_batch must be divisible by n_grp"); + + llama_context * ctx = llama_new_context_with_model(model, ctx_params); + + if (ctx == NULL) { + fprintf(stderr , "%s: error: failed to create the llama_context\n" , __func__); + return 1; + } + + // tokenize the prompt + std::vector tokens_list; + tokens_list = ::llama_tokenize(ctx, params.prompt, true); + + // tokenize the prefix and use it as a sink + const int n_tokens_prefix = ::llama_tokenize(ctx, prompt_prefix, true).size(); + + const int n_tokens_all = tokens_list.size(); + + // we leave a margin of 16 tokens for the generated text - it should contain just the passkey + const int n_predict = 16; + + // total length of the sequences including the prompt + const int n_len = n_tokens_all + n_predict; + + const int n_ctx = llama_n_ctx(ctx) - n_keep; + const int n_kv_req = llama_n_ctx(ctx); + const int n_batch = ctx_params.n_batch; + const int n_batch_grp = ctx_params.n_batch/n_grp; + + LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_kv_req = %d, n_grp = %d, n_batch = %d\n", __func__, n_len, n_ctx, n_kv_req, n_grp, n_batch); + + // print the prompt token-by-token + + LOG_TEE("\n"); + LOG_TEE("prefix tokens: %d\n", n_tokens_prefix); + LOG_TEE("prompt tokens: %d\n", n_tokens_all); + //LOG_TEE("prompt: %s\n", params.prompt.c_str()); + + llama_batch batch = llama_batch_init(512, 0, 1); + + int n_past = 0; + + // fill the KV cache + for (int i = 0; i < n_ctx; i += n_batch) { + if (i > 0 && n_grp > 1) { + // if SelfExtend is enabled, we compress the position from the last batch by a factor of n_grp + const int ib = i/n_batch - 1; + const int bd = n_batch_grp*(n_grp - 1); + + llama_kv_cache_seq_shift(ctx, 0, n_past - n_batch, n_past, ib*bd); + llama_kv_cache_seq_div (ctx, 0, n_past - n_batch + ib*bd, n_past + ib*bd, n_grp); + + n_past -= bd; + } + + llama_batch_clear(batch); + + for (int j = 0; j < n_batch && i + j < n_tokens_all; j++) { + llama_batch_add(batch, tokens_list[i + j], n_past++, { 0 }, 
false); + } + + if (i + n_batch >= n_tokens_all) { + batch.logits[batch.n_tokens - 1] = true; + } + + if (llama_decode(ctx, batch) != 0) { + LOG_TEE("%s: llama_decode() failed\n", __func__); + return 1; + } + + LOG_TEE("%s: processed: [%6d, %6d)\n", __func__, i, std::min(i + n_batch, n_tokens_all)); + + if (i + n_batch >= n_tokens_all) { + break; + } + } + + for (int i = n_ctx; i < n_tokens_all; i += n_batch) { + const int n_discard = n_batch; + + LOG_TEE("%s: shifting KV cache with %d\n", __func__, n_discard); + + llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + + n_past -= n_discard; + + llama_batch_clear(batch); + + for (int j = 0; j < n_batch && i + j < n_tokens_all; j++) { + llama_batch_add(batch, tokens_list[i + j], n_past++, { 0 }, false); + } + + if (i + n_batch >= n_tokens_all) { + batch.logits[batch.n_tokens - 1] = true; + } + + if (llama_decode(ctx, batch) != 0) { + LOG_TEE("%s: llama_decode() failed\n", __func__); + return 1; + } + + LOG_TEE("%s: processed: [%6d, %6d)\n", __func__, i, std::min(i + n_batch, n_tokens_all)); + } + + { + const int n_discard = n_past - n_ctx + n_predict; + + if (n_discard > 0) { + LOG_TEE("%s: shifting KV cache with %d to free space for the answer\n", __func__, n_discard); + + llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + + n_past -= n_discard; + } + } + + LOG_TEE("\n"); + LOG_TEE("%s: passkey = %d, inserted at position %d / %d (token pos: ~%d)\n", __func__, passkey, i_pos, n_junk, (i_pos * n_tokens_all) / n_junk); + LOG_TEE("\n"); + + // main loop + + int n_cur = n_tokens_all; + int n_decode = 0; + + LOG_TEE("%s", prompt_suffix.c_str()); + fflush(stdout); + + const auto t_main_start = ggml_time_us(); + + while (n_cur <= n_len) { + // sample the next token + { + auto n_vocab = llama_n_vocab(model); + auto * logits = llama_get_logits_ith(ctx, batch.n_tokens - 1); + + std::vector candidates; + candidates.reserve(n_vocab); + + for (llama_token token_id = 0; token_id < n_vocab; token_id++) { + candidates.emplace_back(llama_token_data{ token_id, logits[token_id], 0.0f }); + } + + llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; + + // sample the most likely token + const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); + + // is it an end of stream? 
+ if (new_token_id == llama_token_eos(model) || n_cur == n_len) { + LOG_TEE("\n"); + + break; + } + + LOG_TEE("%s", llama_token_to_piece(ctx, new_token_id).c_str()); + fflush(stdout); + + n_decode += 1; + + // prepare the next batch + llama_batch_clear(batch); + + // push this new token for next evaluation + llama_batch_add(batch, new_token_id, n_past++, { 0 }, true); + } + + n_cur += 1; + + // evaluate the current batch with the transformer model + if (llama_decode(ctx, batch)) { + fprintf(stderr, "%s : failed to eval, return code %d\n", __func__, 1); + return 1; + } + } + + LOG_TEE("\n"); + + const auto t_main_end = ggml_time_us(); + + LOG_TEE("%s: decoded %d tokens in %.2f s, speed: %.2f t/s\n", + __func__, n_decode, (t_main_end - t_main_start) / 1000000.0f, n_decode / ((t_main_end - t_main_start) / 1000000.0f)); + + llama_print_timings(ctx); + + fprintf(stderr, "\n"); + + llama_batch_free(batch); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + return 0; +} diff --git a/llama.cpp b/llama.cpp index 91aa3f8e7..63853d1c3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1903,6 +1903,28 @@ static void llama_kv_cache_seq_shift( cache.head = new_head != cache.size ? new_head : 0; } +static void llama_kv_cache_seq_div( + struct llama_kv_cache & cache, + llama_seq_id seq_id, + llama_pos p0, + llama_pos p1, + int d) { + if (p0 < 0) p0 = 0; + if (p1 < 0) p1 = std::numeric_limits::max(); + + for (uint32_t i = 0; i < cache.size; ++i) { + if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { + cache.has_shift = true; + + { + llama_pos p_old = cache.cells[i].pos; + cache.cells[i].pos /= d; + cache.cells[i].delta += cache.cells[i].pos - p_old; + } + } + } +} + // // model loading and saving // @@ -10140,9 +10162,21 @@ void llama_kv_cache_seq_keep(struct llama_context * ctx, llama_seq_id seq_id) { } void llama_kv_cache_seq_shift(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, llama_pos delta) { + if (delta == 0) { + return; + } + llama_kv_cache_seq_shift(ctx->kv_self, seq_id, p0, p1, delta); } +void llama_kv_cache_seq_div(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, int d) { + if (d == 1) { + return; + } + + llama_kv_cache_seq_div(ctx->kv_self, seq_id, p0, p1, d); +} + // Returns the *maximum* size of the state size_t llama_get_state_size(const struct llama_context * ctx) { // we don't know size of rng until we actually serialize it. so reserve more than enough memory for its serialized state. 
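// A minimal standalone sketch (illustrative names and simplified types, not llama.cpp API)
// of the position compression performed by the new llama_kv_cache_seq_div above: every cell
// of the given sequence whose position lies in [p0, p1) has its position divided by d, and
// the accumulated delta is kept so that, for a RoPEd cache, the K data can be corrected later
// (the patch records this via cache.has_shift and cache.cells[i].delta).
#include <cstdint>
#include <vector>

struct kv_cell_sketch { int32_t pos = 0; int32_t delta = 0; };

static void seq_div_sketch(std::vector<kv_cell_sketch> & cells, int32_t p0, int32_t p1, int d) {
    for (auto & c : cells) {
        if (c.pos >= p0 && c.pos < p1) {
            const int32_t p_old = c.pos;
            c.pos   /= d;                 // e.g. with d = 4, positions 512..515 all collapse to 128
            c.delta += c.pos - p_old;     // same bookkeeping as cache.cells[i].delta in the patch
        }
    }
}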
diff --git a/llama.h b/llama.h index 461d4604a..5305de90b 100644 --- a/llama.h +++ b/llama.h @@ -484,6 +484,13 @@ extern "C" { llama_pos p1, llama_pos delta); + LLAMA_API void llama_kv_cache_seq_div( + struct llama_context * ctx, + llama_seq_id seq_id, + llama_pos p0, + llama_pos p1, + int d); + // // State / sessions // From 52531fdff88764282c1b233174721aab8347252d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 11:18:32 +0200 Subject: [PATCH 234/811] main : add self-extend support (#4815) * examples : add passkey test * passkey : better prints * passkey : select pass key pos from CLI * passkey : simplify n_past logic * llama : "self-extend"-like context extension * passkey : add comment * main : add Self-Extend support * llama : add comment about llama_kv_cache_seq_div --- common/common.cpp | 18 +++++++++ common/common.h | 2 + examples/main/main.cpp | 87 ++++++++++++++++++++++++++++++------------ llama.h | 4 ++ 4 files changed, 87 insertions(+), 24 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index eacaee18e..6b4913a65 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -220,6 +220,20 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_ctx = std::stoi(argv[i]); + } else if (arg == "--grp-attn-n" || arg == "-gan") { + if (++i >= argc) { + invalid_param = true; + break; + } + + params.grp_attn_n = std::stoi(argv[i]); + } else if (arg == "--grp-attn-w" || arg == "-gaw") { + if (++i >= argc) { + invalid_param = true; + break; + } + + params.grp_attn_w = std::stoi(argv[i]); } else if (arg == "--rope-freq-base") { if (++i >= argc) { invalid_param = true; @@ -904,6 +918,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" Not recommended since this is both slower and uses more VRAM.\n"); #endif // GGML_USE_CUBLAS #endif + printf(" -gan N, --grp-attn-n N\n"); + printf(" group-attention factor (default: %d)\n", params.grp_attn_n); + printf(" -gat N, --grp-attn-w N\n"); + printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); printf(" verbose print of the KV cache\n"); diff --git a/common/common.h b/common/common.h index 9659aa045..e2bbfc258 100644 --- a/common/common.h +++ b/common/common.h @@ -62,6 +62,8 @@ struct gpt_params { int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. 
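+    // group-attention ("self-extend") parameters: past KV positions are grouped in windows
+    // of grp_attn_w and divided by grp_attn_n via llama_kv_cache_seq_div, aiming to stretch
+    // the usable context towards n_ctx_train * grp_attn_n; the defaults below (1 and 512)
+    // leave grouping disabled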
+ int32_t grp_attn_n = 1; // group-attention factor + int32_t grp_attn_w = 512; // group-attention width float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor diff --git a/examples/main/main.cpp b/examples/main/main.cpp index c096f110b..5ea67051f 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -439,6 +439,21 @@ int main(int argc, char ** argv) { LOG_TEE("sampling: \n%s\n", llama_sampling_print(sparams).c_str()); LOG_TEE("sampling order: \n%s\n", llama_sampling_order_print(sparams).c_str()); LOG_TEE("generate: n_ctx = %d, n_batch = %d, n_predict = %d, n_keep = %d\n", n_ctx, params.n_batch, params.n_predict, params.n_keep); + + // group-attention state + // number of grouped KV tokens so far (used only if params.grp_attn_n > 1) + int ga_i = 0; + + const int ga_n = params.grp_attn_n; + const int ga_w = params.grp_attn_w; + + if (ga_n != 1) { + GGML_ASSERT(ga_n > 0 && "grp_attn_n must be positive"); // NOLINT + GGML_ASSERT(ga_w % ga_n == 0 && "grp_attn_w must be a multiple of grp_attn_n"); // NOLINT + //GGML_ASSERT(n_ctx_train % ga_w == 0 && "n_ctx_train must be a multiple of grp_attn_w"); // NOLINT + //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * grp_attn_n"); // NOLINT + LOG_TEE("self-extend: n_ctx_train = %d, grp_attn_n = %d, grp_attn_w = %d\n", n_ctx_train, ga_n, ga_w); + } LOG_TEE("\n\n"); if (params.interactive) { @@ -500,37 +515,61 @@ int main(int argc, char ** argv) { fflush(stdout); } - // infinite text generation via context swapping - // if we run out of context: - // - take the n_keep first tokens from the original prompt (via n_past) - // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches - if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) { - if (params.n_predict == -2) { - LOG_TEE("\n\n%s: context full and n_predict == -%d => stopping\n", __func__, params.n_predict); - break; + if (ga_n == 1) { + // infinite text generation via context shifting + // if we run out of context: + // - take the n_keep first tokens from the original prompt (via n_past) + // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches + if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) { + if (params.n_predict == -2) { + LOG_TEE("\n\n%s: context full and n_predict == -%d => stopping\n", __func__, params.n_predict); + break; + } + + const int n_left = n_past - params.n_keep - 1; + const int n_discard = n_left/2; + + LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", + n_past, n_left, n_ctx, params.n_keep, n_discard); + + llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); + llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + + n_past -= n_discard; + + if (ctx_guidance) { + n_past_guidance -= n_discard; + } + + LOG("after swap: n_past = %d, n_past_guidance = %d\n", n_past, n_past_guidance); + + LOG("embd: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd).c_str()); + + LOG("clear session path\n"); + path_session.clear(); } + } else { + // context extension via Self-Extend + while (n_past >= ga_i + ga_w) { + const int ib = (ga_n*ga_i)/ga_w; + const int bd = (ga_w/ga_n)*(ga_n - 1); + const int dd = (ga_w/ga_n) - ib*bd - ga_w; - const int n_left = n_past - params.n_keep - 1; - const int n_discard = n_left/2; + LOG("\n"); + 
LOG("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", ga_i, n_past, ib*bd, ga_i + ib*bd, n_past + ib*bd); + LOG("div: [%6d, %6d] / %6d -> [%6d, %6d]\n", ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n, (ga_i + ib*bd)/ga_n, (ga_i + ib*bd + ga_w)/ga_n); + LOG("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", ga_i + ib*bd + ga_w, n_past + ib*bd, dd, ga_i + ib*bd + ga_w + dd, n_past + ib*bd + dd); - LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", - n_past, n_left, n_ctx, params.n_keep, n_discard); + llama_kv_cache_seq_shift(ctx, 0, ga_i, n_past, ib*bd); + llama_kv_cache_seq_div (ctx, 0, ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n); + llama_kv_cache_seq_shift(ctx, 0, ga_i + ib*bd + ga_w, n_past + ib*bd, dd); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + n_past -= bd; - n_past -= n_discard; + ga_i += ga_w/ga_n; - if (ctx_guidance) { - n_past_guidance -= n_discard; + LOG("\nn_past_old = %d, n_past = %d, ga_i = %d\n\n", n_past + bd, n_past, ga_i); } - - LOG("after swap: n_past = %d, n_past_guidance = %d\n", n_past, n_past_guidance); - - LOG("embd: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd).c_str()); - - LOG("clear session path\n"); - path_session.clear(); } // try to reuse a matching prefix from the loaded session instead of re-eval (via n_past) diff --git a/llama.h b/llama.h index 5305de90b..869ff0acf 100644 --- a/llama.h +++ b/llama.h @@ -484,6 +484,10 @@ extern "C" { llama_pos p1, llama_pos delta); + // Integer division of the positions by factor of `d > 1` + // If the KV cache is RoPEd, the KV data is updated accordingly + // p0 < 0 : [0, p1] + // p1 < 0 : [p0, inf) LLAMA_API void llama_kv_cache_seq_div( struct llama_context * ctx, llama_seq_id seq_id, From 42ea63c5a3da01d4a94e906d8565868012c79f4f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 15:57:36 +0200 Subject: [PATCH 235/811] llama.swiftui : update readme --- examples/llama.swiftui/README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/examples/llama.swiftui/README.md b/examples/llama.swiftui/README.md index fa68e6ed8..96cf743d4 100644 --- a/examples/llama.swiftui/README.md +++ b/examples/llama.swiftui/README.md @@ -1,7 +1,12 @@ -# llama.swiftui +# llama.cpp/examples/llama.swiftui -Local inference of llama.cpp on an iPhone. -So far I only tested with starcoder 1B model, but it can most likely handle 7B models as well. +Local inference of llama.cpp on an iPhone. This is a sample app that can be used as a starting +point for more advanced projects. 
+ +For usage instructions and performance stats, check the following discussion: https://github.com/ggerganov/llama.cpp/discussions/4508 + +![image](https://github.com/ggerganov/llama.cpp/assets/1991296/2b40284f-8421-47a2-b634-74eece09a299) + +Video demonstration: https://github.com/bachittle/llama.cpp/assets/39804642/e290827a-4edb-4093-9642-2a5e399ec545 - From 668b31fc7d86245435ad6574e0e1126e734049e2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 16:40:51 +0200 Subject: [PATCH 236/811] swift : exclude ggml-metal.metal from the package (#4822) --- Package.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Package.swift b/Package.swift index e33a4ff46..583e2e276 100644 --- a/Package.swift +++ b/Package.swift @@ -21,7 +21,7 @@ let package = Package( name: "llama", dependencies: ["ggml"], path: ".", - exclude: [], + exclude: ["ggml-metal.metal"], sources: [ "llama.cpp", ], From dd5ae06405c5565b99889bdb3f168f4351252cfb Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 8 Jan 2024 16:02:32 +0100 Subject: [PATCH 237/811] SOTA 2-bit quants (#4773) * iq2_xxs: basics * iq2_xxs: scalar and AVX2 dot products Needed to change Q8_K to have quants in the -127...127 range, else the IQ2_XXS AVX implementation becomes very awkward. The alternative would have been to use Q8_0 instead. Perhaps I'll change later, for now this is what we have. * iq2_xxs: ARM_NEON dot product Somehow strangely slow (112 ms/token). * iq2_xxs: WIP Metal Dequantize works, something is still wrong with the dot product. * iq2_xxs: Metal dot product now works We have PP-512 = 475 t/s TG-128 = 47.3 t/s Not the greatest performance, but not complete garbage either. * iq2_xxs: slighty faster dot product TG-128 is now 48.4 t/s * iq2_xxs: slighty faster dot product TG-128 is now 50.9 t/s * iq2_xxs: even faster Metal dot product TG-128 is now 54.1 t/s. Strangely enough, putting the signs lookup table into shared memory has a bigger impact than the grid values being in shared memory. * iq2_xxs: dequantize CUDA kernel - fix conflict with master * iq2_xxs: quantized CUDA dot product (MMVQ) We get TG-128 = 153.1 t/s * iq2_xxs: slightly faster CUDA dot product TG-128 is now at 155.1 t/s. * iq2_xxs: add to llama ftype enum * iq2_xxs: fix MoE on Metal * Fix missing MMQ ops when on hipBLAS I had put the ggml_supports_mmq call at the wrong place. * Fix bug in qequantize_row_iq2_xxs The 0.25f factor was missing. Great detective work by @ggerganov! * Fixing tests * PR suggestion --------- Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 205 +++++++++++++++++++++++ ggml-metal.m | 40 +++++ ggml-metal.metal | 314 ++++++++++++++++++++++++++++++++++++ ggml-quants.c | 294 ++++++++++++++++++++++++++++++++- ggml-quants.h | 12 ++ ggml.c | 26 +++ ggml.h | 3 + llama.cpp | 3 + llama.h | 1 + tests/test-quantize-fns.cpp | 5 + 10 files changed, 902 insertions(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2df64b111..e0ea890b1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -477,6 +477,14 @@ typedef struct { } block_q6_K; static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_K block size/padding"); +#define QR2_XXS 8 +#define QI2_XXS (QK_K / (4*QR2_XXS)) +typedef struct { + half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1292,6 +1300,128 @@ static __global__ void dequantize_block_q6_K(const void * __restrict__ vx, dst_t #endif } +static const __device__ uint64_t kgrid_iq2xxs[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 
0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +static const __device__ uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; + +static const __device__ uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +inline bool ggml_cuda_supports_mmq(enum ggml_type type) { + switch (type) { + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + return true; + default: + return false; + } +} + +template +static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq2_xxs * x = (const block_iq2_xxs *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid = (const uint8_t 
*)(kgrid_iq2xxs + aux8[il]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.25f; + const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*il) & 127]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); +#else + assert(false); +#endif + +} + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -3825,6 +3955,55 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_mul_mat( return vec_dot_q6_K_q8_1_impl_mmq(&x_ql[index_x], &y_qs[index_y], sc, x_dmf[i * (WARP_SIZE/QI6_K) + i/QI6_K], &y_df[index_y/QI8_1]); } +static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq2_xxs * bq2 = (const block_iq2_xxs *) vbq; + +#if QR2_XXS == 8 + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const int8_t * q8 = bq8_1[ib32].qs; + uint32_t aux32 = q2[2] | (q2[3] << 16); + int sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[l]); + const uint8_t signs = ksigns_iq2xs[aux32 & 127]; + for (int j = 0; j < 8; ++j) { + sumi += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + aux32 >>= 7; + } + const float d = (float)bq2->d * (0.5f + aux32) * (float)bq8_1[ib32].ds.x * 0.25f; + return d * sumi; +#else + // iqs is 0...15 + const int ib32 = iqs/2; + const int il = iqs%2; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid1 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)bq2->d * (0.5f + (aux32 >> 28)) * (float)bq8_1[ib32].ds.x * 0.25f; + const uint8_t signs1 = ksigns_iq2xs[(aux32 >> 14*il) & 127]; + const uint8_t signs2 = ksigns_iq2xs[(aux32 >> (14*il + 7)) & 127]; + const int8_t * q8 = bq8_1[ib32].qs + 16*il; + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j+0] * grid1[j] * (signs1 & kmask_iq2xs[j] ? -1 : 1); + sumi2 += q8[j+8] * grid2[j] * (signs2 & kmask_iq2xs[j] ? 
-1 : 1); + } + return d * (sumi1 + sumi2); +#endif +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -5664,6 +5843,12 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +template +static void dequantize_row_iq2_xxs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq2_xxs<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -5692,6 +5877,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_q5_K_cuda; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -5721,6 +5908,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_q5_K_cuda; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -5915,6 +6104,15 @@ static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * <<>>(vx, vy, dst, ncols, nrows); } +static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + const dim3 block_nums(block_num_y, 1, 1); + const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); + mul_mat_vec_q + <<>>(vx, vy, dst, ncols, nrows); +} + static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { @@ -7407,6 +7605,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: return max_compute_capability >= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -7427,6 +7626,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q3_K: case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: + case GGML_TYPE_IQ2_XXS: return max_compute_capability >= CC_VOLTA ? 
128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -7477,6 +7677,9 @@ static void ggml_cuda_op_mul_mat_vec_q( case GGML_TYPE_Q6_K: mul_mat_vec_q6_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); break; + case GGML_TYPE_IQ2_XXS: + mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; @@ -8693,6 +8896,8 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + use_mul_mat_q = use_mul_mat_q && ggml_cuda_supports_mmq(src0->type); + // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); //printf(" %8d %8d %8d %8d\n", src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]); diff --git a/ggml-metal.m b/ggml-metal.m index fbbdcd8c4..6c2a8d04e 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -88,6 +88,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q5_K); GGML_METAL_DECL_KERNEL(get_rows_q6_K); GGML_METAL_DECL_KERNEL(get_rows_i32); + GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -106,6 +107,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); @@ -121,6 +123,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); @@ -133,6 +136,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); @@ -145,6 +149,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); @@ -379,6 +384,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q5_K); GGML_METAL_ADD_KERNEL(get_rows_q6_K); GGML_METAL_ADD_KERNEL(get_rows_i32); + GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -397,6 +403,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); @@ -412,6 +419,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); + 
GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); @@ -425,6 +433,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); @@ -437,6 +446,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); } GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); @@ -502,6 +512,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q5_K); GGML_METAL_DEL_KERNEL(get_rows_q6_K); GGML_METAL_DEL_KERNEL(get_rows_i32); + GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -520,6 +531,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); @@ -535,6 +547,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); @@ -548,6 +561,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); @@ -560,6 +574,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); } GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); @@ -1541,6 +1556,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_K_f32]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1653,6 +1669,12 @@ bool ggml_metal_graph_compute( nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q6_K_f32]; } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1686,9 
+1708,14 @@ bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || + //src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src0t == GGML_TYPE_IQ2_XXS) { + [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1778,6 +1805,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_K_f32]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1893,6 +1921,12 @@ bool ggml_metal_graph_compute( nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q6_K_f32]; } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1942,9 +1976,14 @@ bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + //src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src2t == GGML_TYPE_IQ2_XXS) { + [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1982,6 +2021,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index a7d3f9efa..0cc535ac7 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2446,6 +2446,12 @@ typedef struct { } block_q6_K; // 210 bytes / block +typedef struct { + half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +// 66 bytes / block for QK_K = 256, so 2.0625 bpw + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3468,6 +3474,221 @@ kernel void kernel_mul_mv_q6_K_f32( kernel_mul_mv_q6_K_f32_impl(src0, src1, 
dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); } +// ======================= "True" 2-bit + +constexpr constant static uint64_t kgrid_iq2xxs[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 
0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +constexpr constant static uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; + +constexpr constant static uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +void kernel_mul_mv_iq2_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xxs * x 
= (device const block_iq2_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = kgrid_iq2xxs[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + +#if QK_K == 256 + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xxs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + device const uint8_t * aux8 = (device const uint8_t *)q2; + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float sum = 0; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + aux8[l]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sum += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + sumf[row] += d * sum; + + dh += nb*sizeof(block_iq2_xxs)/2; + q2 += nb*sizeof(block_iq2_xxs)/2; + } + + y4 += 32 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xxs_f32")]] +kernel void kernel_mul_mv_iq2_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -3739,6 +3960,31 @@ void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg } } +template +void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + // each block of 32 needs 2 uint32_t's for the quants & scale, so 4 uint16_t's. + device const uint16_t * q2 = xb->qs + 4*ib32; + const uint32_t aux32_g = q2[0] | (q2[1] << 16); + const uint32_t aux32_s = q2[2] | (q2[3] << 16); + thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g; + const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } + grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -4278,6 +4524,7 @@ template [[host_name("kernel_get_rows_q3_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -4314,6 +4561,7 @@ template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -4362,6 +4610,7 @@ template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -5134,3 +5383,68 @@ kernel void kernel_mul_mv_id_q6_K_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq2_xxs_f32")]] +kernel void kernel_mul_mv_id_iq2_xxs_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint 
sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq2_xxs_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index 55a9496d1..fd127f2d1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2340,6 +2340,138 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * return (n/QK_K*sizeof(block_q6_K)); } +// ====================== "True" 2-bit (de)-quantization + +void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + +static const uint64_t iq2xxs_grid[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 
0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +static const uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; +static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +void 
dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + uint32_t aux32[2]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(aux32, x[i].qs + 4*ib32, 2*sizeof(uint32_t)); + const float db = d * (0.5f + (aux32[1] >> 28)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); + const uint8_t signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + y[j] = db * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + y += 8; + } + } + } +} + +void quantize_row_iq2_xxs(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq2_xxs * restrict y = vy; + quantize_row_iq2_xxs_reference(x, y, k); +} + +size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist) { + assert(k % QK_K == 0); + (void)hist; // TODO: collect histograms + + for (int j = 0; j < n; j += k) { + block_iq2_xxs * restrict y = (block_iq2_xxs *)dst + j/QK_K; + quantize_row_iq2_xxs_reference(src + j, y, k); + } + return (n/QK_K*sizeof(block_iq2_xxs)); +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -2362,7 +2494,9 @@ void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict x += QK_K; continue; } - const float iscale = -128.f/max; + //const float iscale = -128.f/max; + // We need this change for IQ2_XXS, else the AVX implementation becomes very awkward + const float iscale = -127.f/max; for (int j = 0; j < QK_K; ++j) { int v = nearest_int(iscale*x[j]); y[i].qs[j] = MIN(127, v); @@ -7065,3 +7199,161 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri } #endif + +static const int8_t keven_signs_q2xs[1024] = { + 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, + 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, -1, + 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, -1, + 1, 1, -1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, 1, + 1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, -1, + 1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, 1, + 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, 1, + 1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, -1, + 1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, + 1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, 1, + 1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, + 1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, -1, + 1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, 1, + 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, 
-1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, + 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, -1, + 1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, + 1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, + 1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, + 1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, + 1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, -1, + 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, + 1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, -1, + 1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, + 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, + 1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, 1, + 1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, + 1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, -1, + 1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, 1, + 1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, -1, + 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, + 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, + 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, +}; + +void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + assert(n % QK_K == 0); + + const block_iq2_xxs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + int8x16x4_t q2u; + int8x16x4_t q2s; + int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + float sumf1 = 0, sumf2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = vld1q_s8_x4(q8); q8 += 64; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 0])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 1]))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 2])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 3]))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 8])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 9]))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[10])), vld1_s8((const void *)(iq2xxs_grid + aux8[11]))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + 
((aux32[1] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 7) & 127)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[1] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 21) & 127)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 7) & 127)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 21) & 127)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]), q2u.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]), q2u.val[3], q8b.val[3]); + sumf1 += vaddvq_s32(p1) * (0.5f + (aux32[1] >> 28)); + sumf2 += vaddvq_s32(p2) * (0.5f + (aux32[3] >> 28)); + } + sumf += d*(sumf1 + sumf2); + } + *s = 0.25f * sumf; + +#elif defined(__AVX2__) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + const __m256i q2_1 = _mm256_set_epi64x(iq2xxs_grid[aux8[ 3]], iq2xxs_grid[aux8[ 2]], iq2xxs_grid[aux8[1]], iq2xxs_grid[aux8[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xxs_grid[aux8[11]], iq2xxs_grid[aux8[10]], iq2xxs_grid[aux8[9]], iq2xxs_grid[aux8[8]]); + const __m256i s2_1 = _mm256_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[(aux32[3] >> 21) & 127], signs64[(aux32[3] >> 14) & 127], + signs64[(aux32[3] >> 7) & 127], signs64[(aux32[3] >> 0) & 127]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = aux32[1] >> 28; + const uint16_t ls2 = aux32[3] >> 28; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + uint32_t aux32[2]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(aux32, q2, 2*sizeof(uint32_t)); + q2 += 4; + const uint32_t 
ls = 2*(aux32[1] >> 28) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); + const uint8_t signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls; + } + sumf += d * bsum; + } + *s = 0.125f * sumf; +#endif +} diff --git a/ggml-quants.h b/ggml-quants.h index 62c1df6cb..8dd911d41 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -165,6 +165,14 @@ typedef struct { } block_q8_K; static_assert(sizeof(block_q8_K) == sizeof(float) + QK_K + QK_K/16*sizeof(int16_t), "wrong q8_K block size/padding"); +// (Almost) "true" 2-bit quantization. +// Due to the need to use blocks as per ggml dsign, it ends up using +// 2.0625 bpw because of the 16-bit scale for each block of 256. +typedef struct { + ggml_fp16_t d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); // Quantization void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); @@ -180,6 +188,7 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k); void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); +void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k); void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); @@ -194,6 +203,7 @@ void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); +void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); @@ -209,6 +219,7 @@ void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int k); void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k); void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); +void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); @@ -222,3 +233,4 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, const void * restrict vx, void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); diff --git a/ggml.c b/ggml.c index 62f0f18ef..adb387100 100644 --- a/ggml.c +++ b/ggml.c @@ -573,6 +573,17 @@ static const 
ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot = ggml_vec_dot_q6_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, + [GGML_TYPE_IQ2_XXS] = { + .type_name = "iq2_xxs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xxs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xxs, + .from_float = quantize_row_iq2_xxs, + .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xxs_reference, + .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2111,6 +2122,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_Q4_K: wtype = GGML_TYPE_Q4_K; break; case GGML_FTYPE_MOSTLY_Q5_K: wtype = GGML_TYPE_Q5_K; break; case GGML_FTYPE_MOSTLY_Q6_K: wtype = GGML_TYPE_Q6_K; break; + case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7436,6 +7448,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7700,6 +7713,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -7814,6 +7828,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: default: { GGML_ASSERT(false); @@ -10455,6 +10470,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -10629,6 +10645,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: default: { GGML_ASSERT(false); @@ -10823,6 +10840,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11459,6 +11477,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11533,6 +11552,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -18648,6 +18668,12 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_q6_K * block = (block_q6_K*)dst + start / QK_K; result = ggml_quantize_q6_K(src + start, block, n, n, hist); } break; + case GGML_TYPE_IQ2_XXS: + { + GGML_ASSERT(start % QK_K == 0); + block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; + result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); + } break; case GGML_TYPE_F16: { int elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 64f4e45e8..c55e598b4 100644 --- a/ggml.h +++ b/ggml.h @@ -339,6 +339,7 @@ extern "C" { GGML_TYPE_Q5_K = 13, GGML_TYPE_Q6_K = 14, GGML_TYPE_Q8_K = 15, + GGML_TYPE_IQ2_XXS = 16, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -373,6 +374,7 @@ extern "C" { GGML_FTYPE_MOSTLY_Q4_K = 12, // except 1d tensors 
GGML_FTYPE_MOSTLY_Q5_K = 13, // except 1d tensors GGML_FTYPE_MOSTLY_Q6_K = 14, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors }; // available tensor operations: @@ -2067,6 +2069,7 @@ extern "C" { GGML_API size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); diff --git a/llama.cpp b/llama.cpp index 63853d1c3..8e0717db9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2222,6 +2222,7 @@ struct llama_model_loader { case GGML_TYPE_Q4_K: ftype = LLAMA_FTYPE_MOSTLY_Q4_K_M; break; case GGML_TYPE_Q5_K: ftype = LLAMA_FTYPE_MOSTLY_Q5_K_M; break; case GGML_TYPE_Q6_K: ftype = LLAMA_FTYPE_MOSTLY_Q6_K; break; + case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2593,6 +2594,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small"; case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium"; case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; + case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XSS - 2.0625 bpw"; default: return "unknown, may not work"; } @@ -9038,6 +9040,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_Q5_K_S: case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index 869ff0acf..c11075bbc 100644 --- a/llama.h +++ b/llama.h @@ -103,6 +103,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q5_K_S = 16, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q5_K_M = 17, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q6_K = 18, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index a2459a286..cee712618 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -134,6 +134,11 @@ int main(int argc, char * argv[]) { continue; } + if ((ggml_type)i == GGML_TYPE_IQ2_XXS) { + printf("Skip %s due to missing quantization functionality\n", ggml_type_name((ggml_type) i)); + continue; + } + printf("Testing %s\n", ggml_type_name((ggml_type) i)); if (qfns.from_float && qfns.to_float) { From a9a8c5de3d2028701c239d821b220214fcaefbf1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 20:25:17 +0200 Subject: [PATCH 238/811] readme : add link to SOTA models --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 2f6e6ffee..a0d86a6ef 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow - Collecting Apple Silicon performance stats: - M-series: 
https://github.com/ggerganov/llama.cpp/discussions/4167 - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508 From 1fc2f265ff9377a37fd2c61eae9cd813a3491bea Mon Sep 17 00:00:00 2001 From: howlger Date: Mon, 8 Jan 2024 20:05:53 +0100 Subject: [PATCH 239/811] common : fix the short form of `--grp-attn-w`, not `-gat` (#4825) See https://github.com/ggerganov/llama.cpp/blob/master/common/common.cpp#L230C53-L230C57 --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 6b4913a65..4e89fe516 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -920,7 +920,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { #endif printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); - printf(" -gat N, --grp-attn-w N\n"); + printf(" -gaw N, --grp-attn-w N\n"); printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); From 8f900abfc09851e281bc9027e0ab2f16bf079b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 9 Jan 2024 08:58:55 +0100 Subject: [PATCH 240/811] CUDA: faster softmax via shared memory + fp16 math (#4742) --- ggml-cuda.cu | 333 ++++++++++++++++++++++++++++++++++--- tests/test-backend-ops.cpp | 17 +- 2 files changed, 321 insertions(+), 29 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e0ea890b1..e26260a35 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -116,6 +116,7 @@ #include "ggml.h" #include "ggml-backend-impl.h" +#define CC_PASCAL 600 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 #define CC_OFFSET_AMD 1000000 @@ -556,11 +557,12 @@ static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; struct cuda_device_capabilities { int cc; // compute capability + size_t smpb; // max. 
shared memory per block bool vmm; // virtual memory support size_t vmm_granularity; // granularity of virtual memory }; -static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; +static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, 0, false, 0} }; static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default @@ -593,6 +595,19 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } +static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { +#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) + (void) a; + bad_arch(); +#else +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); + } + return a; +#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +} + static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { @@ -601,6 +616,19 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } +static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { +#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) + (void) x; + bad_arch(); +#else +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); + } + return x; +#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +} + static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; GGML_UNUSED(a); @@ -5385,75 +5413,233 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } -static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols, const int nrows_y, const float scale) { +template +static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL + const int ncols_data = ncols_template == 0 ? ncols_par : ncols_template; + const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; + const int tid = threadIdx.x; const int rowx = blockIdx.x; const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension - const int block_size = blockDim.x; + const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; const int warp_id = threadIdx.x / WARP_SIZE; const int lane_id = threadIdx.x % WARP_SIZE; - __shared__ float buf[CUDA_SOFT_MAX_BLOCK_SIZE/WARP_SIZE]; + extern __shared__ half data_soft_max_f16[]; + half * buf_iw = data_soft_max_f16 + 0; // shared memory buffer for inter-warp communication + // (shared memory) buffer to cache values between iterations: + half2 * vals = vals_smem ? (half2 *) (buf_iw + WARP_SIZE) : (half2 *) (dst + rowx*ncols_data); + // if the buffer is larger than max. shared memory per block, use dst as temp. 
buffer instead + // in that case col_smem == col_data must be enforced to avoid race conditions - float max_val = -INFINITY; + half2 max_val = make_half2(-INFINITY, -INFINITY); - for (int col = tid; col < ncols; col += block_size) { - const int ix = rowx*ncols + col; - const int iy = rowy*ncols + col; - max_val = max(max_val, x[ix]*scale + (y ? y[iy] : 0.0f)); +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; + const int col_smem = vals_smem ? col0 + tid : col_data; + + const int ix = rowx*ncols_data + col_data; + const int iy = rowy*ncols_data + col_data; + + half2 val; + if (need_check && col_data + 0 >= ncols_data) { + val.x = -INFINITY; + } else { + val.x = x[ix + 0]*scale + (y ? y[iy + 0] : 0.0f); + } + if (need_check && col_data + WARP_SIZE >= ncols_data) { + val.y = -INFINITY; + } else { + val.y = x[ix + WARP_SIZE]*scale + (y ? y[iy + WARP_SIZE] : 0.0f); + } + if (!need_check || col_smem < (vals_smem ? ncols_smem : ncols_data)) { + vals[col_smem] = val; + } + max_val = __hmax2(max_val, val); } // find the max value in the block max_val = warp_reduce_max(max_val); if (block_size > WARP_SIZE) { if (warp_id == 0) { - buf[lane_id] = -INFINITY; + buf_iw[lane_id] = -INFINITY; } __syncthreads(); if (lane_id == 0) { - buf[warp_id] = max_val; + buf_iw[warp_id] = __hmax(max_val.x, max_val.y); } __syncthreads(); - max_val = buf[lane_id]; + max_val = __half2half2(buf_iw[lane_id]); max_val = warp_reduce_max(max_val); + } else { + max_val = __half2half2(__hmax(max_val.x, max_val.y)); } - float tmp = 0.f; + half2 tmp = make_half2(0.0f, 0.0f); // partial sums + +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_smem = vals_smem ? col0 + tid : 2*col0 + 2*warp_id*WARP_SIZE + lane_id; + + if (ncols_template == 0 && col_smem >= (vals_smem ? ncols_smem : ncols_data)) { + break; + } + + const half2 val = h2exp(vals[col_smem] - max_val); - for (int col = tid; col < ncols; col += block_size) { - const int ix = rowx*ncols + col; - const int iy = rowy*ncols + col; - const float val = expf((x[ix]*scale + (y ? y[iy] : 0.0f)) - max_val); tmp += val; - dst[ix] = val; + vals[col_smem] = val; } // find the sum of exps in the block tmp = warp_reduce_sum(tmp); if (block_size > WARP_SIZE) { if (warp_id == 0) { - buf[lane_id] = 0.f; + buf_iw[lane_id] = 0.0f; } __syncthreads(); if (lane_id == 0) { - buf[warp_id] = tmp; + buf_iw[warp_id] = tmp.x + tmp.y; } __syncthreads(); - tmp = buf[lane_id]; + tmp = __half2half2(buf_iw[lane_id]); + tmp = warp_reduce_sum(tmp); + } else { + tmp = __half2half2(tmp.x + tmp.y); + } + + const half2 inv_sum = make_half2(1.0f, 1.0f) / tmp; + +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; + const int col_smem = vals_smem ? 
col0 + tid : col_data; + + const int idst = rowx*ncols_data + col_data; + const half2 result = vals[col_smem] * inv_sum; + + if (need_check && col_data + 0 >= ncols_data) { + return; + } + dst[idst] = result.x; + + if (need_check && col_data + WARP_SIZE >= ncols_data) { + return; + } + + dst[idst + WARP_SIZE] = result.y; + } +#else + (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +} + +template +static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { + const int ncols = ncols_template == 0 ? ncols_par : ncols_template; + + const int tid = threadIdx.x; + const int rowx = blockIdx.x; + const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension + + const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; + + const int warp_id = threadIdx.x / WARP_SIZE; + const int lane_id = threadIdx.x % WARP_SIZE; + + extern __shared__ float data_soft_max_f32[]; + float * buf_iw = data_soft_max_f32; // shared memory buffer for inter-warp communication + // shared memory buffer to cache values between iterations: + float * vals = vals_smem ? buf_iw + WARP_SIZE : dst + rowx*ncols; + + float max_val = -INFINITY; + +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + break; + } + + const int ix = rowx*ncols + col; + const int iy = rowy*ncols + col; + + const float val = x[ix]*scale + (y ? y[iy] : 0.0f); + vals[col] = val; + max_val = max(max_val, val); + } + + // find the max value in the block + max_val = warp_reduce_max(max_val); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf_iw[lane_id] = -INFINITY; + } + __syncthreads(); + + if (lane_id == 0) { + buf_iw[warp_id] = max_val; + } + __syncthreads(); + + max_val = buf_iw[lane_id]; + max_val = warp_reduce_max(max_val); + } + + float tmp = 0.0f; // partial sum + +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + break; + } + + const float val = expf(vals[col] - max_val); + tmp += val; + vals[col] = val; + } + + // find the sum of exps in the block + tmp = warp_reduce_sum(tmp); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf_iw[lane_id] = 0.0f; + } + __syncthreads(); + + if (lane_id == 0) { + buf_iw[warp_id] = tmp; + } + __syncthreads(); + + tmp = buf_iw[lane_id]; tmp = warp_reduce_sum(tmp); } - const float inv_tmp = 1.f / tmp; + const float inv_sum = 1.0f / tmp; - for (int col = tid; col < ncols; col += block_size) { - const int i = rowx*ncols + col; - dst[i] *= inv_tmp; +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + return; + } + + const int idst = rowx*ncols + col; + dst[idst] = vals[col] * inv_sum; } } @@ -6752,12 +6938,90 @@ static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols diag_mask_inf_f32<<>>(x, dst, ncols_x, rows_per_channel, n_past); } +static void soft_max_f16_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { + int nth = WARP_SIZE; + while (nth < ncols_x/2 && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; + const dim3 
block_dims(nth, 1, 1); + const dim3 block_nums(nrows_x, 1, 1); + const size_t shmem = (GGML_PAD(ncols_x, 2*WARP_SIZE) + WARP_SIZE)*sizeof(half); + static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + if (shmem <= g_device_caps[g_main_device].smpb) { + switch (ncols_x) { + case 32: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 64: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 128: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 256: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 512: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 1024: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 2048: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 4096: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + default: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + } + } else { + const size_t shmem_low = WARP_SIZE*sizeof(half); + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + } +} + static void soft_max_f32_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { int nth = WARP_SIZE; while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; const dim3 block_dims(nth, 1, 1); const dim3 block_nums(nrows_x, 1, 1); - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + const size_t shmem = (GGML_PAD(ncols_x, WARP_SIZE) + WARP_SIZE)*sizeof(float); + static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + if (shmem < g_device_caps[g_main_device].smpb) { + switch (ncols_x) { + case 32: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 64: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 128: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 256: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 512: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 1024: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 2048: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 4096: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + default: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + } + } else { + const size_t shmem_low = WARP_SIZE*sizeof(float); + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + } } static void im2col_f32_f16_cuda(const float* x, half* dst, @@ -7072,6 +7336,7 @@ void ggml_init_cublas() { #else g_device_caps[id].cc = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + g_device_caps[id].smpb = prop.sharedMemPerBlock; } for (int id = 0; id < g_device_count; ++id) { g_tensor_split[id] /= total_vram; @@ -8087,7 +8352,21 @@ static void ggml_cuda_op_soft_max( float scale = 1.0f; memcpy(&scale, dst->op_params, sizeof(float)); - soft_max_f32_cuda(src0_dd, src1 ? 
src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); +#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + const bool use_f16_soft_max = false; +#else +#ifdef GGML_CUDA_F16 + const bool use_f16_soft_max = true; +#else + const bool use_f16_soft_max = false; +#endif // GGML_CUDA_F16 +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + + if (use_f16_soft_max) { + soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + } else { + soft_max_f32_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + } (void) dst; } diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index b79de7a7d..7a60d7743 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -450,7 +450,7 @@ struct test_case { double err = nmse(f1.data(), f2.data(), f1.size()); if (err > ud->max_err) { - printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); + printf("[%s] NMSE = %.9f > %.9f ", ggml_op_desc(t1), err, ud->max_err); //for (int i = 0; i < (int) f1.size(); i++) { // printf("%5d %9.6f %9.6f, diff = %9.6f\n", i, f1[i], f2[i], f1[i] - f2[i]); //} @@ -1449,6 +1449,7 @@ struct test_moe : public test_case { static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op_name) { std::vector> test_cases; + std::default_random_engine rng(0); const ggml_type all_types[] = { GGML_TYPE_F32, GGML_TYPE_F16, @@ -1583,7 +1584,19 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 1}, 5)); test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 10}, 5)); - test_cases.emplace_back(new test_soft_max()); + std::uniform_int_distribution<> dist_ne1(1, 50); + int exponent = 1; + while (exponent < (1 << 17)) { + std::uniform_int_distribution<> dist_ne0(exponent, 2*exponent); + + for (int n = 0; n < 10; ++n) { + int64_t ne0 = dist_ne0(rng); + int64_t ne1 = dist_ne1(rng); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1})); + } + + exponent <<= 1; + } for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512)); // llama 7B From 18c2e1752c3b387689e9e73d7d8a1a3b1511ce23 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 10:42:06 +0200 Subject: [PATCH 241/811] ggml : fix vld1q_s8_x4 32-bit compat (#4828) * ggml : fix vld1q_s8_x4 32-bit compat ggml-ci * ggml : fix 32-bit ARM compat (cont) ggml-ci --- ggml-quants.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index fd127f2d1..d497e6de9 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -7250,9 +7250,9 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res uint32_t aux32[4]; const uint8_t * aux8 = (const uint8_t *)aux32; - int8x16x4_t q2u; - int8x16x4_t q2s; - int8x16x4_t q8b; + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; float sumf = 0; for (int i = 0; i < nb; ++i) { @@ -7261,7 +7261,7 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res const int8_t * restrict q8 = y[i].qs; float sumf1 = 0, sumf2 = 0; for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { - q8b = vld1q_s8_x4(q8); q8 += 64; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 0])), 
vld1_s8((const void *)(iq2xxs_grid + aux8[ 1]))); q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 2])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 3]))); From 8c5833031857c9e9ada61948bae894ab9c785f86 Mon Sep 17 00:00:00 2001 From: Zsapi Date: Tue, 9 Jan 2024 10:12:43 +0100 Subject: [PATCH 242/811] server : add api-key flag to documentation (#4832) Document the api-key flag added to server in https://github.com/ggerganov/llama.cpp/pull/4441 --- examples/server/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/README.md b/examples/server/README.md index 243e66991..5d9829624 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -23,6 +23,7 @@ Command line options: - `--host`: Set the hostname or ip address to listen. Default `127.0.0.1`. - `--port`: Set the port to listen. Default: `8080`. - `--path`: path from which to serve static files (default examples/server/public) +- `--api-key`: Set an api key for request authorization. By default the server responds to every request. With an api key set, the requests must have the Authorization header set with the api key as Bearer token. - `--embedding`: Enable embedding extraction, Default: disabled. - `-np N`, `--parallel N`: Set the number of slots for process requests (default: 1) - `-cb`, `--cont-batching`: enable continuous batching (a.k.a dynamic batching) (default: disabled) From 128de3585b0f58b1e562733448fc00109f23a95d Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Tue, 9 Jan 2024 05:02:05 -0500 Subject: [PATCH 243/811] server : update readme about token probs (#4777) * updated server readme to reflect the gg/server-token-probs-4088 commit added explanation for the API's completion result which now includes `completion_probabilities`. Also added a JSON schema that shows the type/structure of `completion_probabilities`. * simplified the `completion_probabilities` JSON schema It's now easier to understand what the structure of `completion_probabilities` looks like. * minor : fix trailing whitespace --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 57 ++++++++++++++++++++++----------------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 5d9829624..d85a14f89 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -175,35 +175,44 @@ node index.js `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) - *Result JSON:* +### Result JSON: - Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. +* Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. - `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string. - `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options) +- `completion_probabilities`: An array of token probabilities for each completion. The array's length is `n_predict`. 
Each item in the array has the following structure: - `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model` +``` +{ + "content": "", + "probs": [ + { + "prob": float, + "tok_str": "" + }, + { + "prob": float, + "tok_str": "" + }, + ... + ] +}, +``` +Notice that each `probs` is an array of length `n_probs`. - `model`: The path to the model loaded with `-m` - - `prompt`: The provided `prompt` - - `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token - - `stopped_limit`: Indicating whether the completion stopped because `n_predict` tokens were generated before stop words or EOS was encountered - - `stopped_word`: Indicating whether the completion stopped due to encountering a stopping word from `stop` JSON array provided - - `stopping_word`: The stopping word encountered which stopped the generation (or "" if not stopped due to a stopping word) - - `timings`: Hash of timing information about the completion such as the number of tokens `predicted_per_second` - - `tokens_cached`: Number of tokens from the prompt which could be re-used from previous completion (`n_past`) - - `tokens_evaluated`: Number of tokens evaluated in total from the prompt - - `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens predicted`) exceeded the context size (`n_ctx`) +- `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string. +- `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options) +- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model` +- `model`: The path to the model loaded with `-m` +- `prompt`: The provided `prompt` +- `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token +- `stopped_limit`: Indicating whether the completion stopped because `n_predict` tokens were generated before stop words or EOS was encountered +- `stopped_word`: Indicating whether the completion stopped due to encountering a stopping word from `stop` JSON array provided +- `stopping_word`: The stopping word encountered which stopped the generation (or "" if not stopped due to a stopping word) +- `timings`: Hash of timing information about the completion such as the number of tokens `predicted_per_second` +- `tokens_cached`: Number of tokens from the prompt which could be re-used from previous completion (`n_past`) +- `tokens_evaluated`: Number of tokens evaluated in total from the prompt +- `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens predicted`) exceeded the context size (`n_ctx`) - **POST** `/tokenize`: Tokenize a given text. 
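For reference, a minimal client sketch that consumes the `completion_probabilities` field documented above. It assumes a server running on the documented defaults (`127.0.0.1:8080`) and the `/completion` endpoint; the prompt, `n_predict`, and `n_probs` values are placeholders, not part of the patches.

```python
# Minimal sketch: request a completion with per-token probabilities and print them.
# Assumes the server defaults documented above (127.0.0.1:8080) and the /completion
# endpoint; prompt, n_predict, and n_probs are placeholder values.
import json
import urllib.request

payload = {
    "prompt": "Building a website can be done in 10 simple steps:",
    "n_predict": 8,
    "n_probs": 4,  # number of alternative tokens reported per generated token
}

req = urllib.request.Request(
    "http://127.0.0.1:8080/completion",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)

with urllib.request.urlopen(req) as resp:
    result = json.load(resp)

print("completion:", result["content"])
for item in result.get("completion_probabilities", []):
    top = ", ".join(f'{p["tok_str"]!r}={p["prob"]:.3f}' for p in item["probs"])
    print(f'{item["content"]!r}: {top}')
```

As noted above, with `stream` enabled only `content` and `stop` are returned per chunk until the completion ends, so this sketch uses a plain non-streaming request.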
From d9653894dffbfd3a58616f31b0967b34faf6f611 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 16:23:05 +0200 Subject: [PATCH 244/811] scripts : script to get Paul Graham essays in txt format (#4838) --- scripts/get-pg.sh | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100755 scripts/get-pg.sh diff --git a/scripts/get-pg.sh b/scripts/get-pg.sh new file mode 100755 index 000000000..d516db46c --- /dev/null +++ b/scripts/get-pg.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +function usage { + echo "usage: $0" + exit 1 +} + +function has_cmd { + if ! [ -x "$(command -v $1)" ]; then + echo "error: $1 is not available" >&2 + exit 1 + fi +} + +# check for: curl, html2text, tail, sed, fmt +has_cmd curl +has_cmd html2text +has_cmd tail +has_cmd sed + +if [ $# -ne 1 ]; then + usage +fi + +n=$1 + +# get urls +urls="$(curl http://www.aaronsw.com/2002/feeds/pgessays.rss | grep html | sed -e "s/.*http/http/" | sed -e "s/html.*/html/" | head -n $n)" + +printf "urls:\n%s\n" "$urls" + +if [ -f pg.txt ]; then + rm pg.txt +fi + +for url in $urls; do + echo "processing $url" + + curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg.txt + + # don't flood the server + sleep 1 +done + +echo "done. data in pg.txt" + +exit 0 From 18adb4e9bb340b7b4565d8b6715b4449283e7641 Mon Sep 17 00:00:00 2001 From: iohub Date: Wed, 10 Jan 2024 00:45:54 +0800 Subject: [PATCH 245/811] readme : add 3rd party collama reference to UI list (#4840) Add a VSCode extension for llama.cpp reference to UI list --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a0d86a6ef..866aa87b4 100644 --- a/README.md +++ b/README.md @@ -137,6 +137,7 @@ as the main playground for developing new features for the [ggml](https://github - [semperai/amica](https://github.com/semperai/amica) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) +- [iohub/collama](https://github.com/iohub/coLLaMA) --- From 9a818f7c42761984ac99e08e613cc20634f8410e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 19:20:45 +0200 Subject: [PATCH 246/811] scripts : improve get-pg.sh (#4838) --- scripts/get-pg.sh | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/scripts/get-pg.sh b/scripts/get-pg.sh index d516db46c..b027793e1 100755 --- a/scripts/get-pg.sh +++ b/scripts/get-pg.sh @@ -2,6 +2,22 @@ function usage { echo "usage: $0" + echo "note: n is the number of essays to download" + echo "for specific n, the resulting pg.txt file will have the following number of tokens:" + echo "n | tokens" + echo "--- | ---" + echo "1 | 6230" + echo "2 | 23619" + echo "5 | 25859" + echo "10 | 36888" + echo "15 | 50188" + echo "20 | 59094" + echo "25 | 88764" + echo "30 | 103121" + echo "32 | 108338" + echo "35 | 113403" + echo "40 | 127699" + echo "45 | 135896" exit 1 } @@ -33,10 +49,17 @@ if [ -f pg.txt ]; then rm pg.txt fi +c=1 for url in $urls; do echo "processing $url" - curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg.txt + cc=$(printf "%03d" $c) + + curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg-$cc-one.txt + cat pg-$cc-one.txt >> pg.txt + + cp -v pg.txt pg-$cc-all.txt + c=$((c+1)) # don't flood the server sleep 1 From 4dccb38d9abab7f9f2d1f9a6977df4185d490132 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 19:37:08 +0200 Subject: 
[PATCH 247/811] metal : improve dequantize precision to match CPU (#4836) ggml-ci --- ggml-metal.metal | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/ggml-metal.metal b/ggml-metal.metal index 0cc535ac7..229efb8b6 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -3841,8 +3841,8 @@ void dequantize_q3_K(device const block_q3_K *xb, short il, thread type4x4 & reg uint16_t scale_2 = scales[il%8], scale_1 = scales[8 + il%4]; int16_t dl_int = (il/4)&1 ? (scale_2&kmask2) | ((scale_1&kmask1) << 2) : (scale_2&kmask2) | ((scale_1&kmask1) << 4); - half dl = il<8 ? d_all * (dl_int - 32.h) : d_all * (dl_int / 16.h - 32.h); - const half ml = 4.h * dl; + float dl = il<8 ? d_all * (dl_int - 32.f) : d_all * (dl_int / 16.f - 32.f); + const float ml = 4.f * dl; il = (il/2) & 3; const half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ? 1/4.h : 1.h); @@ -3909,7 +3909,7 @@ void dequantize_q5_K(device const block_q5_K *xb, short il, thread type4x4 & reg uint8_t ul = 1 << (il/2); il = il & 3; const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); - const float d = il < 2 ? xb->d : xb->d / 16.h; + const float d = il < 2 ? xb->d : xb->d / 16.f; const float min = xb->dmin; const float dl = d * sc[0]; const float ml = min * sc[1]; @@ -3942,17 +3942,17 @@ void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg #if QK_K == 256 ql = ql + 64*(il/8) + 32*((il/2)&1) + 16*(il&1); qh = qh + 32*(il/8) + 16*(il&1); - half sc = scales[(il%2) + 2 * ((il/2))]; + float sc = scales[(il%2) + 2 * ((il/2))]; il = (il/2) & 3; #else ql = ql + 16 * (il&1); - half sc = scales[il]; + float sc = scales[il]; #endif const uint16_t kmask1 = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3); const uint16_t kmask2 = il>1 ? 0xF0 : 0x0F; - const half coef = il>1 ? 1.f/16.h : 1.h; - const half ml = d_all * sc * 32.h; - const half dl = d_all * sc * coef; + const float coef = il>1 ? 1.f/16.f : 1.f; + const float ml = d_all * sc * 32.f; + const float dl = d_all * sc * coef; for (int i = 0; i < 16; ++i) { const half q = il&1 ? ((ql[i] & kmask2) | ((qh[i] & kmask1) << 2)) : ((ql[i] & kmask2) | ((qh[i] & kmask1) << 4)); From 36e5a08b203542dca53cca4eaf172c5dc4bbc991 Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Tue, 9 Jan 2024 09:59:14 -0800 Subject: [PATCH 248/811] llava-cli : don't crash if --image flag is invalid (#4835) This change fixes an issue where supplying `--image missing-file` would result in a segfault due to a null pointer being dereferenced. This can result in distracting info being printed if robust crash analysis tools are being used. --- examples/llava/llava-cli.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 502b788b1..d94795fe3 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -243,6 +243,9 @@ int main(int argc, char ** argv) { } auto image_embed = load_image(ctx_llava, ¶ms); + if (!image_embed) { + return 1; + } // process the prompt process_prompt(ctx_llava, image_embed, ¶ms, params.prompt); From 6efb8eb30e7025b168f3fda3ff83b9b386428ad6 Mon Sep 17 00:00:00 2001 From: Austin <77757836+teleprint-me@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:46:46 -0500 Subject: [PATCH 249/811] convert.py : fix vanilla LLaMA model conversion (#4818) * Update Imports and Add Notes for Future Reference - Updated import statements in `convert.py`. - Added import for `AutoTokenizer` from `transformers` module. 
- Added conditional import for `gguf` from the local directory. - Added comments and notes for future reference. Additional Notes: - Noted removal of a redundant `TypeAlias` import. - Noted the removal of a `gguf` debug statement. - Commented on the presence of `ARCH` and `NDArray` definitions. - Commented on cleaning up and refactoring data type definitions. * Refine Model Hyperparameters and Params Class - Updated type annotations to use `Optional` for clarity. - Improved method names and attribute consistency. - Removed unnecessary variables for better code readability. Additional Notes: - Highlighted the use of `Optional` for clearer intent. - Ensured backward and forward compatibility. * Restore BpeVocab and SentencePieceVocab classes - Restored the BpeVocab class for handling BPE tokenization. - Restored the SentencePieceVocab class for SentencePiece tokenization. These classes are essential for maintaining the original behavior of the codebase. * refactor: Standardize vocabulary handling with HfVocab - Replaced VocabLoader with HfVocab, aligning vocabulary handling across classes. - Updated initialization of HfVocab with local_files_only=True for AutoTokenizer. - Introduced optional parameter fname_added_tokens for flexible added token management. - Streamlined added token handling for clarity and conciseness. - Maintained special tokens and IDs, enhancing token management. - Simplified token processing methods for improved readability. - Added a placeholder for score computation with a default value of -1000.0. - Optimized newline token check for efficiency. - Updated __repr__ function for clarity in representation. - Adjusted type alias Vocab to include BpeVocab, SentencePieceVocab, and HfVocab. - Removed redundant code related to special token handling, reverse vocabulary mapping, and vocabulary file detection. This refactoring promotes a standardized and modular approach to vocabulary management, facilitating future integration with a VocabFactory and improving code maintainability and scalability. * refactor: Enhance readability, functionality, and code quality - Improved code formatting and readability for better maintainability. - Refactored LazyUnpickler's CLASSES dictionary for clarity. - Added print statements and warnings in check_vocab_size for user feedback. - Removed find_vocab_file_path, as it's superseded by VocabFactory. - Preparatory changes for upcoming classes: OutputFile and VocabFactory. - Overall focus on code quality, error handling, and consistency. These changes reflect a continuous effort to refine the codebase, ensuring it meets best practices and prepares for future enhancements, such as the VocabFactory. * refactor: Update OutputFile class for enhanced model vocabulary management - Restructured the constructor for improved readability. - Updated `add_meta_arch` method for flexible model name determination. - Introduced `handle_tokenizer_model` for mapping vocab types to supported tokenizer models. - Streamlined vocabulary extraction with `extract_vocabulary_from_model`. - Simplified vocabulary metadata addition using `add_meta_vocab`. - Refactored `add_tensor_info` for clarity and consistency. - Improved error handling for better user feedback. These changes signify the development of a versatile and comprehensive `OutputFile` class, enabling efficient management of model conversion output, metadata, vocabulary, and tensor information. 
* feat: Introduce VocabFactory for flexible vocabulary management in model conversion - The VocabFactory class is added to facilitate modular vocabulary handling. - The constructor initializes a directory path and detects vocabulary-related files. - The _select_file method provides file paths based on vocabulary type (e.g., BPE, SentencePiece). - _create_special_vocab generates special vocabularies, accommodating different types. - The load_vocab method loads vocabularies, handling BPE, SentencePiece, and Hugging Face Fast Tokenizer. - Error handling and logging enhance debugging and user feedback. - The modular and flexible design simplifies vocabulary management and supports future extensions. The VocabFactory class enhances code modularity and maintainability, allowing versatile vocabulary handling in the model conversion process. * refactor: Improve code organization, argument parsing, and user interface - Renamed 'default_outfile' to 'default_output_file' for clarity. - Refactored argument parser setup into 'get_argument_parser' function. - Introduced descriptive comments for each argument in the parser. - Added '--vocab-type' argument with choices ["spm", "bpe", "hfft"] for vocabulary processing. - Improved flag naming consistency: '--outfile' to '--out-file' and '--bigendian' to '--big-endian'. - Enhanced error handling to prevent overwriting input data in 'default_output_file'. - Made 'argv' in 'main' an optional parameter for flexibility. - Introduced dynamic import for 'awq.apply_awq' based on 'args.awq_path' for conditional dependency. These changes enhance code clarity, organization, and the user interface of the script, aligning it with Python best practices and improving maintainability. * refactor: Further refine functionality, improve user interaction, and streamline vocabulary handling - Renamed command-line arguments for clarity and consistency. - Improved path resolution and import adjustments for robustness. - Thoughtfully handled 'awq-path' and conditional logic for the weighted model. - Enhanced model and vocabulary loading with the 'VocabFactory' class for structured and adaptable loading. - Strengthened error handling and user feedback for a more user-friendly experience. - Structured output file handling with clear conditions and defaults. - Streamlined and organized the 'main' function for better logic flow. - Passed 'sys.argv[1:]' to 'main' for adaptability and testability. These changes solidify the script's functionality, making it more robust, user-friendly, and adaptable. The use of the 'VocabFactory' class is a notable enhancement in efficient vocabulary handling, reflecting a thoughtful and iterative approach to script development. * chore: Apply ruff formatting to convert.py Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> * Revert to commit 0614c33 * chore: Apply flake8 formatting rules Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> * refactor: Revise `check_vocab_size` for Enhanced Clarity and Correctness - Resolved an unreachable branch issue by reorganizing the conditional structure. - Moved the special case check for `params.n_vocab == -1` to the top for immediate assertion. - Flattened the conditional logic for improved clarity and predictability of the function's behavior. These changes enhance the readability and functional correctness of the `check_vocab_size` function without altering its intended functionality. 
* py : fix outfile and outtype * py : suggest hint for missing vocab size --------- Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> Co-authored-by: Georgi Gerganov --- convert.py | 969 ++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 666 insertions(+), 303 deletions(-) diff --git a/convert.py b/convert.py index c3f3fc0a1..3b613eefc 100755 --- a/convert.py +++ b/convert.py @@ -17,29 +17,58 @@ import signal import struct import sys import time +import warnings import zipfile from abc import ABCMeta, abstractmethod -from collections import OrderedDict +from argparse import ArgumentParser from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from dataclasses import dataclass from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, Callable, Iterable, Literal, Optional, TypeVar, cast +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Iterable, + Literal, + Optional, + Tuple, + TypeVar, +) import numpy as np from sentencepiece import SentencePieceProcessor -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) -import gguf +try: + from transformers import AutoTokenizer +except ModuleNotFoundError as e: + warnings.warn(f"Could not import AutoTokenizer from transformers: {e}") -if TYPE_CHECKING: - from typing import TypeAlias +# If NO_LOCAL_GGUF is not set, try to import gguf from the local gguf-py directory +if "NO_LOCAL_GGUF" not in os.environ: + # Use absolute path to the gguf-py directory + gguf_py_dir = str(Path(__file__).resolve().parent / "gguf-py") + print(gguf_py_dir) # NOTE: Remove this once path is verified after changes are completed + if gguf_py_dir not in sys.path: + sys.path.insert(1, gguf_py_dir) -if hasattr(faulthandler, 'register') and hasattr(signal, 'SIGUSR1'): +# Import gguf module +try: + import gguf +except ModuleNotFoundError as e: + print(f"Could not import gguf: {e}") + sys.exit(1) + +if TYPE_CHECKING: # NOTE: This isn't necessary. + from typing import TypeAlias # This can technically be omitted. + +if hasattr(faulthandler, "register") and hasattr(signal, "SIGUSR1"): faulthandler.register(signal.SIGUSR1) -NDArray: TypeAlias = 'np.ndarray[Any, Any]' +# NOTE: n-dimensional arrays should be directly referenced +NDArray: TypeAlias = "np.ndarray[Any, Any]" +# Why is this here? LLAMA and GPT are technically the only compatible ARCHs. 
ARCH = gguf.MODEL_ARCH.LLAMA DEFAULT_CONCURRENCY = 8 @@ -49,6 +78,7 @@ DEFAULT_CONCURRENCY = 8 # +# TODO: Clean up and refactor data types @dataclass(frozen=True) class DataType: name: str @@ -153,65 +183,85 @@ GGML_FILE_TYPE_TO_DATA_TYPE: dict[GGMLFileType, DataType] = { @dataclass class Params: - n_vocab: int - n_embd: int - n_layer: int - n_ctx: int - n_ff: int - n_head: int - n_head_kv: int - n_experts: int | None = None - n_experts_used: int | None = None - f_norm_eps: float | None = None + n_vocab: int + n_embd: int + n_layer: int + n_ctx: int + n_ff: int + n_head: int + n_head_kv: int + f_norm_eps: Optional[float] = None + n_experts: Optional[int] = None + n_experts_used: Optional[int] = None - rope_scaling_type: gguf.RopeScalingType | None = None - f_rope_freq_base: float | None = None - f_rope_scale: float | None = None - n_orig_ctx: int | None = None - rope_finetuned: bool | None = None + rope_scaling_type: Optional[gguf.RopeScalingType] = None + f_rope_freq_base: Optional[float] = None + f_rope_scale: Optional[float] = None + n_orig_ctx: Optional[int] = None + rope_finetuned: Optional[bool] = None - ftype: GGMLFileType | None = None + ftype: Optional[GGMLFileType] = None # path to the directory containing the model files - path_model: Path | None = None + path_model: Optional[Path] = None @staticmethod - def guessed(model: LazyModel) -> Params: + def guessed(model: LazyModel) -> "Params": # try transformer naming first - n_vocab, n_embd = model["model.embed_tokens.weight"].shape if "model.embed_tokens.weight" in model else model["tok_embeddings.weight"].shape + n_vocab, n_embd = ( + model["model.embed_tokens.weight"].shape + if "model.embed_tokens.weight" in model + else model["tok_embeddings.weight"].shape + ) # try transformer naming first if "model.layers.0.self_attn.q_proj.weight" in model: - n_layer = next(i for i in itertools.count() if f"model.layers.{i}.self_attn.q_proj.weight" not in model) - elif "model.layers.0.self_attn.W_pack.weight" in model: # next: try baichuan naming - n_layer = next(i for i in itertools.count() if f"model.layers.{i}.self_attn.W_pack.weight" not in model) + n_layer = next( + i + for i in itertools.count() + if f"model.layers.{i}.self_attn.q_proj.weight" not in model + ) + elif ( + "model.layers.0.self_attn.W_pack.weight" in model + ): # next: try baichuan naming + n_layer = next( + i + for i in itertools.count() + if f"model.layers.{i}.self_attn.W_pack.weight" not in model + ) else: - n_layer = next(i for i in itertools.count() if f"layers.{i}.attention.wq.weight" not in model) + n_layer = next( + i + for i in itertools.count() + if f"layers.{i}.attention.wq.weight" not in model + ) if n_layer < 1: - raise Exception("failed to guess 'n_layer'. This model is unknown or unsupported.\n" - "Suggestion: provide 'config.json' of the model in the same directory containing model files.") + raise Exception( + "failed to guess 'n_layer'. This model is unknown or unsupported.\n" + "Suggestion: provide 'config.json' of the model in the same directory containing model files." 
+ ) - n_head = n_embd // 128 # guessed - n_mult = 256 # guessed + n_head = n_embd // 128 # guessed + n_mult = 256 # guessed # TODO: verify this n_ff = int(2 * (4 * n_embd) / 3) n_ff = n_mult * ((n_ff + n_mult - 1) // n_mult) return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_layer = n_layer, - n_ctx = -1, - n_ff = n_ff, - n_head = n_head, - n_head_kv = n_head, - f_norm_eps = 1e-5, + n_vocab=n_vocab, + n_embd=n_embd, + n_layer=n_layer, + n_ctx=-1, + n_ff=n_ff, + n_head=n_head, + n_head_kv=n_head, + f_norm_eps=1e-5, ) @staticmethod - def loadHFTransformerJson(model: LazyModel, config_path: Path) -> Params: + def load_transformers_config(model: LazyModel, config_path: Path) -> "Params": config = json.load(open(config_path)) rope_scaling_type = f_rope_scale = n_orig_ctx = rope_finetuned = None @@ -224,20 +274,22 @@ class Params: rope_scaling_type = gguf.RopeScalingType.LINEAR elif typ == "yarn": rope_scaling_type = gguf.RopeScalingType.YARN - n_orig_ctx = rope_scaling['original_max_position_embeddings'] - rope_finetuned = rope_scaling['finetuned'] + n_orig_ctx = rope_scaling["original_max_position_embeddings"] + rope_finetuned = rope_scaling["finetuned"] else: - raise NotImplementedError(f'Unknown rope scaling type: {typ}') + raise NotImplementedError(f"Unknown rope scaling type: {typ}") if "max_sequence_length" in config: n_ctx = config["max_sequence_length"] elif "max_position_embeddings" in config: n_ctx = config["max_position_embeddings"] else: - raise Exception("failed to guess 'n_ctx'. This model is unknown or unsupported.\n" - "Suggestion: provide 'config.json' of the model in the same directory containing model files.") + raise Exception( + "failed to guess 'n_ctx'. This model is unknown or unsupported.\n" + "Suggestion: provide 'config.json' of the model in the same directory containing model files." 
+ ) - n_experts = None + n_experts = None n_experts_used = None if "num_local_experts" in config: @@ -245,30 +297,30 @@ class Params: n_experts_used = config["num_experts_per_tok"] return Params( - n_vocab = config["vocab_size"], - n_embd = config["hidden_size"], - n_layer = config["num_hidden_layers"], - n_ctx = n_ctx, - n_ff = config["intermediate_size"], - n_head = (n_head := config["num_attention_heads"]), - n_head_kv = config.get("num_key_value_heads", n_head), - n_experts = n_experts, - n_experts_used = n_experts_used, - f_norm_eps = config["rms_norm_eps"], - f_rope_freq_base = config.get("rope_theta"), - rope_scaling_type = rope_scaling_type, - f_rope_scale = f_rope_scale, - n_orig_ctx = n_orig_ctx, - rope_finetuned = rope_finetuned, + n_vocab=config["vocab_size"], + n_embd=config["hidden_size"], + n_layer=config["num_hidden_layers"], + n_ctx=n_ctx, + n_ff=config["intermediate_size"], + n_head=(n_head := config["num_attention_heads"]), + n_head_kv=config.get("num_key_value_heads", n_head), + n_experts=n_experts, + n_experts_used=n_experts_used, + f_norm_eps=config["rms_norm_eps"], + f_rope_freq_base=config.get("rope_theta"), + rope_scaling_type=rope_scaling_type, + f_rope_scale=f_rope_scale, + n_orig_ctx=n_orig_ctx, + rope_finetuned=rope_finetuned, ) # LLaMA v2 70B params.json # {"dim": 8192, "multiple_of": 4096, "ffn_dim_multiplier": 1.3, "n_heads": 64, "n_kv_heads": 8, "n_layers": 80, "norm_eps": 1e-05, "vocab_size": -1} @staticmethod - def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params: + def load_torch_params(model: LazyModel, config_path: Path) -> "Params": config = json.load(open(config_path)) - n_experts = None + n_experts = None n_experts_used = None f_rope_freq_base = None @@ -291,129 +343,249 @@ class Params: if config.get("moe"): n_ff = model["layers.0.feed_forward.experts.0.w1.weight"].shape[0] - n_experts = config["moe"]["num_experts"] + n_experts = config["moe"]["num_experts"] n_experts_used = config["moe"]["num_experts_per_tok"] f_rope_freq_base = 1e6 return Params( - n_vocab = model["tok_embeddings.weight"].shape[0], - n_embd = config["dim"], - n_layer = config["n_layers"], - n_ctx = n_ctx, - n_ff = n_ff, - n_head = (n_head := config["n_heads"]), - n_head_kv = config.get("n_kv_heads", n_head), - n_experts = n_experts, - n_experts_used = n_experts_used, - f_norm_eps = config["norm_eps"], - f_rope_freq_base = config.get("rope_theta", f_rope_freq_base), + n_vocab=config.get("vocab_size", model["tok_embeddings.weight"].shape[0]), + n_embd=config["dim"], + n_layer=config["n_layers"], + n_ctx=n_ctx, + n_ff=n_ff, + n_head=(n_head := config["n_heads"]), + n_head_kv=config.get("n_kv_heads", n_head), + n_experts=n_experts, + n_experts_used=n_experts_used, + f_norm_eps=config["norm_eps"], + f_rope_freq_base=config.get("rope_theta", f_rope_freq_base), ) @staticmethod - def load(model_plus: ModelPlus) -> Params: - hf_config_path = model_plus.paths[0].parent / "config.json" + def load(model_plus: ModelPlus) -> "Params": + hf_config_path = model_plus.paths[0].parent / "config.json" orig_config_path = model_plus.paths[0].parent / "params.json" if hf_config_path.exists(): - params = Params.loadHFTransformerJson(model_plus.model, hf_config_path) + params = Params.load_transformers_config(model_plus.model, hf_config_path) elif orig_config_path.exists(): - params = Params.loadOriginalParamsJson(model_plus.model, orig_config_path) - elif model_plus.format != 'none': + params = Params.load_torch_params(model_plus.model, orig_config_path) + elif model_plus.format != 
"none": params = Params.guessed(model_plus.model) else: - raise ValueError('Cannot guess params when model format is none') + raise ValueError("Cannot guess params when model format is none") params.path_model = model_plus.paths[0].parent return params -class VocabLoader: - def __init__(self, params: Params, fname_tokenizer: Path) -> None: - try: - from transformers import AutoTokenizer - except ImportError as e: - raise ImportError( - "To use VocabLoader, please install the `transformers` package. " - "You can install it with `pip install transformers`." - ) from e +class BpeVocab: # GPT + def __init__( + self, fname_tokenizer: Path, fname_added_tokens: Optional[Path] + ) -> None: + self.bpe_tokenizer = json.loads( + open(str(fname_tokenizer), encoding="utf-8").read() + ) + added_tokens: dict[str, int] + if fname_added_tokens is not None: + # FIXME: Verify that added tokens here _cannot_ overlap with the main vocab. + added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) + else: + # Fall back to trying to find the added tokens in tokenizer.json + tokenizer_json_file = fname_tokenizer.parent / "tokenizer.json" + if not tokenizer_json_file.is_file(): + added_tokens = {} + else: + tokenizer_json = json.load(open(tokenizer_json_file, encoding="utf-8")) + added_tokens = dict( + (item["content"], item["id"]) + for item in tokenizer_json.get("added_tokens", []) + # Added tokens here can be duplicates of the main vocabulary. + if item["content"] not in self.bpe_tokenizer + ) - try: - self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), trust_remote_code=True) - except ValueError: - self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), use_fast=False, trust_remote_code=True) + vocab_size: int = len(self.bpe_tokenizer) + expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) + actual_ids = sorted(added_tokens.values()) + if expected_ids != actual_ids: + expected_end_id = vocab_size + len(actual_ids) - 1 + raise Exception( + f"Expected the {len(actual_ids)} added token ID(s) to be sequential in the range {vocab_size} - {expected_end_id}; got {actual_ids}" + ) - self.added_tokens_dict: OrderedDict[str, int] = OrderedDict() + items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) + self.added_tokens_list = [text for (text, idx) in items] + self.vocab_size_base: int = vocab_size + self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_list) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens - for tok, tokidx in sorted(self.tokenizer.get_added_vocab().items(), key=lambda x: x[1]): - if tokidx >= params.n_vocab or tokidx < self.tokenizer.vocab_size: - continue + def bpe_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + tokenizer = self.bpe_tokenizer + reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.items()} - self.added_tokens_dict[tok] = tokidx + for i, _ in enumerate(tokenizer): + yield reverse_vocab[i], 0.0, gguf.TokenType.NORMAL - self.unk_token_id: int = self.tokenizer.unk_token_id - self.specials: dict[str, int] = { + def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + for text in self.added_tokens_list: + score = -1000.0 + yield text.encode("utf-8"), score, gguf.TokenType.CONTROL + + def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + yield from self.bpe_tokens() + yield from self.added_tokens() + + def __repr__(self) -> str: + return f"" + + +class SentencePieceVocab: # LlaMa + def __init__( + 
self, fname_tokenizer: Path, fname_added_tokens: Optional[Path] + ) -> None: + self.sentencepiece_tokenizer = SentencePieceProcessor(str(fname_tokenizer)) + added_tokens: dict[str, int] + if fname_added_tokens is not None: + added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) + else: + added_tokens = {} + + vocab_size: int = self.sentencepiece_tokenizer.vocab_size() + + new_tokens = { + id: piece for piece, id in added_tokens.items() if id >= vocab_size + } + expected_new_ids = list(range(vocab_size, vocab_size + len(new_tokens))) + actual_new_ids = sorted(new_tokens.keys()) + + if expected_new_ids != actual_new_ids: + raise ValueError( + f"Expected new token IDs {expected_new_ids} to be sequential; got {actual_new_ids}" + ) + + # Token pieces that were added to the base vocabulary. + self.added_tokens_list = [new_tokens[id] for id in actual_new_ids] + self.vocab_size_base = vocab_size + self.vocab_size = self.vocab_size_base + len(self.added_tokens_list) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens + + def sentencepiece_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + tokenizer = self.sentencepiece_tokenizer + for i in range(tokenizer.vocab_size()): + piece = tokenizer.id_to_piece(i) + text: bytes = piece.encode("utf-8") + score: float = tokenizer.get_score(i) + + toktype = gguf.TokenType.NORMAL + if tokenizer.is_unknown(i): + toktype = gguf.TokenType.UNKNOWN + if tokenizer.is_control(i): + toktype = gguf.TokenType.CONTROL + + # NOTE: I think added_tokens are user defined. + # ref: https://github.com/google/sentencepiece/blob/master/src/sentencepiece_model.proto + # if tokenizer.is_user_defined(i): toktype = gguf.TokenType.USER_DEFINED + + if tokenizer.is_unused(i): + toktype = gguf.TokenType.UNUSED + if tokenizer.is_byte(i): + toktype = gguf.TokenType.BYTE + + yield text, score, toktype + + def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + for text in self.added_tokens_list: + score = -1000.0 + yield text.encode("utf-8"), score, gguf.TokenType.USER_DEFINED + + def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + yield from self.sentencepiece_tokens() + yield from self.added_tokens() + + def __repr__(self) -> str: + return f"" + + +class HfVocab: + def __init__( + self, + fname_tokenizer: Path, + fname_added_tokens: Optional[Path] = None, + ) -> None: + print("fname_tokenizer:", fname_tokenizer) + # Allow the tokenizer to default to slow or fast versions. + # Explicitly set tokenizer to use local paths. 
+ self.tokenizer = AutoTokenizer.from_pretrained( + fname_tokenizer, + cache_dir=fname_tokenizer, + local_files_only=True, + ) + + # Initialize lists and dictionaries for added tokens + self.added_tokens_list = [] + self.added_tokens_dict = dict() + self.added_tokens_ids = set() + + # Process added tokens + for tok, tokidx in sorted( + self.tokenizer.get_added_vocab().items(), key=lambda x: x[1] + ): + # Only consider added tokens that are not in the base vocabulary + if tokidx >= self.tokenizer.vocab_size: + self.added_tokens_list.append(tok) + self.added_tokens_dict[tok] = tokidx + self.added_tokens_ids.add(tokidx) + + # Store special tokens and their IDs + self.specials = { tok: self.tokenizer.get_vocab()[tok] for tok in self.tokenizer.all_special_tokens } - self.special_ids: set[int] = set(self.tokenizer.all_special_ids) - self.reverse_vocab = {id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items()} - self.vocab_size_base: int = self.tokenizer.vocab_size - self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_dict) - self.fname_tokenizer: Path = fname_tokenizer + self.special_ids = set(self.tokenizer.all_special_ids) - vocab_file = "tokenizer.model" - path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) - if path_candidate is not None: - self.spm = SentencePieceProcessor(str(path_candidate)) - print(self.spm.vocab_size(), self.vocab_size_base) - else: - self.spm = None + # Set vocabulary sizes + self.vocab_size_base = self.tokenizer.vocab_size + self.vocab_size = self.vocab_size_base + len(self.added_tokens_list) - def hf_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - added_tokens_ids = set(self.added_tokens_dict.values()) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens - for i in range(self.vocab_size_base): - if i in added_tokens_ids: + def hf_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: + reverse_vocab = { + id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items() + } + + for token_id in range(self.vocab_size_base): + # Skip processing added tokens here + if token_id in self.added_tokens_ids: continue - text = self.reverse_vocab[i].encode("utf-8") - yield text, self.get_token_score(i), self.get_token_type(i) + # Convert token text to bytes + token_text = reverse_vocab[token_id].encode("utf-8") - def get_token_type(self, token_id: int) -> gguf.TokenType: - toktype = gguf.TokenType.NORMAL + # Yield token text, score, and type + yield token_text, self.get_token_score(token_id), self.get_token_type( + token_id, self.special_ids # Reuse already stored special IDs + ) - if self.spm is not None and token_id < self.spm.vocab_size(): - if self.spm.is_unknown(token_id): - toktype = gguf.TokenType.UNKNOWN - if self.spm.is_control(token_id): - toktype = gguf.TokenType.CONTROL - if self.spm.is_unused(token_id): - toktype = gguf.TokenType.UNUSED - if self.spm.is_byte(token_id): - toktype = gguf.TokenType.BYTE - else: - token = self.reverse_vocab[token_id] - if token_id == self.unk_token_id: - toktype = gguf.TokenType.UNKNOWN - elif token_id in self.special_ids: - toktype = gguf.TokenType.CONTROL - elif len(token) == 6 and token.startswith("<0x") and token.endswith(">"): - toktype = gguf.TokenType.BYTE - - return toktype + def get_token_type(self, token_id: int, special_ids: set) -> gguf.TokenType: + # Determine token type based on whether it's a special token + return ( + gguf.TokenType.CONTROL if token_id in special_ids else gguf.TokenType.NORMAL + ) 
def get_token_score(self, token_id: int) -> float: - if self.spm is not None and token_id < self.spm.vocab_size(): - return cast(float, self.spm.get_score(token_id)) - return 0.0 + # Placeholder for actual logic to determine the token's score + # This needs to be implemented based on specific requirements + return -1000.0 # Default score def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - - for text in self.added_tokens_dict: + for text in self.added_tokens_list: if text in self.specials: - - toktype = self.get_token_type(self.specials[text]) + toktype = self.get_token_type(self.specials[text], self.special_ids) score = self.get_token_score(self.specials[text]) else: @@ -422,45 +594,18 @@ class VocabLoader: yield text.encode("utf-8"), score, toktype - def has_newline_token(self) -> bool: - return '<0x0A>' in self.tokenizer.vocab or '\n' in self.tokenizer.vocab + def has_newline_token(self): + return "<0x0A>" in self.tokenizer.vocab or "\n" in self.tokenizer.vocab def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: yield from self.hf_tokens() yield from self.added_tokens() - def get_vocab_type(self) -> str: - path_candidates = [] - vocab_file = "tokenizer.model" - path_candidates.append(vocab_file) - path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) - if path_candidate is not None: - return "llama" - - vocab_file = "vocab.json" - path_candidates.append(vocab_file) - path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) - if path_candidate is not None: - return "gpt2" - - vocab_file = "tokenizer.json" - path_candidates.append(vocab_file) - path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) - if path_candidate: - if not self.has_newline_token(): - return "gpt2" - return "llama" - - raise FileNotFoundError( - f"Could not find {path_candidates} in {self.fname_tokenizer} or its parent; " - "if it's in another directory, pass the directory as --vocab-dir" - ) - def __repr__(self) -> str: - return f"" + return f"" -Vocab: TypeAlias = 'VocabLoader' +Vocab: TypeAlias = "BpeVocab | SentencePieceVocab | HfVocab" # @@ -724,13 +869,17 @@ class LazyUnpickler(pickle.Unpickler): CLASSES: dict[tuple[str, str], Any] = { # getattr used here as a workaround for mypy not being smart enough to determine # the staticmethods have a __func__ attribute. 
- ('torch._tensor', '_rebuild_from_type_v2'): getattr(rebuild_from_type_v2, '__func__'), - ('torch._utils', '_rebuild_tensor_v2'): getattr(lazy_rebuild_tensor_v2, '__func__'), - ('torch', 'BFloat16Storage'): LazyStorageKind(DT_BF16), - ('torch', 'HalfStorage'): LazyStorageKind(DT_F16), - ('torch', 'FloatStorage'): LazyStorageKind(DT_F32), - ('torch', 'IntStorage'): LazyStorageKind(DT_I32), - ('torch', 'Tensor'): LazyTensor, + ("torch._tensor", "_rebuild_from_type_v2"): getattr( + rebuild_from_type_v2, "__func__" + ), + ("torch._utils", "_rebuild_tensor_v2"): getattr( + lazy_rebuild_tensor_v2, "__func__" + ), + ("torch", "BFloat16Storage"): LazyStorageKind(DT_BF16), + ("torch", "HalfStorage"): LazyStorageKind(DT_F16), + ("torch", "FloatStorage"): LazyStorageKind(DT_F32), + ("torch", "IntStorage"): LazyStorageKind(DT_I32), + ("torch", "Tensor"): LazyTensor, } def find_class(self, module: str, name: str) -> Any: @@ -839,32 +988,43 @@ def bounded_parallel_map(func: Callable[[In], Out], iterable: Iterable[In], conc def check_vocab_size(params: Params, vocab: Vocab, pad_vocab: bool = False) -> None: - if params.n_vocab != vocab.vocab_size: - if params.n_vocab == vocab.vocab_size: - print("Ignoring added_tokens.json since model matches vocab size without it.") - vocab.added_tokens_dict = OrderedDict() - vocab.vocab_size = vocab.vocab_size - return + # Handle special case where the model's vocab size is not set + if params.n_vocab == -1: + raise ValueError( + f"The model's vocab size is set to -1 in params.json. Please update it manually. Maybe {vocab.vocab_size}?" + ) - if pad_vocab and params.n_vocab > vocab.vocab_size: - pad_count = params.n_vocab - vocab.vocab_size - print(f'Padding vocab with {pad_count} token(s) - through ') - for i in range(1, (params.n_vocab - vocab.vocab_size) + 1): - vocab.added_tokens_dict[f''] = -1 - vocab.vocab_size = params.n_vocab - return - msg = f"Vocab size mismatch (model has {params.n_vocab}, but {vocab.fname_tokenizer}" - msg += f" has {vocab.vocab_size})." - if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20: - msg += f" Most likely you are missing added_tokens.json (should be in {vocab.fname_tokenizer.parent})." - if vocab.vocab_size < params.n_vocab: - msg += " Possibly try using the --padvocab option." - raise Exception(msg) + # Check for a vocab size mismatch + if params.n_vocab == vocab.vocab_size: + print("Ignoring added_tokens.json since model matches vocab size without it.") + return + + if pad_vocab and params.n_vocab > vocab.vocab_size: + pad_count = params.n_vocab - vocab.vocab_size + print( + f"Padding vocab with {pad_count} token(s) - through " + ) + for i in range(1, pad_count + 1): + vocab.added_tokens_dict[f""] = -1 + vocab.vocab_size = params.n_vocab + return + + msg = f"Vocab size mismatch (model has {params.n_vocab}, but {vocab.fname_tokenizer} has {vocab.vocab_size})." + if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20: + msg += f" Most likely you are missing added_tokens.json (should be in {vocab.fname_tokenizer.parent})." + if vocab.vocab_size < params.n_vocab: + msg += " Add the --pad-vocab option and try again." 
+ + raise Exception(msg) class OutputFile: - def __init__(self, fname_out: Path, endianess:gguf.GGUFEndian = gguf.GGUFEndian.LITTLE) -> None: - self.gguf = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess) + def __init__( + self, fname_out: Path, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE + ) -> None: + self.gguf = gguf.GGUFWriter( + fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess + ) def add_meta_arch(self, params: Params) -> None: name = "LLaMA" @@ -873,16 +1033,21 @@ class OutputFile: if params.n_ctx == 4096: name = "LLaMA v2" elif params.path_model is not None: - name = str(params.path_model.parent).split('/')[-1] + name = str(params.path_model.parent).split("/")[-1] - self.gguf.add_name (name) - self.gguf.add_context_length (params.n_ctx) - self.gguf.add_embedding_length (params.n_embd) - self.gguf.add_block_count (params.n_layer) - self.gguf.add_feed_forward_length (params.n_ff) + self.gguf.add_name(name) + self.gguf.add_context_length(params.n_ctx) + self.gguf.add_embedding_length(params.n_embd) + self.gguf.add_block_count(params.n_layer) + self.gguf.add_feed_forward_length(params.n_ff) self.gguf.add_rope_dimension_count(params.n_embd // params.n_head) - self.gguf.add_head_count (params.n_head) - self.gguf.add_head_count_kv (params.n_head_kv) + self.gguf.add_head_count(params.n_head) + self.gguf.add_head_count_kv(params.n_head_kv) + + if params.f_norm_eps is None: + raise ValueError("f_norm_eps is None") + + self.gguf.add_layer_norm_rms_eps(params.f_norm_eps) if params.n_experts: self.gguf.add_expert_count(params.n_experts) @@ -890,11 +1055,6 @@ class OutputFile: if params.n_experts_used: self.gguf.add_expert_used_count(params.n_experts_used) - if params.f_norm_eps: - self.gguf.add_layer_norm_rms_eps(params.f_norm_eps) - else: - raise ValueError('f_norm_eps is None') - if params.f_rope_freq_base is not None: self.gguf.add_rope_freq_base(params.f_rope_freq_base) @@ -912,18 +1072,44 @@ class OutputFile: if params.ftype is not None: self.gguf.add_file_type(params.ftype) - def add_meta_vocab(self, vocab: Vocab) -> None: + def handle_tokenizer_model(self, vocab: Vocab) -> str: + # Map the vocab types to the supported tokenizer models + tokenizer_model = { + SentencePieceVocab: "llama", + HfVocab: "llama", + BpeVocab: "gpt2", + }.get(type(vocab)) + + # Block if vocab type is not predefined + if tokenizer_model is None: + raise ValueError("Unknown vocab type: Not supported") + + return tokenizer_model + + def extract_vocabulary_from_model(self, vocab: Vocab) -> Tuple[list, list, list]: tokens = [] scores = [] toktypes = [] + # NOTE: `all_tokens` returns the base vocabulary and added tokens for text, score, toktype in vocab.all_tokens(): tokens.append(text) scores.append(score) toktypes.append(toktype) - vocab_type = vocab.get_vocab_type() - self.gguf.add_tokenizer_model(vocab_type) + return tokens, scores, toktypes + + def add_meta_vocab(self, vocab: Vocab) -> None: + # Handle the tokenizer model + tokenizer_model = self.handle_tokenizer_model(vocab) + + # Ensure that tokenizer_model is added to the GGUF model + self.gguf.add_tokenizer_model(tokenizer_model) + + # Extract model vocabulary for model conversion + tokens, scores, toktypes = self.extract_vocabulary_from_model(vocab) + + # Add extracted token information for model conversion self.gguf.add_token_list(tokens) self.gguf.add_token_scores(scores) self.gguf.add_token_types(toktypes) @@ -933,10 +1119,14 @@ class OutputFile: def add_tensor_info(self, name: str, tensor: LazyTensor) -> 
None: n_elements = int(np.prod(tensor.shape)) - raw_dtype = getattr(tensor.data_type, 'ggml_type', None) - data_type = getattr(tensor.data_type, 'quantized_type', None) or tensor.data_type.dtype + raw_dtype = getattr(tensor.data_type, "ggml_type", None) + data_type = ( + getattr(tensor.data_type, "quantized_type", None) or tensor.data_type.dtype + ) data_nbytes = tensor.data_type.elements_to_bytes(n_elements) - self.gguf.add_tensor_info(name, tensor.shape, data_type, data_nbytes, raw_dtype = raw_dtype) + self.gguf.add_tensor_info( + name, tensor.shape, data_type, data_nbytes, raw_dtype=raw_dtype + ) def write_meta(self) -> None: self.gguf.write_header_to_file() @@ -950,11 +1140,14 @@ class OutputFile: @staticmethod def write_vocab_only( - fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab, + fname_out: Path, + params: Params, + vocab: Vocab, + svocab: gguf.SpecialVocab, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, pad_vocab: bool = False, ) -> None: - check_vocab_size(params, vocab, pad_vocab = pad_vocab) + check_vocab_size(params, vocab, pad_vocab=pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -982,12 +1175,17 @@ class OutputFile: @staticmethod def write_all( - fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, + fname_out: Path, + ftype: GGMLFileType, + params: Params, + model: LazyModel, + vocab: Vocab, + svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, pad_vocab: bool = False, ) -> None: - check_vocab_size(params, vocab, pad_vocab = pad_vocab) + check_vocab_size(params, vocab, pad_vocab=pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -1004,18 +1202,30 @@ class OutputFile: of.write_tensor_info() # tensor data - ndarrays_inner = bounded_parallel_map(OutputFile.do_item, model.items(), concurrency = concurrency) + ndarrays_inner = bounded_parallel_map( + OutputFile.do_item, model.items(), concurrency=concurrency + ) if ftype == GGMLFileType.MostlyQ8_0: - ndarrays = bounded_parallel_map(OutputFile.maybe_do_quantize, ndarrays_inner, concurrency = concurrency, max_workers = concurrency, use_processpool_executor = True) + ndarrays = bounded_parallel_map( + OutputFile.maybe_do_quantize, + ndarrays_inner, + concurrency=concurrency, + max_workers=concurrency, + use_processpool_executor=True, + ) else: ndarrays = map(OutputFile.maybe_do_quantize, ndarrays_inner) start = time.time() - for i, ((name, lazy_tensor), ndarray) in enumerate(zip(model.items(), ndarrays)): + for i, ((name, lazy_tensor), ndarray) in enumerate( + zip(model.items(), ndarrays) + ): elapsed = time.time() - start - size = ' x '.join(f"{dim:6d}" for dim in lazy_tensor.shape) + size = " x ".join(f"{dim:6d}" for dim in lazy_tensor.shape) padi = len(str(len(model))) - print(f"[{i+1:{padi}d}/{len(model)}] Writing tensor {name:38s} | size {size:16} | type {lazy_tensor.data_type.name:4} | T+{int(elapsed):4}") + print( + f"[{i+1:{padi}d}/{len(model)}] Writing tensor {name:38s} | size {size:16} | type {lazy_tensor.data_type.name:4} | T+{int(elapsed):4}" + ) of.gguf.write_tensor_data(ndarray) of.close() @@ -1145,30 +1355,95 @@ def load_some_model(path: Path) -> ModelPlus: return model_plus -def find_vocab_file_path(path: Path, vocab_file: str) -> Optional[Path]: - path2 = path / vocab_file - # Use `.parent` instead of /.. to handle the symlink case better. 
- path3 = path.parent / vocab_file +class VocabFactory: + def __init__(self, path: Path): + self.path = path + self.files = { + "tokenizer.model": None, + "vocab.json": None, + "tokenizer.json": None, + } + self._detect_files() - if path2.exists(): - return path2 - if path3.exists(): - return path3 + def _detect_files(self): + for file in self.files.keys(): + file_path = self.path / file + parent_file_path = self.path.parent / file + if file_path.exists(): + self.files[file] = file_path + elif parent_file_path.exists(): + self.files[file] = parent_file_path - return None + def _select_file(self, vocabtype: Optional[str]) -> Path: + if vocabtype in ["spm", "bpe"]: + # For SentencePiece and BPE, return specific files as before + file_key = "tokenizer.model" if vocabtype == "spm" else "vocab.json" + if self.files[file_key]: + return self.files[file_key] + else: + raise FileNotFoundError(f"{vocabtype} {file_key} not found.") + elif vocabtype == "hfft": + # For Hugging Face Fast Tokenizer, return the directory path instead of a specific file + return self.path + else: + raise ValueError(f"Unsupported vocabulary type {vocabtype}") + + def _create_special_vocab( + self, + vocab: Vocab, + vocabtype: str, + model_parent_path: Path, + ) -> gguf.SpecialVocab: + load_merges = vocabtype == "bpe" + n_vocab = vocab.vocab_size if hasattr(vocab, "vocab_size") else None + return gguf.SpecialVocab( + model_parent_path, + load_merges=load_merges, + special_token_types=None, # Predetermined or passed as a parameter + n_vocab=n_vocab, + ) + + def load_vocab( + self, vocabtype: str, model_parent_path: Path + ) -> Tuple[Vocab, gguf.SpecialVocab]: + path = self._select_file(vocabtype) + print(f"Loading vocab file '{path}', type '{vocabtype}'") + + added_tokens_path = path.parent / "added_tokens.json" + if vocabtype == "bpe": + vocab = BpeVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + elif vocabtype == "spm": + vocab = SentencePieceVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + elif vocabtype == "hfft": + vocab = HfVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + else: + raise ValueError(f"Unsupported vocabulary type {vocabtype}") + special_vocab = self._create_special_vocab( + vocab, + vocabtype, + model_parent_path, + ) + return vocab, special_vocab -def default_outfile(model_paths: list[Path], file_type: GGMLFileType) -> Path: +def default_output_file(model_paths: list[Path], file_type: GGMLFileType) -> Path: namestr = { - GGMLFileType.AllF32: "f32", + GGMLFileType.AllF32: "f32", GGMLFileType.MostlyF16: "f16", - GGMLFileType.MostlyQ8_0:"q8_0", + GGMLFileType.MostlyQ8_0: "q8_0", }[file_type] ret = model_paths[0].parent / f"ggml-model-{namestr}.gguf" if ret in model_paths: sys.stderr.write( f"Error: Default output path ({ret}) would overwrite the input. 
" - "Please explicitly specify a path using --outfile.\n") + "Please explicitly specify a path using --outfile.\n" + ) sys.exit(1) return ret @@ -1178,32 +1453,111 @@ def do_dump_model(model_plus: ModelPlus) -> None: print(f"model_plus.format = {model_plus.format!r}") print(f"model_plus.vocab = {model_plus.vocab!r}") for name, lazy_tensor in model_plus.model.items(): - print(f"{name}: shape={lazy_tensor.shape} type={lazy_tensor.data_type}; {lazy_tensor.description}") + print( + f"{name}: shape={lazy_tensor.shape} type={lazy_tensor.data_type}; {lazy_tensor.description}" + ) -def main(args_in: list[str] | None = None) -> None: +def get_argument_parser() -> ArgumentParser: output_choices = ["f32", "f16"] if np.uint32(1) == np.uint32(1).newbyteorder("<"): # We currently only support Q8_0 output on little endian systems. output_choices.append("q8_0") - parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") - parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) - parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") - parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") - parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") - parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") - parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default = DEFAULT_CONCURRENCY) - parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") - parser.add_argument("--padvocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") - args = parser.parse_args(args_in) + parser = argparse.ArgumentParser( + description="Convert a LLaMa model to a GGML compatible file" + ) + + parser.add_argument( + "model", + type=Path, + help="Directory containing the model file or the model file itself (*.pth, *.pt, *.bin)", + ) + + parser.add_argument( + "--awq-path", + type=Path, + help="Path to the Activation-aware Weight Quantization cache file", + default=None, + ) + + parser.add_argument( + "--dump", + action="store_true", + help="Display the model content without converting it", + ) + + parser.add_argument( + "--dump-single", + action="store_true", + help="Display the content of a single model file without conversion", + ) + + parser.add_argument( + "--vocab-only", + action="store_true", + help="Extract and output only the vocabulary", + ) + + parser.add_argument( + "--outtype", + choices=output_choices, + help="Output format - note: q8_0 may be very slow (default: f16 or f32 based on input)", + ) + + parser.add_argument( + "--vocab-dir", + type=Path, + help="Directory containing the tokenizer.model, if separate from the model file", + ) + + parser.add_argument( + "--vocab-type", + choices=["spm", "bpe", 
"hfft"], # hfft: Hugging Face Fast Tokenizer + default="spm", + help="The vocabulary format used to define the tokenizer model (default: spm)", + ) + + parser.add_argument( + "--pad-vocab", + action="store_true", + help="Add padding tokens when the model's vocabulary size exceeds the tokenizer metadata", + ) + + parser.add_argument( + "--outfile", + type=Path, + help="Specify the path for the output file (default is based on input)", + ) + + parser.add_argument( + "--ctx", type=int, help="Model training context (default is based on input)" + ) + + parser.add_argument( + "--concurrency", + type=int, + help=f"Concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", + default=DEFAULT_CONCURRENCY, + ) + + parser.add_argument( + "--big-endian", + action="store_true", + help="Indicate that the model is executed on a big-endian machine", + ) + + return parser + + +def main(argv: Optional[list[str]] = None) -> None: + parser = get_argument_parser() + args = parser.parse_args(argv) + if args.awq_path: - sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + sys.path.insert(1, str(Path(__file__).resolve().parent / "awq-py")) from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" if tmp_model_path.is_dir(): print(f"{tmp_model_path} exists as a weighted model.") @@ -1222,22 +1576,27 @@ def main(args_in: list[str] | None = None) -> None: if not args.vocab_only: model_plus = load_some_model(args.model) else: - model_plus = ModelPlus(model = {}, paths = [args.model / 'dummy'], format = 'none', vocab = None) + model_plus = ModelPlus( + model={}, paths=[args.model / "dummy"], format="none", vocab=None + ) if args.dump: do_dump_model(model_plus) return + endianess = gguf.GGUFEndian.LITTLE - if args.bigendian: + if args.big_endian: endianess = gguf.GGUFEndian.BIG params = Params.load(model_plus) if params.n_ctx == -1: if args.ctx is None: - raise Exception("The model doesn't have a context size, and you didn't specify one with --ctx\n" - "Please specify one with --ctx:\n" - " - LLaMA v1: --ctx 2048\n" - " - LLaMA v2: --ctx 4096\n") + raise Exception( + "The model doesn't have a context size, and you didn't specify one with --ctx\n" + "Please specify one with --ctx:\n" + " - LLaMA v1: --ctx 2048\n" + " - LLaMA v2: --ctx 4096\n" + ) params.n_ctx = args.ctx if args.outtype: @@ -1249,47 +1608,51 @@ def main(args_in: list[str] | None = None) -> None: print(f"params = {params}") - vocab: Vocab + model_parent_path = model_plus.paths[0].parent + vocab_path = Path(args.vocab_dir or args.model or model_parent_path) + vocab_factory = VocabFactory(vocab_path) + vocab, special_vocab = vocab_factory.load_vocab(args.vocab_type, model_parent_path) + if args.vocab_only: if not args.outfile: raise ValueError("need --outfile if using --vocab-only") - # FIXME: Try to respect vocab_dir somehow? 
- vocab = VocabLoader(params, args.vocab_dir or args.model) - special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = True, - n_vocab = vocab.vocab_size) outfile = args.outfile - OutputFile.write_vocab_only(outfile, params, vocab, special_vocab, - endianess = endianess, pad_vocab = args.padvocab) + OutputFile.write_vocab_only( + outfile, + params, + vocab, + special_vocab, + endianess=endianess, + pad_vocab=args.pad_vocab, + ) print(f"Wrote {outfile}") return if model_plus.vocab is not None and args.vocab_dir is None: vocab = model_plus.vocab - else: - vocab_dir = args.vocab_dir if args.vocab_dir else model_plus.paths[0].parent - vocab = VocabLoader(params, vocab_dir) - # FIXME: Try to respect vocab_dir somehow? - print(f"Vocab info: {vocab}") - special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = True, - n_vocab = vocab.vocab_size) - - print(f"Special vocab info: {special_vocab}") - model = model_plus.model - model = convert_model_names(model, params) - ftype = pick_output_type(model, args.outtype) - model = convert_to_output_type(model, ftype) - outfile = args.outfile or default_outfile(model_plus.paths, ftype) + model = model_plus.model + model = convert_model_names(model, params) + ftype = pick_output_type(model, args.outtype) + model = convert_to_output_type(model, ftype) + outfile = args.outfile or default_output_file(model_plus.paths, ftype) params.ftype = ftype print(f"Writing {outfile}, format {ftype}") - OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, - concurrency = args.concurrency, endianess = endianess, pad_vocab = args.padvocab) + OutputFile.write_all( + outfile, + ftype, + params, + model, + vocab, + special_vocab, + concurrency=args.concurrency, + endianess=endianess, + pad_vocab=args.pad_vocab, + ) print(f"Wrote {outfile}") -if __name__ == '__main__': - main() +if __name__ == "__main__": + main(sys.argv[1:]) # Exclude the first element (script name) from sys.argv From 4f56458d34cb13dcbf69aca650e9bf77d5497e6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 10 Jan 2024 01:04:33 +0100 Subject: [PATCH 250/811] Python script to compare commits with llama-bench (#4844) --- scripts/compare-llama-bench.py | 356 +++++++++++++++++++++++++++++++++ 1 file changed, 356 insertions(+) create mode 100755 scripts/compare-llama-bench.py diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py new file mode 100755 index 000000000..bc1714487 --- /dev/null +++ b/scripts/compare-llama-bench.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python3 + +import argparse +import heapq +import sys +import os +from glob import glob +import sqlite3 + +try: + import git + from tabulate import tabulate +except ImportError: + print("ERROR: the following Python libraries are required: GitPython, tabulate.") + sys.exit(1) + +# Properties by which to differentiate results per commit: +KEY_PROPERTIES = [ + "cuda", "opencl", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", + "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", + "n_gpu_layers", "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" +] + +# Properties that are boolean and are converted to Yes/No for the table: +BOOL_PROPERTIES = ["cuda", "opencl", "metal", "gpu_blas", "blas"] + +# Header names for the table: +PRETTY_NAMES = { + "cuda": "CUDA", "opencl": "OpenCL", "metal": "Metal", "gpu_blas": "GPU BLAS", "blas": "BLAS", + "cpu_info": "CPU", 
"gpu_info": "GPU", "model_filename": "File", "model_type": "Model", + "model_size": "Model Size [GiB]", "model_n_params": "Num. of Parameters", + "n_batch": "Batch size", "n_threads": "Threads", "type_k": "K type", "type_v": "V type", + "n_gpu_layers": "GPU layers", "main_gpu": "Main GPU", "no_kv_offload": "NKVO", + "mul_mat_q": "MMQ", "tensor_split": "Tensor split" +} + +DEFAULT_SHOW = ["model_type"] # Always show these properties by default. +DEFAULT_HIDE = ["model_filename"] # Always hide these properties by default. +GPU_NAME_STRIP = ["NVIDIA GeForce ", "Tesla ", "AMD Radeon "] # Strip prefixes for smaller tables. + +DESCRIPTION = """Creates tables from llama-bench data written to an SQLite database. Example usage (Linux): + +$ git checkout master +$ make clean && make llama-bench +$ ./llama-bench -o sql | sqlite3 llama-bench.sqlite +$ git checkout some_branch +$ make clean && make llama-bench +$ ./llama-bench -o sql | sqlite3 llama-bench.sqlite +$ ./scripts/compare-llama-bench.py + +Performance numbers from multiple runs per commit are averaged WITHOUT being weighted by the --repetitions parameter of llama-bench. +""" + +parser = argparse.ArgumentParser( + description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) +help_b = ( + "The baseline commit to compare performance to. " + "Accepts either a branch name, tag name, or commit hash. " + "Defaults to latest master commit with data." +) +parser.add_argument("-b", "--baseline", help=help_b) +help_c = ( + "The commit whose performance is to be compared to the baseline. " + "Accepts either a branch name, tag name, or commit hash. " + "Defaults to the non-master commit for which llama-bench was run most recently." +) +parser.add_argument("-c", "--compare", help=help_c) +help_i = ( + "Input SQLite file for comparing commits. " + "Defaults to 'llama-bench.sqlite' in the current working directory. " + "If no such file is found and there is exactly one .sqlite file in the current directory, " + "that file is instead used as input." +) +parser.add_argument("-i", "--input", help=help_i) +help_o = ( + "Output format for the table. " + "Defaults to 'pipe' (GitHub compatible). " + "Also supports e.g. 'latex' or 'mediawiki'. " + "See tabulate documentation for full list." +) +parser.add_argument("-o", "--output", help=help_o, default="pipe") +help_s = ( + "Columns to add to the table. " + "Accepts a comma-separated list of values. " + f"Legal values: {', '.join(KEY_PROPERTIES[:-2])}. " + "Defaults to model name (model_type) and CPU and/or GPU name (cpu_info, gpu_info) " + "plus any column where not all data points are the same. " + "If the columns are manually specified, then the results for each unique combination of the " + "specified values are averaged WITHOUT weighing by the --repetitions parameter of llama-bench." 
+) +parser.add_argument("-s", "--show", help=help_s) + +known_args, unknown_args = parser.parse_known_args() + +if unknown_args: + print(f"ERROR: Received unknown args: {unknown_args}.") + print() + parser.print_help() + sys.exit(1) + +input_file = known_args.input +if input_file is None and os.path.exists("./llama-bench.sqlite"): + input_file = "llama-bench.sqlite" +if input_file is None: + sqlite_files = glob("*.sqlite") + if len(sqlite_files) == 1: + input_file = sqlite_files[0] + +if input_file is None: + print("ERROR: Cannot find a suitable input file, please provide one.") + print() + parser.print_help() + sys.exit(1) + +connection = sqlite3.connect(input_file) +cursor = connection.cursor() +builds = cursor.execute("SELECT DISTINCT build_commit FROM test;").fetchall() + +try: + repo = git.Repo(".", search_parent_directories=True) +except git.exc.InvalidGitRepositoryError: + repo = None + + +def find_parent_in_data(commit): + """Helper function to find the most recent parent measured in number of commits for which there is data.""" + heap = [(0, commit)] + seen_hexsha8 = set() + while heap: + depth, current_commit = heapq.heappop(heap) + current_hexsha8 = commit.hexsha[:8] + if (current_hexsha8,) in builds: + return current_hexsha8 + for parent in commit.parents: + parent_hexsha8 = parent.hexsha[:8] + if parent_hexsha8 not in seen_hexsha8: + seen_hexsha8.add(parent_hexsha8) + heapq.heappush(heap, (depth + 1, parent)) + return None + + +def get_all_parent_hexsha8s(commit): + """Helper function to recursively get hexsha8 values for all parents of a commit.""" + unvisited = [commit] + visited = [] + + while unvisited: + current_commit = unvisited.pop(0) + visited.append(current_commit.hexsha[:8]) + for parent in current_commit.parents: + if parent.hexsha[:8] not in visited: + unvisited.append(parent) + + return visited + + +def get_commit_name(hexsha8): + """Helper function to find a human-readable name for a commit if possible.""" + if repo is None: + return hexsha8 + for h in repo.heads: + if h.commit.hexsha[:8] == hexsha8: + return h.name + for t in repo.tags: + if t.commit.hexsha[:8] == hexsha8: + return t.name + return hexsha8 + + +def get_commit_hexsha8(name): + """Helper function to search for a commit given a human-readable name.""" + if repo is None: + return None + for h in repo.heads: + if h.name == name: + return h.commit.hexsha[:8] + for t in repo.tags: + if t.name == name: + return t.commit.hexsha[:8] + return None + + +hexsha8_baseline = name_baseline = None + +# If the user specified a baseline, try to find a commit for it: +if known_args.baseline is not None: + if (known_args.baseline,) in builds: + hexsha8_baseline = known_args.baseline + if hexsha8_baseline is None: + hexsha8_baseline = get_commit_hexsha8(known_args.baseline) + name_baseline = known_args.baseline + if hexsha8_baseline is None: + print(f"ERROR: cannot find data for baseline={known_args.baseline}.") + sys.exit(1) +# Otherwise, search for the most recent parent of master for which there is data: +elif repo is not None: + hexsha8_baseline = find_parent_in_data(repo.heads.master.commit) + + if hexsha8_baseline is None: + print("ERROR: No baseline was provided and did not find data for any master branch commits.") + print() + parser.print_help() + sys.exit(1) +else: + print( + "ERROR: No baseline was provided and the current working directory " + "is not part of a git repository from which a baseline could be inferred." 
+ ) + print() + parser.print_help() + sys.exit(1) + + +name_baseline = get_commit_name(hexsha8_baseline) + +hexsha8_compare = name_compare = None + +# If the user has specified a compare value, try to find a corresponding commit: +if known_args.compare is not None: + if (known_args.compare,) in builds: + hexsha8_compare = known_args.compare + if hexsha8_compare is None: + hexsha8_compare = get_commit_hexsha8(known_args.compare) + name_compare = known_args.compare + if hexsha8_compare is None: + print(f"ERROR: cannot find data for baseline={known_args.compare}.") + sys.exit(1) +# Otherwise, search for the commit for llama-bench was most recently run +# and that is not a parent of master: +elif repo is not None: + hexsha8s_master = get_all_parent_hexsha8s(repo.heads.master.commit) + builds_timestamp = cursor.execute( + "SELECT build_commit, test_time FROM test ORDER BY test_time;").fetchall() + for (hexsha8, _) in reversed(builds_timestamp): + if hexsha8 not in hexsha8s_master: + hexsha8_compare = hexsha8 + break + + if hexsha8_compare is None: + print("ERROR: No compare target was provided and did not find data for any non-master commits.") + print() + parser.print_help() + sys.exit(1) +else: + print( + "ERROR: No compare target was provided and the current working directory " + "is not part of a git repository from which a compare target could be inferred." + ) + print() + parser.print_help() + sys.exit(1) + +name_compare = get_commit_name(hexsha8_compare) + + +def get_rows(properties): + """ + Helper function that gets table rows for some list of properties. + Rows are created by combining those where all provided properties are equal. + The resulting rows are then grouped by the provided properties and the t/s values are averaged. + The returned rows are unique in terms of property combinations. + """ + select_string = ", ".join( + [f"tb.{p}" for p in properties] + ["tb.n_prompt", "tb.n_gen", "AVG(tb.avg_ts)", "AVG(tc.avg_ts)"]) + equal_string = " AND ".join( + [f"tb.{p} = tc.{p}" for p in KEY_PROPERTIES] + [ + f"tb.build_commit = '{hexsha8_baseline}'", f"tc.build_commit = '{hexsha8_compare}'"] + ) + group_order_string = ", ".join([f"tb.{p}" for p in properties] + ["tb.n_gen", "tb.n_prompt"]) + query = (f"SELECT {select_string} FROM test tb JOIN test tc ON {equal_string} " + f"GROUP BY {group_order_string} ORDER BY {group_order_string};") + return cursor.execute(query).fetchall() + + +# If the user provided columns to group the results by, use them: +if known_args.show is not None: + show = known_args.show.split(",") + unknown_cols = [] + for prop in show: + if prop not in KEY_PROPERTIES[:-2]: # Last two values are n_prompt, n_gen. 
+ unknown_cols.append(prop) + if unknown_cols: + print(f"ERROR: Unknown values for --show: {', '.join(unknown_cols)}") + print() + parser.print_usage() + sys.exit(1) + rows_show = get_rows(show) +# Otherwise, select those columns where the values are not all the same: +else: + rows_full = get_rows(KEY_PROPERTIES) + properties_different = [] + for i, kp_i in enumerate(KEY_PROPERTIES): + if kp_i in DEFAULT_SHOW or kp_i == "n_prompt" or kp_i == "n_gen": + continue + for row_full in rows_full: + if row_full[i] != rows_full[0][i]: + properties_different.append(kp_i) + break + + show = [] + # Show CPU and/or GPU by default even if the hardware for all results is the same: + if "gpu_blas" not in properties_different and "n_gpu_layers" not in properties_different: + gpu_blas = bool(rows_full[0][KEY_PROPERTIES.index("gpu_blas")]) + ngl = int(rows_full[0][KEY_PROPERTIES.index("n_gpu_layers")]) + + if not gpu_blas or ngl != 99 and "cpu_info" not in properties_different: + show.append("cpu_info") + if gpu_blas and "gpu_info" not in properties_different: + show.append("gpu_info") + + show += DEFAULT_SHOW + show += properties_different + for prop in DEFAULT_HIDE: + try: + show.remove(prop) + except ValueError: + pass + rows_show = get_rows(show) + +table = [] +for row in rows_show: + n_prompt = int(row[-4]) + n_gen = int(row[-3]) + assert n_prompt == 0 or n_gen == 0 + test_name = f"tg{n_gen}" if n_prompt == 0 else f"pp{n_prompt}" + # Regular columns test name avg t/s values Speedup + # VVVVVVVVVVVVV VVVVVVVVV VVVVVVVVVVVVVV VVVVVVV + table.append(list(row[:-4]) + [test_name] + list(row[-2:]) + [float(row[-1]) / float(row[-2])]) + +# Some a-posteriori fixes to make the table contents prettier: +for bool_property in BOOL_PROPERTIES: + if bool_property in show: + ip = show.index(bool_property) + for row_table in table: + row_table[ip] = "Yes" if int(row_table[ip]) == 1 else "No" + +if "model_size" in show: + ip = show.index("model_size") + for row_table in table: + row_table[ip] = float(row_table[ip]) / 1024 ** 3 + +if "gpu_info" in show: + ip = show.index("gpu_info") + for gns in GPU_NAME_STRIP: + for row_table in table: + row_table[ip] = row_table[ip].replace(gns, "") + +headers = [PRETTY_NAMES[p] for p in show] +headers += ["Test", f"t/s {name_baseline}", f"t/s {name_compare}", "Speedup"] + +print(tabulate( + table, + headers=headers, + floatfmt=".2f", + tablefmt=known_args.output +)) From d34633d8db6c2e400355de4862cd699154ecc73f Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:37:09 +0100 Subject: [PATCH 251/811] clip : support more quantization types (#4846) Uses ggml functions instead of hardcoded names and adds support to quantize into the modern Q-K variants. This is just the bare minimum to get k-types working - a more refined choice of types would be needed to get best quality on low quantizations. I ran a few tests, it doesn't break anything I could notice and a Q6_K ViT works almost as well as Q8_0 but 3 times the inference speed. 
--- examples/llava/clip.cpp | 62 ++++++++++++++++------------------------- 1 file changed, 24 insertions(+), 38 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index cfb79e789..2ae8853d3 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -126,24 +126,7 @@ static struct ggml_tensor * get_tensor(struct ggml_context * ctx, const std::str } static std::string get_ftype(int ftype) { - switch (ftype) { - case 0: - return "f32"; - case 1: - return "f16"; - case 2: - return "q4_0"; - case 3: - return "q4_1"; - case 6: - return "q5_0"; - case 7: - return "q5_1"; - case 8: - return "q8_0"; - default: - throw std::runtime_error(format("%s: Unrecognized file type: %d\n", __func__, ftype)); - } + return ggml_type_name(static_cast(ftype)); } // @@ -533,6 +516,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { buffer_size += n_tensors * 128 /* CLIP PADDING */; clip_ctx * new_clip = new clip_ctx; + #ifdef GGML_USE_CUBLAS new_clip->backend = ggml_backend_cuda_init(0); printf("%s: CLIP using CUDA backend\n", __func__); @@ -543,6 +527,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: CLIP using Metal backend\n", __func__); #endif + if (!new_clip->backend) { new_clip->backend = ggml_backend_cpu_init(); printf("%s: CLIP using CPU backend\n", __func__); @@ -931,26 +916,8 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i ggml_type type = GGML_TYPE_Q4_1; - switch (itype) { - case 2: - type = GGML_TYPE_Q4_0; - break; - case 3: - type = GGML_TYPE_Q4_1; - break; - case 6: - type = GGML_TYPE_Q5_0; - break; - case 7: - type = GGML_TYPE_Q5_1; - break; - case 8: - type = GGML_TYPE_Q8_0; - break; - default: - fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); - return false; - }; + assert(itype < GGML_TYPE_COUNT); + type = static_cast(itype); auto * ctx_clip = clip_model_load(fname_inp, 2); @@ -1010,6 +977,10 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i if (quantize) { new_type = type; + if (new_type >= GGML_TYPE_Q2_K && name.find("embd") != std::string::npos) { + new_type = GGML_TYPE_Q8_0; // ggml_get_rows needs non K type + // fprintf(stderr, "%s: quantizing %s to %s\n", __func__, name.c_str(), ggml_type_name(new_type)); + } const size_t n_elms = ggml_nelements(cur); float * f32_data; @@ -1054,6 +1025,21 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i case GGML_TYPE_Q8_0: { new_size = ggml_quantize_q8_0(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); } break; + case GGML_TYPE_Q2_K: { + new_size = ggml_quantize_q2_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q3_K: { + new_size = ggml_quantize_q3_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q4_K: { + new_size = ggml_quantize_q4_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q5_K: { + new_size = ggml_quantize_q5_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q6_K: { + new_size = ggml_quantize_q6_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; default: { fprintf(stderr, "%s: unsupported quantization type %d\n", __func__, new_type); return false; From 329ff615699d32f596d4ebf8baba654c30064e0d Mon Sep 17 00:00:00 2001 From: Austin <77757836+teleprint-me@users.noreply.github.com> Date: Wed, 10 Jan 2024 08:39:09 -0500 
Subject: [PATCH 252/811] llama : recognize 1B phi models (#4847) This update categorizes models with 24 layers as MODEL_1B, ensuring compatibility with different Phi model variants without impacting existing Phi-2 model functionality. --- llama.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/llama.cpp b/llama.cpp index 8e0717db9..0f09d0c2b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2829,6 +2829,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { + case 24: model.type = e_model::MODEL_1B; break; case 32: model.type = e_model::MODEL_3B; break; default: model.type = e_model::MODEL_UNKNOWN; } From 57d016ba2d46a6e22517a31a75cebb48f9e234b6 Mon Sep 17 00:00:00 2001 From: Brian Date: Thu, 11 Jan 2024 01:09:53 +1100 Subject: [PATCH 253/811] llama : add additional suffixes for model params (#4834) * llm_load_print_meta: Add additional suffixs for model params * Update llama.cpp model param log remove unneeded comments and convert from > to >= --- llama.cpp | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 0f09d0c2b..e1f1932ba 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3146,7 +3146,15 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: rope_finetuned = %s\n", __func__, hparams.rope_finetuned ? "yes" : "unknown"); LLAMA_LOG_INFO("%s: model type = %s\n", __func__, llama_model_type_name(model.type)); LLAMA_LOG_INFO("%s: model ftype = %s\n", __func__, llama_model_ftype_name(model.ftype).c_str()); - LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); + if (ml.n_elements >= 1e12) { + LLAMA_LOG_INFO("%s: model params = %.2f T\n", __func__, ml.n_elements*1e-12); + } else if (ml.n_elements >= 1e9) { + LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); + } else if (ml.n_elements >= 1e6) { + LLAMA_LOG_INFO("%s: model params = %.2f M\n", __func__, ml.n_elements*1e-6); + } else { + LLAMA_LOG_INFO("%s: model params = %.2f K\n", __func__, ml.n_elements*1e-3); + } if (ml.n_bytes < GiB) { LLAMA_LOG_INFO("%s: model size = %.2f MiB (%.2f BPW) \n", __func__, ml.n_bytes/1024.0/1024.0, ml.n_bytes*8.0/ml.n_elements); } else { From cd108e641dbdedd8c5641c4cec1762f751f38136 Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:56:05 -0500 Subject: [PATCH 254/811] server : add a `/health` endpoint (#4860) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. 
* initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line --- examples/server/server.cpp | 199 +++++++++++++++++++++---------------- 1 file changed, 113 insertions(+), 86 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6c7fcd176..1cca634d5 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -26,6 +26,7 @@ #include #include #include +#include #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 @@ -146,6 +147,12 @@ static std::vector base64_decode(const std::string & encoded_string) // parallel // +enum ServerState { + LOADING_MODEL, // Server is starting up, model not fully loaded yet + READY, // Server is ready and model is loaded + ERROR // An error occurred, load_model failed +}; + enum task_type { COMPLETION_TASK, CANCEL_TASK @@ -2453,7 +2460,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } - static std::string random_string() { static const std::string str("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"); @@ -2790,15 +2796,117 @@ int main(int argc, char **argv) {"system_info", llama_print_system_info()}, }); - // load the model - if (!llama.load_model(params)) + httplib::Server svr; + + std::atomic server_state{LOADING_MODEL}; + + svr.set_default_headers({{"Server", "llama.cpp"}, + {"Access-Control-Allow-Origin", "*"}, + {"Access-Control-Allow-Headers", "content-type"}}); + + svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { + ServerState current_state = server_state.load(); + switch(current_state) { + case READY: + res.set_content(R"({"status": "ok"})", "application/json"); + res.status = 200; // HTTP OK + break; + case LOADING_MODEL: + res.set_content(R"({"status": "loading model"})", "application/json"); + res.status = 503; // HTTP Service Unavailable + break; + case ERROR: + res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); + res.status = 500; // HTTP Internal Server Error + break; + } + }); + + svr.set_logger(log_server_request); + + svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) + { + const char fmt[] = "500 Internal Server Error\n%s"; + char buf[BUFSIZ]; + try + { + std::rethrow_exception(std::move(ep)); + } + catch (std::exception &e) + { + snprintf(buf, sizeof(buf), fmt, e.what()); + } + catch (...) 
+ { + snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); + } + res.set_content(buf, "text/plain; charset=utf-8"); + res.status = 500; + }); + + svr.set_error_handler([](const httplib::Request &, httplib::Response &res) + { + if (res.status == 401) + { + res.set_content("Unauthorized", "text/plain; charset=utf-8"); + } + if (res.status == 400) + { + res.set_content("Invalid request", "text/plain; charset=utf-8"); + } + else if (res.status == 404) + { + res.set_content("File Not Found", "text/plain; charset=utf-8"); + res.status = 404; + } + }); + + // set timeouts and change hostname and port + svr.set_read_timeout (sparams.read_timeout); + svr.set_write_timeout(sparams.write_timeout); + + if (!svr.bind_to_port(sparams.hostname, sparams.port)) { + fprintf(stderr, "\ncouldn't bind to server socket: hostname=%s port=%d\n\n", sparams.hostname.c_str(), sparams.port); return 1; } - llama.initialize(); + // Set the base directory for serving static files + svr.set_base_dir(sparams.public_path); - httplib::Server svr; + // to make it ctrl+clickable: + LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); + + std::unordered_map log_data; + log_data["hostname"] = sparams.hostname; + log_data["port"] = std::to_string(sparams.port); + + if (!sparams.api_key.empty()) { + log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + } + + LOG_INFO("HTTP server listening", log_data); + // run the HTTP server in a thread - see comment below + std::thread t([&]() + { + if (!svr.listen_after_bind()) + { + server_state.store(ERROR); + return 1; + } + + return 0; + }); + + // load the model + if (!llama.load_model(params)) + { + server_state.store(ERROR); + return 1; + } else { + llama.initialize(); + server_state.store(READY); + } // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { @@ -2826,10 +2934,6 @@ int main(int argc, char **argv) return false; }; - svr.set_default_headers({{"Server", "llama.cpp"}, - {"Access-Control-Allow-Origin", "*"}, - {"Access-Control-Allow-Headers", "content-type"}}); - // this is only called if no index.html is found in the public --path svr.Get("/", [](const httplib::Request &, httplib::Response &res) { @@ -2937,8 +3041,6 @@ int main(int argc, char **argv) } }); - - svr.Get("/v1/models", [¶ms](const httplib::Request&, httplib::Response& res) { std::time_t t = std::time(0); @@ -3157,81 +3259,6 @@ int main(int argc, char **argv) return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); - svr.set_logger(log_server_request); - - svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) - { - const char fmt[] = "500 Internal Server Error\n%s"; - char buf[BUFSIZ]; - try - { - std::rethrow_exception(std::move(ep)); - } - catch (std::exception &e) - { - snprintf(buf, sizeof(buf), fmt, e.what()); - } - catch (...) 
- { - snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); - } - res.set_content(buf, "text/plain; charset=utf-8"); - res.status = 500; - }); - - svr.set_error_handler([](const httplib::Request &, httplib::Response &res) - { - if (res.status == 401) - { - res.set_content("Unauthorized", "text/plain; charset=utf-8"); - } - if (res.status == 400) - { - res.set_content("Invalid request", "text/plain; charset=utf-8"); - } - else if (res.status == 404) - { - res.set_content("File Not Found", "text/plain; charset=utf-8"); - res.status = 404; - } - }); - - // set timeouts and change hostname and port - svr.set_read_timeout (sparams.read_timeout); - svr.set_write_timeout(sparams.write_timeout); - - if (!svr.bind_to_port(sparams.hostname, sparams.port)) - { - fprintf(stderr, "\ncouldn't bind to server socket: hostname=%s port=%d\n\n", sparams.hostname.c_str(), sparams.port); - return 1; - } - - // Set the base directory for serving static files - svr.set_base_dir(sparams.public_path); - - // to make it ctrl+clickable: - LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - - std::unordered_map log_data; - log_data["hostname"] = sparams.hostname; - log_data["port"] = std::to_string(sparams.port); - - if (!sparams.api_key.empty()) { - log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); - } - - LOG_INFO("HTTP server listening", log_data); - // run the HTTP server in a thread - see comment below - std::thread t([&]() - { - if (!svr.listen_after_bind()) - { - return 1; - } - - return 0; - }); - // GG: if I put the main loop inside a thread, it crashes on the first request when build in Debug!? // "Bus error: 10" - this is on macOS, it does not crash on Linux //std::thread t2([&]() From 5c1980d8d4c4e0c0af77359f81cc44d90b3f250b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:10:34 +0200 Subject: [PATCH 255/811] server : fix build + rename enums (#4870) --- examples/server/server.cpp | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1cca634d5..4a0714997 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -147,15 +147,15 @@ static std::vector base64_decode(const std::string & encoded_string) // parallel // -enum ServerState { - LOADING_MODEL, // Server is starting up, model not fully loaded yet - READY, // Server is ready and model is loaded - ERROR // An error occurred, load_model failed +enum server_state { + SERVER_STATE_LOADING_MODEL, // Server is starting up, model not fully loaded yet + SERVER_STATE_READY, // Server is ready and model is loaded + SERVER_STATE_ERROR // An error occurred, load_model failed }; enum task_type { - COMPLETION_TASK, - CANCEL_TASK + TASK_TYPE_COMPLETION, + TASK_TYPE_CANCEL, }; struct task_server { @@ -1402,7 +1402,7 @@ struct llama_server_context task.data = std::move(data); task.infill_mode = infill; task.embedding_mode = embedding; - task.type = COMPLETION_TASK; + task.type = TASK_TYPE_COMPLETION; task.multitask_id = multitask_id; // when a completion task's prompt array is not a singleton, we split it into multiple requests @@ -1524,7 +1524,7 @@ struct llama_server_context std::unique_lock lock(mutex_tasks); task_server task; task.id = id_gen++; - task.type = CANCEL_TASK; + task.type = TASK_TYPE_CANCEL; task.target_id = task_id; queue_tasks.push_back(task); condition_tasks.notify_one(); @@ -1560,7 +1560,7 @@ struct 
llama_server_context queue_tasks.erase(queue_tasks.begin()); switch (task.type) { - case COMPLETION_TASK: { + case TASK_TYPE_COMPLETION: { llama_client_slot *slot = get_slot(json_value(task.data, "slot_id", -1)); if (slot == nullptr) { @@ -1589,7 +1589,7 @@ struct llama_server_context break; } } break; - case CANCEL_TASK: { // release slot linked with the task id + case TASK_TYPE_CANCEL: { // release slot linked with the task id for (auto & slot : slots) { if (slot.task_id == task.target_id) @@ -2798,24 +2798,24 @@ int main(int argc, char **argv) httplib::Server svr; - std::atomic server_state{LOADING_MODEL}; + std::atomic state{SERVER_STATE_LOADING_MODEL}; svr.set_default_headers({{"Server", "llama.cpp"}, {"Access-Control-Allow-Origin", "*"}, {"Access-Control-Allow-Headers", "content-type"}}); svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { - ServerState current_state = server_state.load(); + server_state current_state = state.load(); switch(current_state) { - case READY: + case SERVER_STATE_READY: res.set_content(R"({"status": "ok"})", "application/json"); res.status = 200; // HTTP OK break; - case LOADING_MODEL: + case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); res.status = 503; // HTTP Service Unavailable break; - case ERROR: + case SERVER_STATE_ERROR: res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); res.status = 500; // HTTP Internal Server Error break; @@ -2891,7 +2891,7 @@ int main(int argc, char **argv) { if (!svr.listen_after_bind()) { - server_state.store(ERROR); + state.store(SERVER_STATE_ERROR); return 1; } @@ -2901,11 +2901,11 @@ int main(int argc, char **argv) // load the model if (!llama.load_model(params)) { - server_state.store(ERROR); + state.store(SERVER_STATE_ERROR); return 1; } else { llama.initialize(); - server_state.store(READY); + state.store(SERVER_STATE_READY); } // Middleware for API key validation From 7a9f75c38b5e62fe27b8a5a3ed823b4a3714024b Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Thu, 11 Jan 2024 02:12:05 -0500 Subject: [PATCH 256/811] server : update readme to document the new `/health` endpoint (#4866) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. * initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line * updated `server` readme to document the `/health` endpoint too --- examples/server/README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/server/README.md b/examples/server/README.md index d85a14f89..dc27e72b9 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -110,6 +110,10 @@ node index.js ``` ## API Endpoints +- **GET** `/health`: Returns the current state of the server: + - `{"status": "loading model"}` if the model is still being loaded. + - `{"status": "error"}` if the model failed to load. + - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. 
- **POST** `/completion`: Given a `prompt`, it returns the predicted completion. From f34432ca1e0b288129390c1db8296a82aaf1e632 Mon Sep 17 00:00:00 2001 From: Erik Scholz Date: Fri, 5 Jan 2024 16:00:00 +0100 Subject: [PATCH 257/811] fix : cuda order of synchronization when setting a buffer (ggml/679) * fix : cuda order of synchronization when setting a buffer * also sync before memcpy --------- Co-authored-by: slaren --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e26260a35..900f7ba4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10184,8 +10184,8 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg ggml_cuda_set_device(ctx->device); CUDA_CHECK(cudaDeviceSynchronize()); - CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); + CUDA_CHECK(cudaDeviceSynchronize()); } static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { From c910e3c28a1caee8cb1398143d582dd9ab697e68 Mon Sep 17 00:00:00 2001 From: Halalaluyafail3 <55773281+Halalaluyafail3@users.noreply.github.com> Date: Tue, 9 Jan 2024 11:16:37 -0500 Subject: [PATCH 258/811] Fix execlp call (ggml/689) NULL can be an integer constant expression with the value zero, in this case the behavior would be undefined because of an incorrect type being passed to the variable arguments. --- ggml.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index adb387100..4a0ec4c44 100644 --- a/ggml.c +++ b/ggml.c @@ -132,7 +132,7 @@ void ggml_print_backtrace(void) { "-ex", "bt -frame-info source-and-location", "-ex", "detach", "-ex", "quit", - NULL); + (char *) NULL); } else { waitpid(pid, NULL, 0); } From e739de790921e6abbc8c70398303cacd74913f61 Mon Sep 17 00:00:00 2001 From: leejet Date: Wed, 10 Jan 2024 21:13:42 +0800 Subject: [PATCH 259/811] ggml : change GGML_MAX_NAME at compile time (ggml/682) * change GGML_MAX_NAME to 128 * allow controlling the value of GGML_MAX_NAME through external macro definitions --- ggml.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.h b/ggml.h index c55e598b4..b6cc85952 100644 --- a/ggml.h +++ b/ggml.h @@ -218,7 +218,9 @@ #define GGML_MAX_PARAMS 2048 #define GGML_MAX_CONTEXTS 64 #define GGML_MAX_SRC 10 +#ifndef GGML_MAX_NAME #define GGML_MAX_NAME 64 +#endif #define GGML_MAX_OP_PARAMS 64 #define GGML_DEFAULT_N_THREADS 4 #define GGML_DEFAULT_GRAPH_SIZE 2048 From 5362e43962e84d61e20b91f34991d7ccaef4a7d5 Mon Sep 17 00:00:00 2001 From: Jack Mousseau Date: Wed, 10 Jan 2024 06:19:19 -0800 Subject: [PATCH 260/811] metal : wrap each operation in debug group (ggml/690) --- ggml-metal.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 6c2a8d04e..161906824 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,6 +1067,8 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst)]]; + const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; const int64_t ne02 = src0 ? 
src0->ne[2] : 0; @@ -2423,6 +2425,8 @@ bool ggml_metal_graph_compute( GGML_ASSERT(false); } } + + [encoder popDebugGroup]; } if (encoder != nil) { From f85a973aa139ae6f37e8b8e1966f1d278b5e0372 Mon Sep 17 00:00:00 2001 From: Timothy Cronin <40186632+4imothy@users.noreply.github.com> Date: Thu, 11 Jan 2024 02:27:48 -0500 Subject: [PATCH 261/811] ggml : remove ggml_cpy_inplace and ggml_cont_inplace (ggml/693) --- ggml.c | 30 ++++++++---------------------- ggml.h | 11 ----------- 2 files changed, 8 insertions(+), 33 deletions(-) diff --git a/ggml.c b/ggml.c index 4a0ec4c44..9c42a45e3 100644 --- a/ggml.c +++ b/ggml.c @@ -4311,13 +4311,13 @@ struct ggml_tensor * ggml_set_2d_inplace( static struct ggml_tensor * ggml_cpy_impl( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b, - bool inplace) { + struct ggml_tensor * b) { GGML_ASSERT(ggml_nelements(a) == ggml_nelements(b)); bool is_node = false; - if (!inplace && (a->grad || b->grad)) { + if (a->grad || b->grad) { + // inplace is false and either one have a grad is_node = true; } @@ -4341,29 +4341,21 @@ struct ggml_tensor * ggml_cpy( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b) { - return ggml_cpy_impl(ctx, a, b, false); -} - -struct ggml_tensor * ggml_cpy_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_cpy_impl(ctx, a, b, true); + return ggml_cpy_impl(ctx, a, b); } // ggml_cont static struct ggml_tensor * ggml_cont_impl( struct ggml_context * ctx, - struct ggml_tensor * a, - bool inplace) { + struct ggml_tensor * a) { bool is_node = false; - if (!inplace && a->grad) { + if (a->grad) { is_node = true; } - struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); ggml_format_name(result, "%s (cont)", a->name); result->op = GGML_OP_CONT; @@ -4376,13 +4368,7 @@ static struct ggml_tensor * ggml_cont_impl( struct ggml_tensor * ggml_cont( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_cont_impl(ctx, a, false); -} - -struct ggml_tensor * ggml_cont_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a) { - return ggml_cont_impl(ctx, a, true); + return ggml_cont_impl(ctx, a); } // make contiguous, with new shape diff --git a/ggml.h b/ggml.h index b6cc85952..127dcef1d 100644 --- a/ggml.h +++ b/ggml.h @@ -1163,22 +1163,11 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); - // a -> b, in-place, return view(b) - GGML_API struct ggml_tensor * ggml_cpy_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b); - // make contiguous GGML_API struct ggml_tensor * ggml_cont( struct ggml_context * ctx, struct ggml_tensor * a); - // make contiguous, in-place - GGML_API struct ggml_tensor * ggml_cont_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a); - // make contiguous, with new shape GGML_API struct ggml_tensor * ggml_cont_1d( struct ggml_context * ctx, From 3267c2abc72e34608224408ace3c048831050f97 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:34:59 +0200 Subject: [PATCH 262/811] metal : fix deprecation warning (ggml/690) --- ggml-metal.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index 161906824..82d68cd1b 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,7 +1067,7 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } - [encoder pushDebugGroup:[NSString 
stringWithCString:ggml_op_desc(dst)]]; + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; From 64802ec00d6383784a9dacf616095eaced16c3c3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:39:08 +0200 Subject: [PATCH 263/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index fe7f3202f..3e2c579d5 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -f96711108d55bdbbd277e6be07204dce6a94fb93 +979cc23b345006504cfc1f67c0fdf627805e3319 From 2a7c94db5fb67b2f8882d2d16a11bf5d8d12d397 Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Thu, 11 Jan 2024 14:31:52 +0000 Subject: [PATCH 264/811] metal : put encoder debug group behind a define (#4873) --- ggml-metal.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 82d68cd1b..9698e5a79 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,7 +1067,9 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } +#ifndef GGML_METAL_NDEBUG [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; +#endif const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; @@ -2426,7 +2428,9 @@ bool ggml_metal_graph_compute( } } +#ifndef GGML_METAL_NDEBUG [encoder popDebugGroup]; +#endif } if (encoder != nil) { From 2f043328e3116724d15b915b5c6078e2df860a69 Mon Sep 17 00:00:00 2001 From: Isaac McFadyen Date: Thu, 11 Jan 2024 09:33:26 -0500 Subject: [PATCH 265/811] server : fix typo in model name (#4876) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 4a0714997..860e4e9ae 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2515,7 +2515,7 @@ json oaicompat_completion_params_parse( // // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; - llama_params["model"] = json_value(body, "model", std::string("uknown")); + llama_params["model"] = json_value(body, "model", std::string("unknown")); llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); From 43f76bf1c362c067fce46bb8dcda0b64af8a9533 Mon Sep 17 00:00:00 2001 From: pudepiedj Date: Thu, 11 Jan 2024 16:14:52 +0000 Subject: [PATCH 266/811] main : print total token count and tokens consumed so far (#4874) * Token count changes * Add show token count * Updating before PR * Two requested changes * Move param def posn --- common/common.cpp | 8 ++++++++ common/common.h | 2 +- examples/main/main.cpp | 6 +++++- llama.cpp | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 4e89fe516..bfcd6d4df 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -630,6 +630,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.ppl_stride = std::stoi(argv[i]); + } else if (arg == "-stc" || arg == "--show_token_count") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.token_interval = std::stoi(argv[i]); } else if (arg == "--ppl-output-type") { if 
(++i >= argc) { invalid_param = true; @@ -944,6 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" -stc N --show_token_count N\n"); + printf(" show consumed tokens every N tokens\n"); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); diff --git a/common/common.h b/common/common.h index e2bbfc258..a295e88b0 100644 --- a/common/common.h +++ b/common/common.h @@ -64,6 +64,7 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. int32_t grp_attn_n = 1; // group-attention factor int32_t grp_attn_w = 512; // group-attention width + int32_t token_interval = 512; // show token count every 512 tokens float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor @@ -242,4 +243,3 @@ void dump_kv_cache_view(const llama_kv_cache_view & view, int row_size = 80); // Dump the KV cache view showing individual sequences in each cell (long output). void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size = 40); - diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 5ea67051f..1f35febbd 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -500,7 +500,7 @@ int main(int argc, char ** argv) { while ((n_remain != 0 && !is_antiprompt) || params.interactive) { // predict if (!embd.empty()) { - // Note: n_ctx - 4 here is to match the logic for commandline prompt handling via + // Note: (n_ctx - 4) here is to match the logic for commandline prompt handling via // --prompt or --file which uses the same value. 
int max_embd_size = n_ctx - 4; @@ -650,6 +650,10 @@ int main(int argc, char ** argv) { n_past += n_eval; LOG("n_past = %d\n", n_past); + // Display total tokens alongside total time + if (n_past % params.token_interval == 0) { + printf("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx); + } } if (!embd.empty() && !path_session.empty()) { diff --git a/llama.cpp b/llama.cpp index e1f1932ba..aaadfa444 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10921,7 +10921,7 @@ void llama_print_timings(struct llama_context * ctx) { __func__, timings.t_p_eval_ms, timings.n_p_eval, timings.t_p_eval_ms / timings.n_p_eval, 1e3 / timings.t_p_eval_ms * timings.n_p_eval); LLAMA_LOG_INFO("%s: eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n", __func__, timings.t_eval_ms, timings.n_eval, timings.t_eval_ms / timings.n_eval, 1e3 / timings.t_eval_ms * timings.n_eval); - LLAMA_LOG_INFO("%s: total time = %10.2f ms\n", __func__, (timings.t_end_ms - timings.t_start_ms)); + LLAMA_LOG_INFO("%s: total time = %10.2f ms / %5d tokens\n", __func__, (timings.t_end_ms - timings.t_start_ms), (timings.n_p_eval + timings.n_eval)); } void llama_reset_timings(struct llama_context * ctx) { From d8d90aa343c22fe01429d3540e47ded87e9dcb9d Mon Sep 17 00:00:00 2001 From: Someone Date: Thu, 11 Jan 2024 17:22:34 +0000 Subject: [PATCH 267/811] ci: nix-flake-update: new token with pr permissions (#4879) * ci: nix-flake-update: new token with pr permissions --------- Co-authored-by: Georgi Gerganov --- .github/workflows/nix-flake-update.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nix-flake-update.yml b/.github/workflows/nix-flake-update.yml index fa9360841..3a6a96e26 100644 --- a/.github/workflows/nix-flake-update.yml +++ b/.github/workflows/nix-flake-update.yml @@ -19,4 +19,4 @@ jobs: pr-labels: | nix pr-reviewers: philiptaron,SomeoneSerge - token: ${{ secrets.GITHUB_TOKEN }} + token: ${{ secrets.FLAKE_TOKEN }} From eab67950068e4b125007d027232c47d2a5831cd0 Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:41:39 -0500 Subject: [PATCH 268/811] server : add `LOG_INFO` when model is successfully loaded (#4881) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. 
* initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line * updated `server` readme to document the `/health` endpoint too * used LOG_INFO after successful model loading --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 860e4e9ae..51a4b689f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2906,6 +2906,7 @@ int main(int argc, char **argv) } else { llama.initialize(); state.store(SERVER_STATE_READY); + LOG_INFO("model loaded", {}); } // Middleware for API key validation From 27379455c38cb13f24de92dbd6fcdd04eeb1b9d9 Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Thu, 11 Jan 2024 12:51:17 -0500 Subject: [PATCH 269/811] server : support for multiple api keys (#4864) * server: added support for multiple api keys, added loading api keys from file * minor: fix whitespace * added file error handling to --api-key-file, changed code to better reflect current style * server: update README.md for --api-key-file --------- Co-authored-by: Michael Coppola --- examples/server/README.md | 3 ++- examples/server/server.cpp | 36 ++++++++++++++++++++++++++++++------ 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index dc27e72b9..fd3034b99 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -23,7 +23,8 @@ Command line options: - `--host`: Set the hostname or ip address to listen. Default `127.0.0.1`. - `--port`: Set the port to listen. Default: `8080`. - `--path`: path from which to serve static files (default examples/server/public) -- `--api-key`: Set an api key for request authorization. By default the server responds to every request. With an api key set, the requests must have the Authorization header set with the api key as Bearer token. +- `--api-key`: Set an api key for request authorization. By default the server responds to every request. With an api key set, the requests must have the Authorization header set with the api key as Bearer token. May be used multiple times to enable multiple valid keys. +- `--api-key-file`: path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access. May be used in conjunction with `--api-key`'s. - `--embedding`: Enable embedding extraction, Default: disabled. - `-np N`, `--parallel N`: Set the number of slots for process requests (default: 1) - `-cb`, `--cont-batching`: enable continuous batching (a.k.a dynamic batching) (default: disabled) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 51a4b689f..345004fa1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -39,7 +39,7 @@ using json = nlohmann::json; struct server_params { std::string hostname = "127.0.0.1"; - std::string api_key; + std::vector api_keys; std::string public_path = "examples/server/public"; int32_t port = 8080; int32_t read_timeout = 600; @@ -2021,6 +2021,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); printf(" --api-key API_KEY optional api key to enhance server security. 
If set, requests must include this key for access.\n"); + printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); @@ -2081,7 +2082,28 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - sparams.api_key = argv[i]; + sparams.api_keys.push_back(argv[i]); + } + else if (arg == "--api-key-file") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::ifstream key_file(argv[i]); + if (!key_file) { + fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); + invalid_param = true; + break; + } + std::string key; + while (std::getline(key_file, key)) { + if (key.size() > 0) { + sparams.api_keys.push_back(key); + } + } + key_file.close(); } else if (arg == "--timeout" || arg == "-to") { @@ -2881,8 +2903,10 @@ int main(int argc, char **argv) log_data["hostname"] = sparams.hostname; log_data["port"] = std::to_string(sparams.port); - if (!sparams.api_key.empty()) { - log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + if (sparams.api_keys.size() == 1) { + log_data["api_key"] = "api_key: ****" + sparams.api_keys[0].substr(sparams.api_keys[0].length() - 4); + } else if (sparams.api_keys.size() > 1) { + log_data["api_key"] = "api_key: " + std::to_string(sparams.api_keys.size()) + " keys loaded"; } LOG_INFO("HTTP server listening", log_data); @@ -2912,7 +2936,7 @@ int main(int argc, char **argv) // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { // If API key is not set, skip validation - if (sparams.api_key.empty()) { + if (sparams.api_keys.empty()) { return true; } @@ -2921,7 +2945,7 @@ int main(int argc, char **argv) std::string prefix = "Bearer "; if (auth_header.substr(0, prefix.size()) == prefix) { std::string received_api_key = auth_header.substr(prefix.size()); - if (received_api_key == sparams.api_key) { + if (std::find(sparams.api_keys.begin(), sparams.api_keys.end(), received_api_key) != sparams.api_keys.end()) { return true; // API key is valid } } From 4330bd83feb39683de4bd7a34cfcf672ff8ac3e4 Mon Sep 17 00:00:00 2001 From: Laura Date: Thu, 11 Jan 2024 19:02:48 +0100 Subject: [PATCH 270/811] server : implement credentialed CORS (#4514) * Implement credentialed CORS according to MDN * Fix syntax error * Move validate_api_key up so it is defined before its first usage --- examples/server/server.cpp | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 345004fa1..031824e14 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2822,9 +2822,15 @@ int main(int argc, char **argv) std::atomic state{SERVER_STATE_LOADING_MODEL}; - svr.set_default_headers({{"Server", "llama.cpp"}, - {"Access-Control-Allow-Origin", "*"}, - {"Access-Control-Allow-Headers", "content-type"}}); + svr.set_default_headers({{"Server", "llama.cpp"}}); + + // CORS preflight + svr.Options(R"(.*)", [](const httplib::Request &req, httplib::Response &res) { + 
res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + res.set_header("Access-Control-Allow-Credentials", "true"); + res.set_header("Access-Control-Allow-Methods", "POST"); + res.set_header("Access-Control-Allow-Headers", "*"); + }); svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { server_state current_state = state.load(); @@ -2987,9 +2993,9 @@ int main(int argc, char **argv) return false; }); - svr.Get("/props", [&llama](const httplib::Request & /*req*/, httplib::Response &res) + svr.Get("/props", [&llama](const httplib::Request & req, httplib::Response &res) { - res.set_header("Access-Control-Allow-Origin", "*"); + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); json data = { { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() } @@ -2999,6 +3005,7 @@ int main(int argc, char **argv) svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3066,8 +3073,9 @@ int main(int argc, char **argv) } }); - svr.Get("/v1/models", [¶ms](const httplib::Request&, httplib::Response& res) + svr.Get("/v1/models", [¶ms](const httplib::Request& req, httplib::Response& res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); std::time_t t = std::time(0); json models = { @@ -3085,9 +3093,11 @@ int main(int argc, char **argv) res.set_content(models.dump(), "application/json; charset=utf-8"); }); + // TODO: add mount point without "/v1" prefix -- how? svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3161,6 +3171,7 @@ int main(int argc, char **argv) svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3233,6 +3244,7 @@ int main(int argc, char **argv) svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); std::vector tokens; if (body.count("content") != 0) @@ -3245,6 +3257,7 @@ int main(int argc, char **argv) svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); std::string content; if (body.count("tokens") != 0) @@ -3259,6 +3272,7 @@ int main(int argc, char **argv) svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); json prompt; if (body.count("content") != 0) From 3ba5b8ca8e6181a5c712c5b77595a29f1d3e2b97 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 21:31:31 +0200 Subject: [PATCH 271/811] swift : pin ggml commit + remove ggml.h from spm-headers (#4878) ggml-ci --- Package.swift | 2 +- spm-headers/ggml.h | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 120000 spm-headers/ggml.h diff --git a/Package.swift b/Package.swift 
index 583e2e276..59191da45 100644 --- a/Package.swift +++ b/Package.swift @@ -14,7 +14,7 @@ let package = Package( .library(name: "llama", targets: ["llama"]), ], dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", .branch("master")) + .package(url: "https://github.com/ggerganov/ggml.git", .revision("979cc23b345006504cfc1f67c0fdf627805e3319")) ], targets: [ .target( diff --git a/spm-headers/ggml.h b/spm-headers/ggml.h deleted file mode 120000 index 39215298f..000000000 --- a/spm-headers/ggml.h +++ /dev/null @@ -1 +0,0 @@ -../ggml.h \ No newline at end of file From 49662cbed3e95f5976c070b85b9fd53fd577038d Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Thu, 11 Jan 2024 20:39:39 +0100 Subject: [PATCH 272/811] ggml : SOTA 2-bit quants (add IQ2_XS) (#4856) * iq2_xs: basics * iq2_xs: this should have been in the basics * iq2_xs: CUDA and scalar CPU works * iq2_xs: WIP Metal * iq2_xs: Metal now works * iq2_xs: working, but dog slow, ARM_NEON dot product * iq2_xs: better ARM_NEON dot product We are now at 19.5 t/s for TG-128 and 61 t/s for PP-512 when running on the CPU. * iq2_xs: AVX2 dot product - 19.5 t/s * iq2_xs: faster AVX2 dit product 21.4 t/s for TG-128, 59.2 t/s for PP-512. The latter is 2x compared to the previous version. * iq2_xs: had forgotten to delete iq2-data.h * Add llama enum for IQ2_XS --------- Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 232 +++++++++++++++++++++- ggml-metal.m | 42 +++- ggml-metal.metal | 378 +++++++++++++++++++++++++++++++++++- ggml-quants.c | 360 +++++++++++++++++++++++++++++++++- ggml-quants.h | 12 ++ ggml.c | 30 ++- ggml.h | 3 + llama.cpp | 3 + llama.h | 1 + tests/test-quantize-fns.cpp | 5 +- 10 files changed, 1038 insertions(+), 28 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 900f7ba4a..dd19699f6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -486,6 +486,15 @@ typedef struct { } block_iq2_xxs; static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); +#define QR2_XS 8 +#define QI2_XS (QK_K / (4*QR2_XS)) +typedef struct { + half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1328,7 +1337,7 @@ static __global__ void dequantize_block_q6_K(const void * __restrict__ vx, dst_t #endif } -static const __device__ uint64_t kgrid_iq2xxs[256] = { +static const __device__ uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -1395,6 +1404,137 @@ static const __device__ uint64_t kgrid_iq2xxs[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +static const __device__ uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 
0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 
0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 
0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + static const __device__ uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -1439,7 +1579,7 @@ static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, ds dst_t * y = yy + i*QK_K + 32*ib + 8*il; const uint16_t * q2 = x[i].qs + 4*ib; const uint8_t * aux8 = (const uint8_t *)q2; - const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[il]); + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[il]); const uint32_t aux32 = q2[2] | (q2[3] << 16); const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.25f; const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*il) & 127]; @@ -1450,6 +1590,28 @@ static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq2_xs(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq2_xs * x = (const block_iq2_xs *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[il] & 511)); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib] >> 4*(il/2)) & 0xf)) * 0.25f; + const uint8_t signs = ksigns_iq2xs[q2[il] >> 9]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); +#else + assert(false); +#endif + +} + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -3996,7 +4158,7 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( uint32_t aux32 = q2[2] | (q2[3] << 16); int sumi = 0; for (int l = 0; l < 4; ++l) { - const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[l]); + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); const uint8_t signs = ksigns_iq2xs[aux32 & 127]; for (int j = 0; j < 8; ++j) { sumi += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1 : 1); @@ -4012,8 +4174,8 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( const int il = iqs%2; const uint16_t * q2 = bq2->qs + 4*ib32; const uint8_t * aux8 = (const uint8_t *)q2; - const uint8_t * grid1 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); - const uint8_t * grid2 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + const uint8_t * grid1 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+1]); const uint32_t aux32 = q2[2] | (q2[3] << 16); const float d = (float)bq2->d * (0.5f + (aux32 >> 28)) * (float)bq8_1[ib32].ds.x * 0.25f; const uint8_t signs1 = ksigns_iq2xs[(aux32 >> 14*il) & 127]; @@ -4032,6 +4194,42 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( #endif } +static __device__ __forceinline__ float vec_dot_iq2_xs_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq2_xs * bq2 = (const block_iq2_xs *) vbq; + + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + const uint8_t ls1 = bq2->scales[ib32] & 0xf; + const uint8_t ls2 = bq2->scales[ib32] >> 4; + int sumi1 = 0; + for (int l = 0; l < 2; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + int sumi2 = 0; + for (int l = 2; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi2 += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + const float d = (float)bq2->d * (float)bq8_1[ib32].ds.x * 0.25f; + return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6035,6 +6233,12 @@ static void dequantize_row_iq2_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq2_xxs<<>>(vx, y); } +template +static void dequantize_row_iq2_xs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq2_xs<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6065,6 +6269,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_q6_K_cuda; case GGML_TYPE_IQ2_XXS: return dequantize_row_iq2_xxs_cuda; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6096,6 +6302,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_q6_K_cuda; case GGML_TYPE_IQ2_XXS: return dequantize_row_iq2_xxs_cuda; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -6299,6 +6507,15 @@ static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, floa <<>>(vx, vy, dst, ncols, nrows); } +static void mul_mat_vec_iq2_xs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + 
const dim3 block_nums(block_num_y, 1, 1); + const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); + mul_mat_vec_q + <<>>(vx, vy, dst, ncols, nrows); +} + static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { @@ -7871,6 +8088,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: return max_compute_capability >= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -7892,6 +8110,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: return max_compute_capability >= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -7945,6 +8164,9 @@ static void ggml_cuda_op_mul_mat_vec_q( case GGML_TYPE_IQ2_XXS: mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); break; + case GGML_TYPE_IQ2_XS: + mul_mat_vec_iq2_xs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; diff --git a/ggml-metal.m b/ggml-metal.m index 9698e5a79..6e5594432 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -89,6 +89,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q6_K); GGML_METAL_DECL_KERNEL(get_rows_i32); GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); + GGML_METAL_DECL_KERNEL(get_rows_iq2_xs); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -108,6 +109,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); @@ -124,6 +126,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); @@ -137,6 +140,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); @@ -150,6 +154,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xs_f32); GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); @@ -385,6 +390,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q6_K); GGML_METAL_ADD_KERNEL(get_rows_i32); GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); + GGML_METAL_ADD_KERNEL(get_rows_iq2_xs); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -404,6 +410,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); + 
GGML_METAL_ADD_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); @@ -420,6 +427,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); @@ -434,6 +442,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); @@ -447,6 +456,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xs_f32); } GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); @@ -513,6 +523,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q6_K); GGML_METAL_DEL_KERNEL(get_rows_i32); GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); + GGML_METAL_DEL_KERNEL(get_rows_iq2_xs); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -532,6 +543,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); @@ -548,6 +560,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); @@ -562,6 +575,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); @@ -575,6 +589,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xs_f32); } GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); @@ -1561,6 +1576,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xs_f32]; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } [encoder 
setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1679,6 +1695,12 @@ bool ggml_metal_graph_compute( nth1 = 16; [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1712,12 +1734,12 @@ bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - //src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src0t == GGML_TYPE_IQ2_XXS) { - [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_Q4_K) { @@ -1810,6 +1832,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xs_f32]; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1931,6 +1954,12 @@ bool ggml_metal_graph_compute( nth1 = 16; [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1980,12 +2009,12 @@ bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - //src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src2t == GGML_TYPE_IQ2_XXS) { - [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { + const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src2t == GGML_TYPE_Q4_K) { @@ -2026,6 +2055,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xs]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 229efb8b6..029578dc5 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2452,6 +2452,13 @@ typedef struct { } block_iq2_xxs; // 66 bytes / block for QK_K = 256, so 2.0625 bpw +typedef struct { + half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +// 74 bytes / block for QK_K = 256, so 2.3125 bpw + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3476,7 +3483,7 @@ kernel void kernel_mul_mv_q6_K_f32( // ======================= "True" 2-bit -constexpr constant static uint64_t kgrid_iq2xxs[256] = { +constexpr constant static uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -3543,6 +3550,137 @@ constexpr constant static uint64_t kgrid_iq2xxs[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +constexpr constant static uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 
0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 
0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 
0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + constexpr constant static uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -3600,7 +3738,7 @@ void kernel_mul_mv_iq2_xxs_f32_impl( { int nval = 4; int pos = (32*sgitg + tiisg)*nval; - for (int i = 0; i < nval; ++i) values[pos + i] = kgrid_iq2xxs[pos + i]; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xxs_grid[pos + i]; nval = 2; pos = (32*sgitg + tiisg)*nval; for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; @@ -3689,6 +3827,149 @@ kernel void kernel_mul_mv_iq2_xxs_f32( kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq2_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xs * x = (device const block_iq2_xs *) src0 + ib_row + offset0; + device const float * y = (device const float *) 
src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 512); + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + +#if QK_K == 256 + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const uint8_t * sc = xr->scales + ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint8_t ls1 = sc[0] & 0xf; + const uint8_t ls2 = sc[0] >> 4; + const float d1 = db * (0.5f + ls1); + const float d2 = db * (0.5f + ls2); + + float sum1 = 0, sum2 = 0; + for (int l = 0; l < 2; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum1 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + for (int l = 2; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum2 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d1 * sum1 + d2 * sum2; + + dh += nb*sizeof(block_iq2_xs)/2; + q2 += nb*sizeof(block_iq2_xs)/2; + sc += nb*sizeof(block_iq2_xs); + } + + y4 += 32 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xs_f32")]] +kernel void kernel_mul_mv_iq2_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -3973,18 +4254,39 @@ void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x const uint32_t aux32_s = q2[2] | (q2[3] << 16); thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g; const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f; - constant uint8_t * grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + constant uint8_t * grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+0]); uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127]; for (int i = 0; i < 8; ++i) { reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); } - grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+1]); signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127]; for (int i = 0; i < 8; ++i) { reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); } } +template +void dequantize_iq2_xs(device const block_iq2_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint16_t * q2 = xb->qs + 4*ib32; + const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+0] & 511)); + uint8_t signs = ksigns_iq2xs[q2[2*il+0] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } + grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+1] & 511)); + signs = ksigns_iq2xs[q2[2*il+1] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? 
-1.f : 1.f); + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -4525,6 +4827,7 @@ template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -4562,6 +4865,7 @@ template [[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -4611,6 +4915,7 @@ template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -5448,3 +5753,68 @@ kernel void kernel_mul_mv_id_iq2_xxs_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq2_xs_f32")]] +kernel void kernel_mul_mv_id_iq2_xs_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq2_xs_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index d497e6de9..a24b4b244 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2342,15 +2342,7 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * // ====================== "True" 2-bit (de)-quantization -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { - (void)x; - 
(void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - -static const uint64_t iq2xxs_grid[256] = { +static const uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -2417,6 +2409,137 @@ static const uint64_t iq2xxs_grid[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +static const uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 
0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 
0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 
0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + static const uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -2427,8 +2550,17 @@ static const uint8_t ksigns_iq2xs[128] = { 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, }; + static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; +void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2472,6 +2604,58 @@ size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_ return (n/QK_K*sizeof(block_iq2_xxs)); } +// ====================== 2.3125 bpw (de)-quantization + +void quantize_row_iq2_xs_reference(const float * restrict x, block_iq2_xs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + +void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + float db[2]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + db[0] = d * (0.5f + (x[i].scales[ib32] & 0xf)) * 0.25f; + db[1] = d * (0.5f + (x[i].scales[ib32] >> 4)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (x[i].qs[4*ib32 + l] & 511)); + const uint8_t signs = ksigns_iq2xs[x[i].qs[4*ib32 + l] >> 9]; + for (int j = 0; j < 8; ++j) { + y[j] = db[l/2] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + y += 8; + } + } + } +} + +void quantize_row_iq2_xs(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq2_xs * restrict y = vy; + quantize_row_iq2_xs_reference(x, y, k); +} + +size_t ggml_quantize_iq2_xs(const float * src, void * dst, int n, int k, int64_t * hist) { + assert(k % QK_K == 0); + (void)hist; // TODO: collect histograms + + for (int j = 0; j < n; j += k) { + block_iq2_xs * restrict y = (block_iq2_xs *)dst + j/QK_K; + quantize_row_iq2_xs_reference(src + j, y, k); + } + return (n/QK_K*sizeof(block_iq2_xs)); +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -7357,3 +7541,161 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res *s = 0.125f * sumf; #endif } + +void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + assert(n % QK_K == 0); + + const block_iq2_xs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + int8x16x4_t q2u; + int8x16x4_t q2s; + int8x16x4_t q8b; + + int32x4x4_t scales32; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + const uint8x8_t scales8 = vld1_u8(x[i].scales); + const uint8x8_t scales_l = vand_u8(scales8, vdup_n_u8(0xf)); + const uint8x8_t scales_h = vshr_n_u8(scales8, 4); + uint8x16_t scales = vcombine_u8(vzip1_u8(scales_l, scales_h), vzip2_u8(scales_l, scales_h)); + scales = vaddq_u8(vshlq_n_u8(scales, 1), vdupq_n_u8(1)); + const uint16x8_t scales1 = vmovl_u8(vget_low_u8(scales)); + const uint16x8_t scales2 = vmovl_u8(vget_high_u8(scales)); + scales32.val[0] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales1))); + scales32.val[1] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales1))); + scales32.val[2] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales2))); + scales32.val[3] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales2))); + int32x4_t sumi = vdupq_n_s32(0); + for (int ib64 = 0; ib64 < QK_K/64; ++ib64) { + q8b = vld1q_s8_x4(q8); q8 += 64; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[0] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[1] & 511)))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[2] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[3] & 511)))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[4] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[5] & 511)))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[6] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[7] & 511)))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[0] >> 9))), vld1_s8((const void *)(signs64 + (q2[1] >> 9)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[2] >> 9))), vld1_s8((const void *)(signs64 + (q2[3] >> 9)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[4] >> 9))), vld1_s8((const void *)(signs64 + (q2[5] >> 9)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[6] >> 9))), vld1_s8((const void *)(signs64 + (q2[7] >> 9)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = 
vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]); + const int32x4_t p2 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[1], q8b.val[1]); + const int32x4_t p3 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]); + const int32x4_t p4 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[3], q8b.val[3]); + const int32x4_t p = vpaddq_s32(vpaddq_s32(p1, p2), vpaddq_s32(p3, p4)); + sumi = vmlaq_s32(sumi, p, scales32.val[ib64]); + q2 += 8; + } + sumf += d*vaddvq_s32(sumi); + } + *s = 0.125f * sumf; + +#elif defined(__AVX2__) + + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + const __m128i m511 = _mm_set1_epi16(511); + const __m128i m127 = _mm_set1_epi16(127); + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint64_t aux64; + + // somewhat hacky, but gives a significant boost in performance + __m128i aux_gindex, aux_sindex; + const uint16_t * gindex = (const uint16_t *)&aux_gindex; + const uint16_t * sindex = (const uint16_t *)&aux_sindex; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + __m128i stmp = _mm_set1_epi64x(aux64); + stmp = _mm_unpacklo_epi8(_mm_and_si128(stmp, m4), _mm_and_si128(_mm_srli_epi16(stmp, 4), m4)); + const __m128i scales = _mm_add_epi8(_mm_slli_epi16(stmp, 1), m1); + + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m128i q2_data = _mm_loadu_si128((const __m128i*)q2); q2 += 8; + aux_gindex = _mm_and_si128(q2_data, m511); + aux_sindex = _mm_and_si128(_mm_srli_epi16(q2_data, 9), m127); + const __m256i q2_1 = _mm256_set_epi64x(iq2xs_grid[gindex[3]], iq2xs_grid[gindex[2]], iq2xs_grid[gindex[1]], iq2xs_grid[gindex[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xs_grid[gindex[7]], iq2xs_grid[gindex[6]], iq2xs_grid[gindex[5]], iq2xs_grid[gindex[4]]); + const __m256i s2_1 = _mm256_set_epi64x(signs64[sindex[3]], signs64[sindex[2]], signs64[sindex[1]], signs64[sindex[0]]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[sindex[7]], signs64[sindex[6]], signs64[sindex[5]], signs64[sindex[4]]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + + const __m256i sc1 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+0))); + const __m256i sc2 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+1))); + + sumi1 = _mm256_add_epi32(sumi1, _mm256_madd_epi16(dot1, sc1)); + sumi2 = _mm256_add_epi32(sumi2, _mm256_madd_epi16(dot2, sc2)); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const uint8_t * restrict sc = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + 
const uint16_t ls1 = 2*(sc[ib32] & 0xf) + 1; + const uint16_t ls2 = 2*(sc[ib32] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 2; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls1; + sumi = 0; + for (int l = 2; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls2; + q2 += 4; + } + sumf += d * bsum; + } + *s = 0.125f * sumf; +#endif +} diff --git a/ggml-quants.h b/ggml-quants.h index 8dd911d41..df5e7ae80 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -174,6 +174,14 @@ typedef struct { } block_iq2_xxs; static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); +// 2.3125 bpw quants +typedef struct { + ggml_fp16_t d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + // Quantization void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k); @@ -189,6 +197,7 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k); +void quantize_row_iq2_xs_reference (const float * restrict x, block_iq2_xs * restrict y, int k); void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); @@ -204,6 +213,7 @@ void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k); +void quantize_row_iq2_xs (const float * restrict x, void * restrict y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); @@ -220,6 +230,7 @@ void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k); void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k); +void dequantize_row_iq2_xs (const block_iq2_xs * restrict x, float * restrict y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); @@ -234,3 +245,4 @@ void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict 
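// A minimal standalone sketch, separate from the patch above, of the IQ2_XS
// layout it implements: each 16-bit code word packs a 9-bit index into the
// 512-entry iq2xs_grid (8 weights) plus a 7-bit index into the sign table,
// block scales are stored as 4-bit pairs with effective value 2*s + 1, and
// with QK_K assumed to be 256 the block size comes out to the advertised
// 2.3125 bits per weight. The grid and sign tables are omitted here and the
// numeric values below are arbitrary examples.
#include <cstdint>
#include <cstdio>

int main() {
    const int QK_K = 256;
    // fp16 super-block scale + QK_K/8 uint16 code words + QK_K/32 packed scale bytes
    const int block_bytes = 2 + (QK_K/8)*2 + QK_K/32;         // 2 + 64 + 8 = 74
    printf("bits per weight: %.4f\n", 8.0*block_bytes/QK_K);  // 2.3125

    const uint16_t code = 0x1234;             // one example code word from qs[]
    printf("grid index: %d\n", code & 511);   // low 9 bits, one of 512 grid rows
    printf("sign index: %d\n", code >> 9);    // high 7 bits, one of 128 sign patterns

    const uint8_t sc = 0x5A;                  // one scales[] byte = two 4-bit scales
    printf("block scales: %d %d\n", 2*(sc & 0xf) + 1, 2*(sc >> 4) + 1);
    return 0;
}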
vy); void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); diff --git a/ggml.c b/ggml.c index 9c42a45e3..d2a8c0478 100644 --- a/ggml.c +++ b/ggml.c @@ -584,6 +584,17 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, + [GGML_TYPE_IQ2_XS] = { + .type_name = "iq2_xs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xs, + .from_float = quantize_row_iq2_xs, + .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xs_reference, + .vec_dot = ggml_vec_dot_iq2_xs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2123,6 +2134,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_Q5_K: wtype = GGML_TYPE_Q5_K; break; case GGML_FTYPE_MOSTLY_Q6_K: wtype = GGML_TYPE_Q6_K; break; case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; + case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7435,6 +7447,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7700,6 +7713,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -7815,6 +7829,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: default: { GGML_ASSERT(false); @@ -10457,6 +10472,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -10632,6 +10648,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: default: { GGML_ASSERT(false); @@ -10827,6 +10844,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11464,6 +11482,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11539,6 +11558,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -18660,6 +18680,12 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); } break; + case GGML_TYPE_IQ2_XS: + { + GGML_ASSERT(start % QK_K == 0); + block_iq2_xs * block = (block_iq2_xs*)dst + start / QK_K; + result = ggml_quantize_iq2_xs(src + start, block, n, n, hist); + } break; case GGML_TYPE_F16: { int elemsize = 
sizeof(ggml_fp16_t); @@ -19015,8 +19041,8 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p (int64_t) info->ne[3]; if (ne % ggml_blck_size(info->type) != 0) { - fprintf(stderr, "%s: tensor '%s' number of elements (%" PRId64 ") is not a multiple of block size (%d)\n", - __func__, info->name.data, ne, ggml_blck_size(info->type)); + fprintf(stderr, "%s: tensor '%s' of type %d (%s) number of elements (%" PRId64 ") is not a multiple of block size (%d)\n", + __func__, info->name.data, (int)info->type, ggml_type_name(info->type), ne, ggml_blck_size(info->type)); fclose(file); gguf_free(ctx); return NULL; diff --git a/ggml.h b/ggml.h index 127dcef1d..93b42a27d 100644 --- a/ggml.h +++ b/ggml.h @@ -342,6 +342,7 @@ extern "C" { GGML_TYPE_Q6_K = 14, GGML_TYPE_Q8_K = 15, GGML_TYPE_IQ2_XXS = 16, + GGML_TYPE_IQ2_XS = 17, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -377,6 +378,7 @@ extern "C" { GGML_FTYPE_MOSTLY_Q5_K = 13, // except 1d tensors GGML_FTYPE_MOSTLY_Q6_K = 14, // except 1d tensors GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors }; // available tensor operations: @@ -2061,6 +2063,7 @@ extern "C" { GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_iq2_xs (const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); diff --git a/llama.cpp b/llama.cpp index aaadfa444..bd219d49c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2223,6 +2223,7 @@ struct llama_model_loader { case GGML_TYPE_Q5_K: ftype = LLAMA_FTYPE_MOSTLY_Q5_K_M; break; case GGML_TYPE_Q6_K: ftype = LLAMA_FTYPE_MOSTLY_Q6_K; break; case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break; + case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2595,6 +2596,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium"; case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XSS - 2.0625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw"; default: return "unknown, may not work"; } @@ -9050,6 +9052,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XS :quantized_type = GGML_TYPE_IQ2_XS; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index c11075bbc..6fde113ff 100644 --- a/llama.h +++ b/llama.h @@ -104,6 +104,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q5_K_M = 17, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q6_K = 18, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ2_XS = 20, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 
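// A small usage sketch, not part of the patch, assuming a program linked
// against this patched ggml: once IQ2_XS is registered in the type-traits
// table above, the generic helpers that the gguf loader relies on
// (ggml_type_name, ggml_blck_size, ggml_type_size) can describe it, and the
// bits-per-weight figure follows from the reported sizes instead of being
// hard-coded.
#include "ggml.h"
#include <cstdio>

int main() {
    const enum ggml_type t = GGML_TYPE_IQ2_XS;
    const int    blck = ggml_blck_size(t);
    const size_t size = ggml_type_size(t);
    printf("%s: block of %d weights in %zu bytes, %.4f bpw\n",
           ggml_type_name(t), blck, size, 8.0*size/blck);
    return 0;
}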
cee712618..31a78c632 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -134,8 +134,9 @@ int main(int argc, char * argv[]) { continue; } - if ((ggml_type)i == GGML_TYPE_IQ2_XXS) { - printf("Skip %s due to missing quantization functionality\n", ggml_type_name((ggml_type) i)); + const ggml_type ei = (ggml_type)i; + if (ei == GGML_TYPE_IQ2_XXS || ei == GGML_TYPE_IQ2_XS) { + printf("Skip %s due to missing quantization functionality\n", ggml_type_name(ei)); continue; } From 469e75d0a35b08de549a4fd87f082ca7a8a539ba Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Thu, 11 Jan 2024 20:43:15 +0100 Subject: [PATCH 273/811] llama : restore intended k-quants mixes for MoE models (#4872) * Restore intended k-quants quantization mixes for MoE models * Update Q2_K_S values in the quantize tool Still using LLaMA-v1 PPL values in the quant description today does not make much sense. But let's leave this update for another PR. --------- Co-authored-by: Iwan Kawrakow Co-authored-by: Georgi Gerganov --- examples/quantize/quantize.cpp | 1 + llama.cpp | 24 +++++++++++++++--------- llama.h | 1 + 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index d27ea5e91..f878f6911 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -18,6 +18,7 @@ static const std::vector QUANT_OPTIONS = { { "Q5_0", LLAMA_FTYPE_MOSTLY_Q5_0, " 4.33G, +0.0683 ppl @ LLaMA-v1-7B", }, { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, + { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, diff --git a/llama.cpp b/llama.cpp index bd219d49c..d39ff94c7 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2586,7 +2586,8 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q8_0: return "Q8_0"; // K-quants - case LLAMA_FTYPE_MOSTLY_Q2_K: return "Q2_K"; + case LLAMA_FTYPE_MOSTLY_Q2_K: return "Q2_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q2_K_S: return "Q2_K - Small"; case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "Q3_K - Small"; case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "Q3_K - Medium"; case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "Q3_K - Large"; @@ -8955,10 +8956,13 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty // TODO: explore better strategies new_type = GGML_TYPE_Q8_0; } - } else if (name.find("ffn_down.weight") != std::string::npos) { + } else if (name.find("ffn_down") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) { + if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { - new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q5_K + new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q5_K : arch != LLM_ARCH_FALCON || use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? 
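// A simplified sketch, not the actual llama.cpp helper, of the idea behind
// the ffn_down changes above: match "ffn_down" (so MoE expert tensors are
// counted too) and upgrade the first fraction of those tensors instead of a
// fixed number of them. The Q2_K_S rule from the patch is used as the
// example; the enum below is a stand-in for the real ggml types.
#include <cstdio>

enum fake_type { TYPE_LOW_BIT, TYPE_Q4_K };

// i_ffn_down: ffn_down tensors processed so far, n_ffn_down: total in the model
static fake_type pick_ffn_down_q2_k_s(int i_ffn_down, int n_ffn_down) {
    // mirrors: if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q4_K;
    return i_ffn_down < n_ffn_down/8 ? TYPE_Q4_K : TYPE_LOW_BIT;
}

int main() {
    const int n = 32*8;    // e.g. 32 layers with 8 experts each
    int upgraded = 0;
    for (int i = 0; i < n; ++i) {
        if (pick_ffn_down_q2_k_s(i, n) == TYPE_Q4_K) ++upgraded;
    }
    printf("%d of %d ffn_down tensors get the higher-bit type\n", upgraded, n);
    return 0;
}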
GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } @@ -8967,14 +8971,14 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { if (arch == LLM_ARCH_FALCON) { - new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q6_K : + new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q6_K : use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else { if (use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; } } else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < 4) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) { new_type = GGML_TYPE_Q5_K; } ++qs.i_feed_forward_w2; @@ -8992,9 +8996,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) new_type = GGML_TYPE_Q5_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) new_type = GGML_TYPE_Q6_K; } - else if (name.find("ffn_gate.weight") != std::string::npos || name.find("ffn_up.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; - } + // IK: let's remove this, else Q2_K is almost the same as Q3_K_S + //else if (name.find("ffn_gate") != std::string::npos || name.find("ffn_up") != std::string::npos) { + // if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; + //} // This can be used to reduce the size of the Q5_K_S model. // The associated PPL increase is fully in line with the size reduction //else { @@ -9043,6 +9048,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; + case LLAMA_FTYPE_MOSTLY_Q2_K_S: quantized_type = GGML_TYPE_Q2_K; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; @@ -9101,7 +9107,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (name.find("attn_v.weight") != std::string::npos || name.find("attn_qkv.weight") != std::string::npos) { ++qs.n_attention_wv; } - else if (name.find("ffn_down.weight") != std::string::npos) { + else if (name.find("ffn_down") != std::string::npos) { ++qs.n_feed_forward_w2; } } diff --git a/llama.h b/llama.h index 6fde113ff..43d41b8f6 100644 --- a/llama.h +++ b/llama.h @@ -105,6 +105,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q6_K = 18, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XS = 20, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q2_K_S = 21, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; From b0377875488b33f7114138687d828da1de61775d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 21:58:28 +0200 Subject: [PATCH 274/811] swift : track ggml release branch (#4867) --- Package.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Package.swift b/Package.swift index 59191da45..37524edee 100644 --- a/Package.swift +++ b/Package.swift @@ -14,7 +14,7 @@ let package = Package( .library(name: "llama", targets: ["llama"]), ], dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", 
.revision("979cc23b345006504cfc1f67c0fdf627805e3319")) + .package(url: "https://github.com/ggerganov/ggml.git", .branch("release")) ], targets: [ .target( From 3ca63b4538dfc78aaec88cd2c3e3f8417c1924e3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 22:43:05 +0200 Subject: [PATCH 275/811] main : disable token count by default (#4874) --- common/common.cpp | 6 +++--- common/common.h | 2 +- examples/main/main.cpp | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index bfcd6d4df..287e8bd5a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -630,7 +630,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.ppl_stride = std::stoi(argv[i]); - } else if (arg == "-stc" || arg == "--show_token_count") { + } else if (arg == "-stc" || arg == "--show-token-count") { if (++i >= argc) { invalid_param = true; break; @@ -950,8 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -stc N --show_token_count N\n"); - printf(" show consumed tokens every N tokens\n"); + printf(" -stc N --show-token-count N\n"); + printf(" show consumed tokens every N tokens (default: %d)\n", params.token_interval); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); diff --git a/common/common.h b/common/common.h index a295e88b0..82d23cf54 100644 --- a/common/common.h +++ b/common/common.h @@ -64,7 +64,7 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. 
int32_t grp_attn_n = 1; // group-attention factor int32_t grp_attn_w = 512; // group-attention width - int32_t token_interval = 512; // show token count every 512 tokens + int32_t token_interval = -1; // show token count every 512 tokens (-1 = disabled) float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1f35febbd..6953d107c 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -651,8 +651,8 @@ int main(int argc, char ** argv) { LOG("n_past = %d\n", n_past); // Display total tokens alongside total time - if (n_past % params.token_interval == 0) { - printf("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx); + if (params.token_interval > 0 && n_past % params.token_interval == 0) { + LOG_TEE("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx); } } From 7edefbd79cc6dea96640edc54c6b94b2b2496d8b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 22:46:26 +0200 Subject: [PATCH 276/811] main : better name for variable n_print (#4874) --- common/common.cpp | 8 ++++---- common/common.h | 2 +- examples/main/main.cpp | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 287e8bd5a..b2cb0e257 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -630,12 +630,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.ppl_stride = std::stoi(argv[i]); - } else if (arg == "-stc" || arg == "--show-token-count") { + } else if (arg == "-ptc" || arg == "--print-token-count") { if (++i >= argc) { invalid_param = true; break; } - params.token_interval = std::stoi(argv[i]); + params.n_print = std::stoi(argv[i]); } else if (arg == "--ppl-output-type") { if (++i >= argc) { invalid_param = true; @@ -950,8 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -stc N --show-token-count N\n"); - printf(" show consumed tokens every N tokens (default: %d)\n", params.token_interval); + printf(" -stc N --print-token-count N\n"); + printf(" print token count every N tokens (default: %d)\n", params.n_print); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); diff --git a/common/common.h b/common/common.h index 82d23cf54..1359e76ab 100644 --- a/common/common.h +++ b/common/common.h @@ -64,7 +64,7 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. 
int32_t grp_attn_n = 1; // group-attention factor int32_t grp_attn_w = 512; // group-attention width - int32_t token_interval = -1; // show token count every 512 tokens (-1 = disabled) + int32_t n_print = -1; // print token count every n tokens (-1 = disabled) float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 6953d107c..c53b29978 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -651,7 +651,7 @@ int main(int argc, char ** argv) { LOG("n_past = %d\n", n_past); // Display total tokens alongside total time - if (params.token_interval > 0 && n_past % params.token_interval == 0) { + if (params.n_print > 0 && n_past % params.n_print == 0) { LOG_TEE("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx); } } From 1d118386fea031f01550f8cd47a5c86296e5333f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 23:23:49 +0200 Subject: [PATCH 277/811] server : fix infill when prompt is empty (#4833) --- examples/server/server.cpp | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 031824e14..1d30a15a6 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1406,7 +1406,7 @@ struct llama_server_context task.multitask_id = multitask_id; // when a completion task's prompt array is not a singleton, we split it into multiple requests - if (task.data.at("prompt").size() > 1) + if (task.data.count("prompt") && task.data.at("prompt").size() > 1) { lock.unlock(); // entering new func scope return split_multiprompt_task(task); @@ -1577,9 +1577,9 @@ struct llama_server_context slot->reset(); - slot->infill = task.infill_mode; - slot->embedding = task.embedding_mode; - slot->task_id = task.id; + slot->infill = task.infill_mode; + slot->embedding = task.embedding_mode; + slot->task_id = task.id; slot->multitask_id = task.multitask_id; if (!launch_slot_with_data(slot, task.data)) @@ -1731,7 +1731,8 @@ struct llama_server_context const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()) || !slot.images.empty(); // empty prompt passed -> release the slot and send empty response - if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt) + // note: infill mode allows empty prompt + if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt && !slot.infill) { slot.release(); slot.print_timings(); @@ -2609,8 +2610,8 @@ static json format_final_response_oaicompat(const json &request, const task_resu {"object", streaming ? 
"chat.completion.chunk" : "chat.completion"}, {"usage", json{{"completion_tokens", num_tokens_predicted}, - {"prompt_tokens", num_prompt_tokens}, - {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, + {"prompt_tokens", num_prompt_tokens}, + {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, {"id", gen_chatcmplid()}}; if (server_verbose) { From 326b418b59b6d48d854c4461a2303e8ac0a311e6 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Fri, 12 Jan 2024 06:59:57 +0100 Subject: [PATCH 278/811] Importance Matrix calculation (#4861) * imatrix: 1st version * imatrix: WIP * Cleanup * Update examples/imatrix/imatrix.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Iwan Kawrakow Co-authored-by: Georgi Gerganov --- Makefile | 5 +- examples/CMakeLists.txt | 1 + examples/imatrix/CMakeLists.txt | 5 + examples/imatrix/imatrix.cpp | 380 ++++++++++++++++++++++++++++++++ ggml.c | 14 ++ ggml.h | 6 + 6 files changed, 410 insertions(+), 1 deletion(-) create mode 100644 examples/imatrix/CMakeLists.txt create mode 100644 examples/imatrix/imatrix.cpp diff --git a/Makefile b/Makefile index 4c7e175bf..05fe9a0f6 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ # Define the default target now so that it is always the first target BUILD_TARGETS = \ - main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ + main quantize quantize-stats perplexity imatrix embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup passkey tests/test-c.o @@ -614,6 +614,9 @@ quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml. 
perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 0c71cbdf7..fa127a3aa 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -36,6 +36,7 @@ else() add_subdirectory(lookahead) add_subdirectory(lookup) add_subdirectory(train-text-from-scratch) + add_subdirectory(imatrix) if (LLAMA_METAL) add_subdirectory(metal) endif() diff --git a/examples/imatrix/CMakeLists.txt b/examples/imatrix/CMakeLists.txt new file mode 100644 index 000000000..d688a1620 --- /dev/null +++ b/examples/imatrix/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET imatrix) +add_executable(${TARGET} imatrix.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp new file mode 100644 index 000000000..1461bc963 --- /dev/null +++ b/examples/imatrix/imatrix.cpp @@ -0,0 +1,380 @@ +#include "common.h" +#include "llama.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) +#pragma warning(disable: 4244 4267) // possible loss of data +#endif + +struct Stats { + std::vector values; + int ncall = 0; +}; + +struct StatParams { + std::string ofile = "imatrix.dat"; + int n_output_frequency = 10; + int verbosity = 1; + bool collect_output_weight = false; +}; + +class IMatrixCollector { +public: + IMatrixCollector() = default; + void set_parameters(StatParams&& params) { m_params = std::move(params); } + void collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1); + void save_imatrix() const; +private: + std::unordered_map m_stats; + StatParams m_params; + std::mutex m_mutex; + int m_last_call = 0; +}; + +void IMatrixCollector::collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1) { + if (src1->ne[1] < 16 || src1->type != GGML_TYPE_F32) return; + if (!(strncmp(src0->name, "blk.", 4) == 0 || (m_params.collect_output_weight && strcmp(src0->name, "output.weight") == 0))) return; + std::lock_guard lock(m_mutex); + auto& e = m_stats[src0->name]; + if (e.values.empty()) { + e.values.resize(src1->ne[0], 0); + } + else if (e.values.size() != (size_t)src1->ne[0]) { + fprintf(stderr, "Oops: inconsistent size for %s (%d vs %d)\n", src0->name, (int)e.values.size(), (int)src1->ne[0]); + exit(1); //GGML_ASSERT(false); + } + ++e.ncall; + if (m_params.verbosity > 1) { + printf("%s[%d]: %s, %d x %d, %d\n",__func__,m_last_call,src0->name,(int)src1->ne[0],(int)src1->ne[1],(int)src1->type); + } + for (int row = 0; row < (int)src1->ne[1]; ++row) { + const float * x = (const float *)src1->data + row * src1->ne[0]; + for (int j = 0; j < (int)src1->ne[0]; ++j) { + e.values[j] += x[j]*x[j]; + } + } + if (e.ncall > m_last_call) { + m_last_call = e.ncall; + if (m_last_call % m_params.n_output_frequency == 0) { + save_imatrix(); + } + } +} + +void IMatrixCollector::save_imatrix() const { + const char * fname = m_params.ofile.empty() ? 
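// A self-contained sketch, not part of the patch, of the statistic that
// collect_imatrix() above accumulates: for every matrix multiplication it
// sums the squared activation of each input column over all rows (tokens)
// seen so far. The toy matrix below stands in for src1; ggml is not used.
#include <cstdio>
#include <vector>

int main() {
    const int n_cols = 4;                            // plays the role of src1->ne[0]
    const std::vector<std::vector<float>> rows = {   // plays the role of src1 rows
        {1.0f, -2.0f, 0.5f,  0.0f},
        {0.5f,  1.0f, 2.0f, -1.0f},
    };
    std::vector<float> values(n_cols, 0.0f);         // per-column accumulator
    for (const auto & x : rows) {
        for (int j = 0; j < n_cols; ++j) {
            values[j] += x[j]*x[j];                  // same update as e.values[j] += x[j]*x[j]
        }
    }
    for (int j = 0; j < n_cols; ++j) {
        printf("column %d: %g\n", j, values[j]);
    }
    return 0;
}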
"imatrix.dat" : m_params.ofile.c_str(); + std::ofstream out(fname, std::ios::binary); + int n_entries = m_stats.size(); + out.write((const char*)&n_entries, sizeof(n_entries)); + for (auto& p : m_stats) { + int len = p.first.size(); + out.write((const char*)&len, sizeof(len)); + out.write(p.first.c_str(), len); + out.write((const char*)&p.second.ncall, sizeof(p.second.ncall)); + int nval = p.second.values.size(); + out.write((const char*)&nval, sizeof(nval)); + if (nval > 0) out.write((const char*)p.second.values.data(), nval*sizeof(float)); + } + if (m_params.verbosity > 0) { + fprintf(stderr, "\n%s: stored collected data after %d chunks in %s\n",__func__,m_last_call,fname); + } +} + +static IMatrixCollector g_collector; + +static void ik_collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1) { + g_collector.collect_imatrix(src0, src1); +} + + +struct results_log_softmax { + double log_softmax; + float logit; + float prob; +}; + +static std::vector softmax(const std::vector& logits) { + std::vector probs(logits.size()); + float max_logit = logits[0]; + for (float v : logits) { + max_logit = std::max(max_logit, v); + } + double sum_exp = 0.0; + for (size_t i = 0; i < logits.size(); i++) { + // Subtract the maximum logit value from the current logit value for numerical stability + const float logit = logits[i] - max_logit; + const float exp_logit = expf(logit); + sum_exp += exp_logit; + probs[i] = exp_logit; + } + for (size_t i = 0; i < probs.size(); i++) { + probs[i] /= sum_exp; + } + return probs; +} + +static results_log_softmax log_softmax(int n_vocab, const float * logits, int tok) { + float max_logit = logits[0]; + for (int i = 1; i < n_vocab; ++i) { + max_logit = std::max(max_logit, logits[i]); + } + double sum_exp = 0.0; + for (int i = 0; i < n_vocab; ++i) { + sum_exp += expf(logits[i] - max_logit); + } + return {logits[tok] - max_logit - log(sum_exp), logits[tok], expf(logits[tok] - max_logit) / (float) sum_exp}; +} + +static void process_logits( + int n_vocab, const float * logits, const int * tokens, int n_token, std::vector & workers, + double & nll, double & nll2, float * logit_history, float * prob_history +) { + std::mutex mutex; + int counter = 0; + auto compute = [&mutex, &counter, &nll, &nll2, logit_history, prob_history, n_vocab, logits, tokens, n_token] () { + double local_nll = 0; + double local_nll2 = 0; + while (true) { + std::unique_lock lock(mutex); + int i = counter++; + if (i >= n_token) { + nll += local_nll; nll2 += local_nll2; + break; + } + lock.unlock(); + const results_log_softmax results = log_softmax(n_vocab, logits + i*n_vocab, tokens[i+1]); + const double v = -results.log_softmax; + local_nll += v; + local_nll2 += v*v; + + logit_history[i] = results.logit; + prob_history[i] = results.prob; + } + }; + for (auto & w : workers) { + w = std::thread(compute); + } + compute(); + for (auto & w : workers) { + w.join(); + } +} + +static bool compute_imatrix(llama_context * ctx, const gpt_params & params) { + + const bool add_bos = llama_should_add_bos_token(llama_get_model(ctx)); + const int n_ctx = llama_n_ctx(ctx); + + auto tim1 = std::chrono::high_resolution_clock::now(); + fprintf(stderr, "%s: tokenizing the input ..\n", __func__); + + std::vector tokens = ::llama_tokenize(ctx, params.prompt, add_bos); + + auto tim2 = std::chrono::high_resolution_clock::now(); + fprintf(stderr, "%s: tokenization took %g ms\n",__func__,1e-3*std::chrono::duration_cast(tim2-tim1).count()); + + if (int(tokens.size()) < 2*n_ctx) { + fprintf(stderr, "%s: 
you need at least %d tokens for a context of %d tokens\n",__func__,2*n_ctx, + n_ctx); + fprintf(stderr, "%s: the data file you provided tokenizes to only %zu tokens\n",__func__,tokens.size()); + return false; + } + + std::vector logit_history; + logit_history.resize(tokens.size()); + + std::vector prob_history; + prob_history.resize(tokens.size()); + + const int n_chunk_max = tokens.size() / n_ctx; + + const int n_chunk = params.n_chunks < 0 ? n_chunk_max : std::min(params.n_chunks, n_chunk_max); + const int n_vocab = llama_n_vocab(llama_get_model(ctx)); + const int n_batch = params.n_batch; + + int count = 0; + double nll = 0.0; + double nll2 = 0.0; + + fprintf(stderr, "%s: computing over %d chunks with batch_size %d\n", __func__, n_chunk, n_batch); + + std::vector workers(std::thread::hardware_concurrency() - 1); + + for (int i = 0; i < n_chunk; ++i) { + const int start = i * n_ctx; + const int end = start + n_ctx; + + const int num_batches = (n_ctx + n_batch - 1) / n_batch; + + std::vector logits; + + const auto t_start = std::chrono::high_resolution_clock::now(); + + // clear the KV cache + llama_kv_cache_clear(ctx); + + for (int j = 0; j < num_batches; ++j) { + const int batch_start = start + j * n_batch; + const int batch_size = std::min(end - batch_start, n_batch); + + // save original token and restore it after eval + const auto token_org = tokens[batch_start]; + + // add BOS token for the first batch of each chunk + if (add_bos && j == 0) { + tokens[batch_start] = llama_token_bos(llama_get_model(ctx)); + } + + if (llama_decode(ctx, llama_batch_get_one(tokens.data() + batch_start, batch_size, j * n_batch, 0))) { + fprintf(stderr, "%s : failed to eval\n", __func__); + return false; + } + + // restore the original token in case it was set to BOS + tokens[batch_start] = token_org; + + const auto * batch_logits = llama_get_logits(ctx); + logits.insert(logits.end(), batch_logits, batch_logits + batch_size * n_vocab); + } + + const auto t_end = std::chrono::high_resolution_clock::now(); + + if (i == 0) { + const float t_total = std::chrono::duration(t_end - t_start).count(); + fprintf(stderr, "%s: %.2f seconds per pass - ETA ", __func__, t_total); + int total_seconds = (int)(t_total * n_chunk); + if (total_seconds >= 60*60) { + fprintf(stderr, "%d hours ", total_seconds / (60*60)); + total_seconds = total_seconds % (60*60); + } + fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0); + } + + const int first = n_ctx/2; + process_logits(n_vocab, logits.data() + first*n_vocab, tokens.data() + start + first, n_ctx - 1 - first, + workers, nll, nll2, logit_history.data() + start + first, prob_history.data() + start + first); + count += n_ctx - first - 1; + + printf("[%d]%.4lf,", i + 1, std::exp(nll / count)); + fflush(stdout); + } + printf("\n"); + + nll2 /= count; + nll /= count; + const double ppl = exp(nll); + nll2 -= nll * nll; + if (nll2 > 0) { + nll2 = sqrt(nll2/(count-1)); + printf("Final estimate: PPL = %.4lf +/- %.5lf\n", ppl, nll2*ppl); + } else { + printf("Unexpected negative standard deviation of log(prob)\n"); + } + + return true; +} + +int main(int argc, char ** argv) { + + StatParams sparams; + std::vector args; + args.push_back(argv[0]); + int iarg = 1; + for (; iarg < argc-1; ++iarg) { + std::string arg{argv[iarg]}; + if (arg == "-o" || arg == "--output-file") { + sparams.ofile = argv[++iarg]; + } + else if (arg == "-ofreq" || arg == "--output-frequency") { + sparams.n_output_frequency = std::stoi(argv[++iarg]); + } + else if (arg == "-ow" || arg == "--output-weight") { + 
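// A minimal reader sketch for the file named by the -o/--output-file option
// handled just above and written by save_imatrix() earlier in this file: an
// entry count, then per entry a name length, the name bytes, ncall, a value
// count and the float values. This is an illustration, not a tool shipped
// with the patch, and error handling is kept to a bare minimum.
#include <cstdio>
#include <fstream>
#include <string>
#include <vector>

int main(int argc, char ** argv) {
    const char * fname = argc > 1 ? argv[1] : "imatrix.dat";
    std::ifstream in(fname, std::ios::binary);
    if (!in) { fprintf(stderr, "failed to open %s\n", fname); return 1; }
    int n_entries = 0;
    in.read((char *)&n_entries, sizeof(n_entries));
    for (int i = 0; i < n_entries && in; ++i) {
        int len = 0;
        in.read((char *)&len, sizeof(len));
        std::string name(len, ' ');
        in.read(&name[0], len);
        int ncall = 0, nval = 0;
        in.read((char *)&ncall, sizeof(ncall));
        in.read((char *)&nval, sizeof(nval));
        std::vector<float> values(nval);
        if (nval > 0) in.read((char *)values.data(), nval*sizeof(float));
        printf("%-48s ncall = %d, nval = %d\n", name.c_str(), ncall, nval);
    }
    return 0;
}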
sparams.collect_output_weight = std::stoi(argv[++iarg]); + } + else if (arg == "--verbosity") { + sparams.verbosity = std::stoi(argv[++iarg]); + } else { + args.push_back(argv[iarg]); + } + } + if (iarg < argc) { + args.push_back(argv[iarg]); + } + + gpt_params params; + params.n_batch = 512; + if (!gpt_params_parse(args.size(), args.data(), params)) { + return 1; + } + + g_collector.set_parameters(std::move(sparams)); + + ggml_set_imatrix_collection(ik_collect_imatrix); + + params.logits_all = true; + params.n_batch = std::min(params.n_batch, params.n_ctx); + + print_build_info(); + + if (params.seed == LLAMA_DEFAULT_SEED) { + params.seed = time(NULL); + } + + fprintf(stderr, "%s: seed = %u\n", __func__, params.seed); + + std::mt19937 rng(params.seed); + if (params.random_prompt) { + params.prompt = gpt_random_prompt(rng); + } + + llama_backend_init(params.numa); + + llama_model * model; + llama_context * ctx; + + // load the model and apply lora adapter, if any + std::tie(model, ctx) = llama_init_from_gpt_params(params); + if (model == NULL) { + fprintf(stderr, "%s: error: unable to load model\n", __func__); + return 1; + } + + const int n_ctx_train = llama_n_ctx_train(model); + if (params.n_ctx > n_ctx_train) { + fprintf(stderr, "%s: warning: model was trained on only %d context tokens (%d specified)\n", + __func__, n_ctx_train, params.n_ctx); + } + + // print system information + { + fprintf(stderr, "\n"); + fprintf(stderr, "%s\n", get_system_info(params).c_str()); + } + + bool OK = compute_imatrix(ctx, params); + if (!OK) { + return 1; + } + + g_collector.save_imatrix(); + + llama_print_timings(ctx); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + return 0; +} diff --git a/ggml.c b/ggml.c index d2a8c0478..f5caeba08 100644 --- a/ggml.c +++ b/ggml.c @@ -394,6 +394,12 @@ static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y); static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y); +ggml_collect_imatrix_t g_imatrix_collect = NULL; + +void ggml_set_imatrix_collection(ggml_collect_imatrix_t imatrix_collect) { + g_imatrix_collect = imatrix_collect; +} + static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { [GGML_TYPE_I8] = { .type_name = "i8", @@ -9763,6 +9769,10 @@ static void ggml_compute_forward_mul_mat( const int ith = params->ith; const int nth = params->nth; + if (ith == 1 && g_imatrix_collect) { + g_imatrix_collect(src0, src1); + } + const enum ggml_type type = src0->type; const bool src1_cont = ggml_is_contiguous(src1); @@ -10066,6 +10076,10 @@ static void ggml_compute_forward_mul_mat_id( const struct ggml_tensor * src0_cur = dst->src[cur_a + 2]; + if (ith == 1 && g_imatrix_collect) { + g_imatrix_collect(src0_cur, src1); + } + const void * wdata = (src1->type == vec_dot_type) ? 
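// A usage sketch for the hook wired in above; it assumes a program built
// against this patched ggml, and "my_collect" is a made-up name. Any function
// with the ggml_collect_imatrix_t signature can be registered, and ggml then
// calls it from the mul_mat paths shown here (from the thread with index 1),
// with src0 being the first operand (the weights in llama.cpp graphs) and
// src1 the activations.
#include "ggml.h"
#include <cstdio>

static void my_collect(const struct ggml_tensor * src0, const struct ggml_tensor * src1) {
    printf("mul_mat: %s (%d x %d activations)\n", src0->name, (int)src1->ne[0], (int)src1->ne[1]);
}

int main() {
    ggml_set_imatrix_collection(my_collect);
    // ... build and evaluate a ggml graph as usual; my_collect fires for each mul_mat ...
    return 0;
}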
src1->data : params->wdata; const size_t row_size = ggml_row_size(vec_dot_type, ne10); diff --git a/ggml.h b/ggml.h index 93b42a27d..4c2ff6c66 100644 --- a/ggml.h +++ b/ggml.h @@ -2067,6 +2067,12 @@ extern "C" { GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); + // + // Importance matrix + // + typedef void(*ggml_collect_imatrix_t)(const struct ggml_tensor * src0, const struct ggml_tensor * src1); + GGML_API void ggml_set_imatrix_collection(ggml_collect_imatrix_t imatrix_collect); + // // gguf // From f445c0e68cf8e1faca0b2aa8dfb9d48231cec301 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:01:56 +0200 Subject: [PATCH 279/811] llama : fix llm_build_k_shift to use correct n_rot (#4889) * llama : fix llm_build_k_shift to use correct n_rot ggml-ci * llama : always use hparams.n_rot for ggml_rope_custom ggml-ci * convert : fix persimmon conversion to write correct n_rot --- common/common.cpp | 3 ++ convert-hf-to-gguf.py | 9 ++++- gguf-py/gguf/tensor_mapping.py | 7 ++++ llama.cpp | 65 +++++++++++++++++----------------- 4 files changed, 51 insertions(+), 33 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index b2cb0e257..3aefed01d 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1055,6 +1055,9 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & } static ggml_type kv_cache_type_from_str(const std::string & s) { + if (s == "f32") { + return GGML_TYPE_F32; + } if (s == "f16") { return GGML_TYPE_F16; } diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 203eaf64b..813aeeed6 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -817,10 +817,17 @@ class PersimmonModel(Model): hidden_size = self.hparams["hidden_size"] self.gguf_writer.add_name('persimmon-8b-chat') + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(hidden_size) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) - self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + + # NOTE: not sure about this change - why does the model not have a rope dimension count when it is smaller + # than the head size? 
+ # ref: https://github.com/ggerganov/llama.cpp/pull/4889 + #self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + self.gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2) + self.gguf_writer.add_head_count(head_count) self.gguf_writer.add_head_count_kv(head_count_kv) self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 80c1d5449..24a089037 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -57,6 +57,7 @@ class TensorNameMap: "transformer.norm_f", # mpt "ln_f", # refact bloom qwen gpt2 "language_model.encoder.final_layernorm", # persimmon + "model.final_layernorm", # persimmon "lm_head.ln", # phi2 ), @@ -98,6 +99,7 @@ class TensorNameMap: "transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "model.layers.{bid}.self_attn.query_key_value", # persimmon "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 ), @@ -141,6 +143,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "model.layers.{bid}.self_attn.dense", # persimmon "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo @@ -184,6 +187,7 @@ class TensorNameMap: "encoder.layer.{bid}.intermediate.dense", # bert "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon + "model.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 @@ -225,6 +229,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "model.layers.{bid}.mlp.dense_4h_to_h", # persimmon "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo @@ -237,10 +242,12 @@ class TensorNameMap: MODEL_TENSOR.ATTN_Q_NORM: ( "language_model.encoder.layers.{bid}.self_attention.q_layernorm", + "model.layers.{bid}.self_attn.q_layernorm", # persimmon ), MODEL_TENSOR.ATTN_K_NORM: ( "language_model.encoder.layers.{bid}.self_attention.k_layernorm", + "model.layers.{bid}.self_attn.k_layernorm", # persimmon ), MODEL_TENSOR.ROPE_FREQS: ( diff --git a/llama.cpp b/llama.cpp index d39ff94c7..0bab95563 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4104,7 +4104,6 @@ static void llm_build_k_shift( struct ggml_cgraph * graph, llm_rope_type type, int64_t n_ctx, - int n_rot, float freq_base, float freq_scale, const llm_build_cb & cb) { @@ -4112,14 +4111,13 @@ static void llm_build_k_shift( const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head_k = hparams.n_embd_head_k; const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int32_t n_rot = hparams.n_rot; const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; const float ext_factor = cparams.yarn_ext_factor; const float attn_factor = cparams.yarn_attn_factor; const float beta_fast = cparams.yarn_beta_fast; const float beta_slow = cparams.yarn_beta_slow; - GGML_ASSERT(n_embd_head_k % n_rot == 0); - struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); cb(K_shift, 
"K_shift", -1); @@ -4523,7 +4521,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4561,14 +4559,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -4691,6 +4689,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4708,7 +4707,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4734,12 +4733,12 @@ struct llm_build_context { case MODEL_7B: Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); break; @@ -4812,6 +4811,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4829,7 +4829,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4870,13 +4870,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5033,9 +5033,8 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, 
LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; - GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - - const int64_t n_rot = n_embd_head_k / 2; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head/2 == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5052,7 +5051,7 @@ struct llm_build_context { cb(KQ_mask, "KQ_mask", -1); if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5112,7 +5111,7 @@ struct llm_build_context { // RoPE the first n_rot of q/k, pass the other half, and concat. struct ggml_tensor * qrot = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, + ctx0, tmpq, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, 0 @@ -5120,7 +5119,7 @@ struct llm_build_context { cb(qrot, "qrot", il); struct ggml_tensor * krot = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, + ctx0, tmpk, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, 0 @@ -5129,29 +5128,29 @@ struct llm_build_context { // get the second half of tmpq, e.g tmpq[n_rot:, :, :] struct ggml_tensor * qpass = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, + ctx0, tmpq, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, - ggml_element_size(tmpq) * n_rot + ggml_element_size(tmpq) * hparams.n_rot ); cb(qpass, "qpass", il); struct ggml_tensor * kpass = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, + ctx0, tmpk, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, - ggml_element_size(tmpk) * n_rot + ggml_element_size(tmpk) * hparams.n_rot ); cb(kpass, "kpass", il); struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, inp_pos, n_rot, 2, 0, n_orig_ctx, + ctx0, qrot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(qrotated, "qrotated", il); struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, inp_pos, n_rot, 2, 0, n_orig_ctx, + ctx0, krot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(krotated, "krotated", il); @@ -5531,6 +5530,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5548,7 +5548,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, hparams.n_rot, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5661,7 +5661,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < 
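// A compact standalone sketch, independent of ggml, of the partial-RoPE
// layout built above for Persimmon: only the first n_rot entries of each head
// (half the head size, per the assert) are rotated, the rest pass through
// unchanged, and the rotation pairs entry i with entry i + n_rot/2 as in the
// "mode = 2" NeoX style requested by this graph. The head values, position
// and frequency base below are arbitrary example numbers.
#include <cmath>
#include <cstdio>
#include <vector>

int main() {
    const int   n_embd_head = 8;
    const int   n_rot       = n_embd_head/2;
    const float freq_base   = 10000.0f;
    const int   pos         = 3;

    std::vector<float> head(n_embd_head);
    for (int i = 0; i < n_embd_head; ++i) head[i] = 0.1f*(i + 1);

    std::vector<float> out = head;                     // the pass-through part stays as-is
    for (int i = 0; i < n_rot/2; ++i) {
        const float theta = pos*powf(freq_base, -2.0f*i/n_rot);
        const float c = cosf(theta), s = sinf(theta);
        const float x0 = head[i], x1 = head[i + n_rot/2];
        out[i]           = x0*c - x1*s;
        out[i + n_rot/2] = x0*s + x1*c;
    }
    for (int i = 0; i < n_embd_head; ++i) printf("%d: %.4f -> %.4f\n", i, head[i], out[i]);
    return 0;
}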
n_layer; ++il) { @@ -5693,13 +5693,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5778,7 +5778,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5874,6 +5874,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5891,7 +5892,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5917,13 +5918,13 @@ struct llm_build_context { cb(Vcur, "Vcur", il); Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Qcur, hparams.n_rot, n_head, n_tokens), inp_pos, n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Kcur, hparams.n_rot, n_head_kv, n_tokens), inp_pos, n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); From 2d00741e12c5db4a33dfccd1125f5de4adec9a5b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:03:38 +0200 Subject: [PATCH 280/811] py : fix lint (#4889) --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 813aeeed6..a1c79fd47 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -825,7 +825,7 @@ class PersimmonModel(Model): # NOTE: not sure about this change - why does the model not have a rope dimension count when it is smaller # than the head size? 
# ref: https://github.com/ggerganov/llama.cpp/pull/4889 - #self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + # self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) self.gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2) self.gguf_writer.add_head_count(head_count) From 4315a94366708828f949f9db89d2a8d99b634459 Mon Sep 17 00:00:00 2001 From: howlger Date: Fri, 12 Jan 2024 12:05:32 +0100 Subject: [PATCH 281/811] common : streamline the formatting of help (#4890) * common : streamline the formatting of help - Separate alternative parameters by a comma - Do not indent `--version` differently * Update common/common.cpp --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 3aefed01d..062a8b4de 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -818,7 +818,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf("\n"); printf("options:\n"); printf(" -h, --help show this help message and exit\n"); - printf(" --version show version and build info\n"); + printf(" --version show version and build info\n"); printf(" -i, --interactive run in interactive mode\n"); printf(" --interactive-first run in interactive mode and wait for input right away\n"); printf(" -ins, --instruct run in instruction mode (use with Alpaca models)\n"); @@ -915,7 +915,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" number of layers to store in VRAM\n"); printf(" -ngld N, --n-gpu-layers-draft N\n"); printf(" number of layers to store in VRAM for the draft model\n"); - printf(" -ts SPLIT --tensor-split SPLIT\n"); + printf(" -ts SPLIT, --tensor-split SPLIT\n"); printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); #ifdef GGML_USE_CUBLAS @@ -950,7 +950,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -stc N --print-token-count N\n"); + printf(" -ptc N, --print-token-count N\n"); printf(" print token count every N tokens (default: %d)\n", params.n_print); printf("\n"); #ifndef LOG_DISABLE_LOGS From 3cabe80630c7eeb57713cd02249053a8cf6894fa Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:10:19 +0200 Subject: [PATCH 282/811] llama : fix typo "imp_embd" -> "inp_embd" --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 0bab95563..29f8873f6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5040,7 +5040,7 @@ struct llm_build_context { struct ggml_tensor * inpL; inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); - cb(inpL, "imp_embd", -1); + cb(inpL, "inp_embd", -1); // inp_pos - contains the positions struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); From 1b280c9fffd682b6924010a4437f0275f2921fa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 12 Jan 2024 12:30:41 +0100 Subject: [PATCH 283/811] CUDA: fix softmax compile for old CUDA versions (#4862) --- ggml-cuda.cu | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index dd19699f6..a345b0c4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -116,6 +116,8 @@ #include "ggml.h" #include "ggml-backend-impl.h" +#define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed) + #define CC_PASCAL 600 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 @@ -605,16 +607,16 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { } static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { -#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) - (void) a; - bad_arch(); -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); } return a; -#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +#else + (void) a; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL } static __device__ __forceinline__ float warp_reduce_max(float x) { @@ -626,16 +628,16 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { } static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) - (void) x; - bad_arch(); -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); } return x; -#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +#else + (void) x; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX } static __device__ __forceinline__ float op_repeat(const float a, const float b) { @@ -5613,7 +5615,7 @@ static __global__ void diag_mask_inf_f32(const 
float * x, float * dst, const int template static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX const int ncols_data = ncols_template == 0 ? ncols_par : ncols_template; const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; @@ -5738,7 +5740,7 @@ static __global__ void soft_max_f16(const float * x, const float * y, float * ds #else (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; bad_arch(); -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX } template @@ -8574,15 +8576,15 @@ static void ggml_cuda_op_soft_max( float scale = 1.0f; memcpy(&scale, dst->op_params, sizeof(float)); -#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - const bool use_f16_soft_max = false; -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION >= CUDART_HMAX #ifdef GGML_CUDA_F16 const bool use_f16_soft_max = true; #else const bool use_f16_soft_max = false; #endif // GGML_CUDA_F16 -#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) +#else + const bool use_f16_soft_max = false; +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && CUDART_VERSION >= CUDART_HMAX if (use_f16_soft_max) { soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); From 5537d9d36bfdb4379555431f574d3d78ce6e7955 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 14:33:21 +0200 Subject: [PATCH 284/811] gitignore : imatrix --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index cf1b692e9..fba207045 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ models-mnt /embedding /gguf /gguf-llama-simple +/imatrix /infill /libllama.so /llama-bench From e790eef21ce659f5c16d59f8a5c8dcf6cde0692a Mon Sep 17 00:00:00 2001 From: Zay <95888118+isaiahbjork@users.noreply.github.com> Date: Fri, 12 Jan 2024 05:48:00 -0700 Subject: [PATCH 285/811] llama.swiftui : update models layout (#4826) * Updated Models Layout - Added a models drawer - Added downloading directly from Hugging Face - Load custom models from local folder - Delete models by swiping left * trimmed trailing white space * Updated Models Layout --- .../llama.swiftui.xcodeproj/project.pbxproj | 8 +- .../llama.swiftui/Models/LlamaState.swift | 89 ++++++++ .../llama.swiftui/UI/ContentView.swift | 213 +++++++++--------- .../llama.swiftui/UI/DownloadButton.swift | 2 + .../llama.swiftui/UI/InputButton.swift | 131 +++++++++++ 5 files changed, 338 insertions(+), 105 deletions(-) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/InputButton.swift diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index a8848a49f..3950b9e9d 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -8,6 +8,7 @@ /* Begin PBXBuildFile section */ 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = 
PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; + 79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */; }; 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; @@ -22,6 +23,7 @@ /* Begin PBXFileReference section */ 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; + 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputButton.swift; sourceTree = ""; }; 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; @@ -119,6 +121,7 @@ 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, 8A1C83782AC328BD0096AF73 /* ContentView.swift */, F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */, + 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */, ); path = UI; sourceTree = ""; @@ -213,6 +216,7 @@ 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, + 79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -345,7 +349,7 @@ CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = STLSG3FG8Q; + DEVELOPMENT_TEAM = K5UQJPP73A; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; @@ -377,7 +381,7 @@ CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = STLSG3FG8Q; + DEVELOPMENT_TEAM = K5UQJPP73A; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index 17cb5b9dd..5bde18917 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -1,9 +1,19 @@ import Foundation +struct Model: Identifiable { + var id = UUID() + var name: String + var url: String + var filename: String + var status: String? 
+} + @MainActor class LlamaState: ObservableObject { @Published var messageLog = "" @Published var cacheCleared = false + @Published var downloadedModels: [Model] = [] + @Published var undownloadedModels: [Model] = [] let NS_PER_S = 1_000_000_000.0 private var llamaContext: LlamaContext? @@ -13,23 +23,102 @@ class LlamaState: ObservableObject { } init() { + loadModelsFromDisk() + loadDefaultModels() + } + + private func loadModelsFromDisk() { + do { + let documentsURL = getDocumentsDirectory() + let modelURLs = try FileManager.default.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil, options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants]) + for modelURL in modelURLs { + let modelName = modelURL.deletingPathExtension().lastPathComponent + downloadedModels.append(Model(name: modelName, url: "", filename: modelURL.lastPathComponent, status: "downloaded")) + } + } catch { + print("Error loading models from disk: \(error)") + } + } + + private func loadDefaultModels() { do { try loadModel(modelUrl: defaultModelUrl) } catch { messageLog += "Error!\n" } + + for model in defaultModels { + let fileURL = getDocumentsDirectory().appendingPathComponent(model.filename) + if FileManager.default.fileExists(atPath: fileURL.path) { + + } else { + var undownloadedModel = model + undownloadedModel.status = "download" + undownloadedModels.append(undownloadedModel) + } + } } + func getDocumentsDirectory() -> URL { + let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) + return paths[0] + } + private let defaultModels: [Model] = [ + Model(name: "TinyLlama-1.1B (Q4_0, 0.6 GiB)",url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf", status: "download"), + Model( + name: "TinyLlama-1.1B Chat (Q8_0, 1.1 GiB)", + url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf?download=true", + filename: "tinyllama-1.1b-chat-v1.0.Q8_0.gguf", status: "download" + ), + + Model( + name: "TinyLlama-1.1B (F16, 2.2 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", + filename: "tinyllama-1.1b-f16.gguf", status: "download" + ), + + Model( + name: "Phi-2.7B (Q4_0, 1.6 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", + filename: "phi-2-q4_0.gguf", status: "download" + ), + + Model( + name: "Phi-2.7B (Q8_0, 2.8 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", + filename: "phi-2-q8_0.gguf", status: "download" + ), + + Model( + name: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)", + url: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", + filename: "mistral-7b-v0.1.Q4_0.gguf", status: "download" + ), + Model( + name: "OpenHermes-2.5-Mistral-7B (Q3_K_M, 3.52 GiB)", + url: "https://huggingface.co/TheBloke/OpenHermes-2.5-Mistral-7B-GGUF/resolve/main/openhermes-2.5-mistral-7b.Q3_K_M.gguf?download=true", + filename: "openhermes-2.5-mistral-7b.Q3_K_M.gguf", status: "download" + ) + ] func loadModel(modelUrl: URL?) 
throws { if let modelUrl { messageLog += "Loading model...\n" llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" + + // Assuming that the model is successfully loaded, update the downloaded models + updateDownloadedModels(modelName: modelUrl.lastPathComponent, status: "downloaded") } else { messageLog += "Load a model from the list below\n" } } + + private func updateDownloadedModels(modelName: String, status: String) { + undownloadedModels.removeAll { $0.name == modelName } + } + + func complete(text: String) async { guard let llamaContext else { return diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 7c81ea256..30c2dc431 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -2,115 +2,57 @@ import SwiftUI struct ContentView: View { @StateObject var llamaState = LlamaState() - @State private var multiLineText = "" - - private static func cleanupModelCaches() { - // Delete all models (*.gguf) - let fileManager = FileManager.default - let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] - do { - let fileURLs = try fileManager.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil) - for fileURL in fileURLs { - if fileURL.pathExtension == "gguf" { - try fileManager.removeItem(at: fileURL) - } - } - } catch { - print("Error while enumerating files \(documentsUrl.path): \(error.localizedDescription)") - } - } + @State private var showingHelp = false // To track if Help Sheet should be shown var body: some View { - VStack { - ScrollView(.vertical, showsIndicators: true) { - Text(llamaState.messageLog) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) + NavigationView { + VStack { + ScrollView(.vertical, showsIndicators: true) { + Text(llamaState.messageLog) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .onTapGesture { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } + } + + TextEditor(text: $multiLineText) + .frame(height: 80) + .padding() + .border(Color.gray, width: 0.5) + + HStack { + Button("Send") { + sendText() + } + + Button("Bench") { + bench() + } + + Button("Clear") { + clear() + } + + Button("Copy") { + UIPasteboard.general.string = llamaState.messageLog + } + } + .buttonStyle(.bordered) .padding() - .onTapGesture { - UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) - } - } - TextEditor(text: $multiLineText) - .frame(height: 80) + NavigationLink(destination: DrawerView(llamaState: llamaState)) { + Text("View Models") + } .padding() - .border(Color.gray, width: 0.5) - HStack { - Button("Send") { - sendText() - } - - Button("Bench") { - bench() - } - - Button("Clear") { - clear() - } - - Button("Copy") { - UIPasteboard.general.string = llamaState.messageLog - } - }.buttonStyle(.bordered) - - VStack(alignment: .leading) { - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (Q4_0, 0.6 GiB)", - modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", - filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (Q8_0, 1.1 
GiB)", - modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", - filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (F16, 2.2 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", - filename: "tinyllama-1.1b-f16.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Phi-2.7B (Q4_0, 1.6 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", - filename: "phi-2-q4_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Phi-2.7B (Q8_0, 2.8 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", - filename: "phi-2-q8_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)", - modelUrl: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", - filename: "mistral-7b-v0.1.Q4_0.gguf" - ) - - Button("Clear downloaded models") { - ContentView.cleanupModelCaches() - llamaState.cacheCleared = true - } - - LoadCustomButton(llamaState: llamaState) } - .padding(.top, 4) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .navigationBarTitle("Model Settings", displayMode: .inline) + } - .padding() } func sendText() { @@ -131,8 +73,73 @@ struct ContentView: View { await llamaState.clear() } } + struct DrawerView: View { + + @ObservedObject var llamaState: LlamaState + @State private var showingHelp = false + func delete(at offsets: IndexSet) { + offsets.forEach { offset in + let model = llamaState.downloadedModels[offset] + let fileURL = getDocumentsDirectory().appendingPathComponent(model.filename) + do { + try FileManager.default.removeItem(at: fileURL) + } catch { + print("Error deleting file: \(error)") + } + } + + // Remove models from downloadedModels array + llamaState.downloadedModels.remove(atOffsets: offsets) + } + + func getDocumentsDirectory() -> URL { + let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) + return paths[0] + } + var body: some View { + List { + Section(header: Text("Download Models From Hugging Face")) { + HStack { + InputButton(llamaState: llamaState) + } + } + Section(header: Text("Downloaded Models")) { + ForEach(llamaState.downloadedModels) { model in + DownloadButton(llamaState: llamaState, modelName: model.name, modelUrl: model.url, filename: model.filename) + } + .onDelete(perform: delete) + } + Section(header: Text("Default Models")) { + ForEach(llamaState.undownloadedModels) { model in + DownloadButton(llamaState: llamaState, modelName: model.name, modelUrl: model.url, filename: model.filename) + } + } + + } + .listStyle(GroupedListStyle()) + .navigationBarTitle("Model Settings", displayMode: .inline).toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Help") { + showingHelp = true + } + } + }.sheet(isPresented: $showingHelp) { // Sheet for help modal + VStack(alignment: .leading) { + VStack(alignment: .leading) { + Text("1. Make sure the model is in GGUF Format") + .padding() + Text("2. 
Copy the download link of the quantized model") + .padding() + } + Spacer() + } + } + } + } } -//#Preview { -// ContentView() -//} +struct ContentView_Previews: PreviewProvider { + static var previews: some View { + ContentView() + } +} diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift index c9f322ca1..4584d6eaa 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -53,6 +53,8 @@ struct DownloadButton: View { llamaState.cacheCleared = false + let model = Model(name: modelName, url: modelUrl, filename: filename, status: "downloaded") + llamaState.downloadedModels.append(model) status = "downloaded" } } catch let err { diff --git a/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift b/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift new file mode 100644 index 000000000..c5ffbad4e --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift @@ -0,0 +1,131 @@ +import SwiftUI + +struct InputButton: View { + @ObservedObject var llamaState: LlamaState + @State private var inputLink: String = "" + @State private var status: String = "download" + @State private var filename: String = "" + + @State private var downloadTask: URLSessionDownloadTask? + @State private var progress = 0.0 + @State private var observation: NSKeyValueObservation? + + private static func extractModelInfo(from link: String) -> (modelName: String, filename: String)? { + guard let url = URL(string: link), + let lastPathComponent = url.lastPathComponent.components(separatedBy: ".").first, + let modelName = lastPathComponent.components(separatedBy: "-").dropLast().joined(separator: "-").removingPercentEncoding, + let filename = lastPathComponent.removingPercentEncoding else { + return nil + } + + return (modelName, filename) + } + + private static func getFileURL(filename: String) -> URL { + FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename) + } + + private func download() { + guard let extractedInfo = InputButton.extractModelInfo(from: inputLink) else { + // Handle invalid link or extraction failure + return + } + + let (modelName, filename) = extractedInfo + self.filename = filename // Set the state variable + + status = "downloading" + print("Downloading model \(modelName) from \(inputLink)") + guard let url = URL(string: inputLink) else { return } + let fileURL = InputButton.getFileURL(filename: filename) + + downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in + if let error = error { + print("Error: \(error.localizedDescription)") + return + } + + guard let response = response as? 
HTTPURLResponse, (200...299).contains(response.statusCode) else { + print("Server error!") + return + } + + do { + if let temporaryURL = temporaryURL { + try FileManager.default.copyItem(at: temporaryURL, to: fileURL) + print("Writing to \(filename) completed") + + llamaState.cacheCleared = false + + let model = Model(name: modelName, url: self.inputLink, filename: filename, status: "downloaded") + llamaState.downloadedModels.append(model) + status = "downloaded" + } + } catch let err { + print("Error: \(err.localizedDescription)") + } + } + + observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in + self.progress = progress.fractionCompleted + } + + downloadTask?.resume() + } + + var body: some View { + VStack { + HStack { + TextField("Paste Quantized Download Link", text: $inputLink) + .textFieldStyle(RoundedBorderTextFieldStyle()) + + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("Cancel") + } + } + + if status == "download" { + Button(action: download) { + Text("Download Custom Model") + } + } else if status == "downloading" { + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("Downloading \(Int(progress * 100))%") + } + } else if status == "downloaded" { + Button(action: { + let fileURL = InputButton.getFileURL(filename: self.filename) + if !FileManager.default.fileExists(atPath: fileURL.path) { + download() + return + } + do { + try llamaState.loadModel(modelUrl: fileURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + }) { + Text("Load Custom Model") + } + } else { + Text("Unknown status") + } + } + .onDisappear() { + downloadTask?.cancel() + } + .onChange(of: llamaState.cacheCleared) { newValue in + if newValue { + downloadTask?.cancel() + let fileURL = InputButton.getFileURL(filename: self.filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + } + } +} From 930f907d3ece1eb5b0a1ec5e209983a66dcbfa68 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 12 Jan 2024 18:54:53 +0100 Subject: [PATCH 286/811] export-lora : use LLAMA_FILE_MAGIC_GGLA (#4894) This commit replaces the magic number used in export-lora.cpp with the one defined in llama.h, which is indirectly included via common.h. 
Signed-off-by: Daniel Bevenius --- examples/export-lora/export-lora.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 58fbe204d..4cd5d99bb 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -245,9 +245,8 @@ static struct lora_data * load_lora(struct lora_info * info) { params_ggml.no_alloc = true; result->ctx = ggml_init(params_ggml); - uint32_t LLAMA_FILE_MAGIC_LORA = 0x67676C61; // 'ggla' uint32_t magic = file.read_u32(); - if (magic != LLAMA_FILE_MAGIC_LORA) { + if (magic != LLAMA_FILE_MAGIC_GGLA) { die_fmt("unexpected lora header file magic in '%s'", info->filename.c_str()); } uint32_t version = file.read_u32(); From 584d674be622fbf1578694ada6e62eebedbfd377 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 20:54:12 +0200 Subject: [PATCH 287/811] llama : remove redundant assert for StableLM (#4901) --- llama.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 29f8873f6..ce413f605 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5530,7 +5530,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; From e7e4df031b9e29d4b55a4e0b0295187f6b213db1 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 12 Jan 2024 20:07:38 +0100 Subject: [PATCH 288/811] llama : ggml-backend integration (#4766) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * llama : ggml-backend integration * ggml-backend : add names to buffers * fix unmap after loading * batched-bench : add tensor_split param * llama : check for null tensor_split * ggml-backend : increase GGML_MAX_BACKENDS * improve graph splitting, partial fix for --no-kv-offload * cuda : add ggml-backend split buffer support * cuda : do not create buffer types for devices that don't exist (fixes usage without CUDA devices available) * ggml : fix null backend dereference (#4807) * ggml : fix null backend dereference * ggml : also check ggml_backend_is_cpu * test-backend-ops : check buffer allocation failures * llama : add cparam (split_mode) and command line argument (--split-mode, -sm) to configure the split mode (none, layer or row) * ggml : fix mul_mat_id work size * llama : rewrite session kv load/set without graphs * minor * llama : only initialize used backends, free backends on context free * llama : abort ctx if cuda backend init fails * llama : rewrite lora with ggml-backend and compute on CPU ggml-ci * llama : only map to a backend buffer the region of the file mapping containing the tensors used in the buffer * opencl : add ggml-backend buffer type * cuda : only use batched_cublas with batched mat muls (fixes fp16 tg perf) * llama : on Metal, by default offload the full model ggml-ci * metal : page align the data ptr (#4854) * Apply suggestions from code review Co-authored-by: Johannes Gäßler * cuda : fix split buffer free * address review comments * llama-bench : add split-mode parameter * fix whitespace * opencl : fix double initialization * server : add --split-mode parameter * use async copy and compute to improve multi-gpu performance ggml-ci * use async memcpys to copy the graph outputs to the CPU * fix opencl * use a host buffer for the cpu compute buffer for faster copies to the gpu --------- Co-authored-by: Georgi Gerganov Co-authored-by: 
Johannes Gäßler --- common/common.cpp | 65 +- common/common.h | 1 + examples/batched-bench/batched-bench.cpp | 3 + examples/llama-bench/llama-bench.cpp | 146 +- examples/server/server.cpp | 40 +- ggml-alloc.c | 34 +- ggml-alloc.h | 4 +- ggml-backend-impl.h | 38 +- ggml-backend.c | 693 ++++--- ggml-backend.h | 60 +- ggml-cuda.cu | 907 +++++---- ggml-cuda.h | 26 +- ggml-impl.h | 2 + ggml-metal.m | 55 +- ggml-opencl.cpp | 335 +++- ggml-opencl.h | 16 +- ggml.c | 30 +- ggml.h | 9 +- llama.cpp | 2320 +++++++++------------- llama.h | 18 +- tests/test-backend-ops.cpp | 26 +- 21 files changed, 2533 insertions(+), 2295 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 062a8b4de..322b9f91e 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -543,9 +543,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD params.n_gpu_layers = std::stoi(argv[i]); -#else +#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); #endif @@ -554,9 +553,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD params.n_gpu_layers_draft = std::stoi(argv[i]); -#else +#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); #endif @@ -565,25 +563,44 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef GGML_USE_CUBLAS params.main_gpu = std::stoi(argv[i]); -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. It is not possible to set a main GPU.\n"); -#endif +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the main GPU has no effect.\n"); +#endif // GGML_USE_CUBLAS + } else if (arg == "--split-mode" || arg == "-sm") { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string arg_next = argv[i]; + if (arg_next == "none") { + params.split_mode = LLAMA_SPLIT_NONE; + } else if (arg_next == "layer") { + params.split_mode = LLAMA_SPLIT_LAYER; + } else if (arg_next == "row") { + params.split_mode = LLAMA_SPLIT_ROW; + } else { + invalid_param = true; + break; + } +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the split mode has no effect.\n"); +#endif // GGML_USE_CUBLAS } else if (arg == "--tensor-split" || arg == "-ts") { if (++i >= argc) { invalid_param = true; break; } -#ifdef GGML_USE_CUBLAS std::string arg_next = argv[i]; // split string by , and / const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); - + if (split_arg.size() >= LLAMA_MAX_DEVICES) { + invalid_param = true; + break; + } for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { if (i < split_arg.size()) { params.tensor_split[i] = std::stof(split_arg[i]); @@ -591,14 +608,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.tensor_split[i] = 0.0f; } } -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. 
It is not possible to set a tensor split.\n"); -#endif // GGML_USE_CUBLAS - } else if (arg == "--no-mul-mat-q" || arg == "-nommq") { -#ifdef GGML_USE_CUBLAS - params.mul_mat_q = false; -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Disabling mul_mat_q kernels has no effect.\n"); +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting a tensor split has no effect.\n"); #endif // GGML_USE_CUBLAS } else if (arg == "--no-mmap") { params.use_mmap = false; @@ -915,14 +926,15 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" number of layers to store in VRAM\n"); printf(" -ngld N, --n-gpu-layers-draft N\n"); printf(" number of layers to store in VRAM for the draft model\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT, --tensor-split SPLIT\n"); - printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); -#ifdef GGML_USE_CUBLAS - printf(" -nommq, --no-mul-mat-q\n"); - printf(" use " GGML_CUBLAS_NAME " instead of custom mul_mat_q " GGML_CUDA_NAME " kernels.\n"); - printf(" Not recommended since this is both slower and uses more VRAM.\n"); -#endif // GGML_USE_CUBLAS + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); #endif printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); @@ -1041,6 +1053,7 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & mparams.n_gpu_layers = params.n_gpu_layers; } mparams.main_gpu = params.main_gpu; + mparams.split_mode = params.split_mode; mparams.tensor_split = params.tensor_split; mparams.use_mmap = params.use_mmap; mparams.use_mlock = params.use_mlock; diff --git a/common/common.h b/common/common.h index 1359e76ab..f29be5b5a 100644 --- a/common/common.h +++ b/common/common.h @@ -59,6 +59,7 @@ struct gpt_params { float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) + llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. 
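For reference, the --split-mode plumbing above ends up in the new llama_model_params.split_mode field. Below is a minimal, hypothetical sketch (not taken from this patch) of how a caller might request row-wise splitting through the C API; the model path, layer count, and split proportions are placeholder values chosen only for illustration:

    // hypothetical example of configuring the new split_mode model parameter
    #include "llama.h"
    #include <vector>

    int main() {
        // placeholder proportions: roughly 3/4 of the weights on GPU 0, 1/4 on GPU 1
        std::vector<float> t_split(LLAMA_MAX_DEVICES, 0.0f);
        t_split[0] = 3.0f;
        t_split[1] = 1.0f;

        llama_model_params mparams = llama_model_default_params();
        mparams.n_gpu_layers = 99;               // offload all layers
        mparams.split_mode   = LLAMA_SPLIT_ROW;  // or LLAMA_SPLIT_LAYER / LLAMA_SPLIT_NONE
        mparams.main_gpu     = 0;                // holds intermediate results and KV when split-mode = row
        mparams.tensor_split = t_split.data();

        llama_model * model = llama_load_model_from_file("model.gguf", mparams);
        if (model == NULL) {
            return 1;
        }
        llama_free_model(model);
        return 0;
    }

The same configuration is reachable from the command line through the flags added in this patch, e.g. -ngl 99 -sm row -ts 3,1 -mg 0.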
diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 57596ed98..7924db267 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -88,7 +88,10 @@ int main(int argc, char ** argv) { llama_model_params model_params = llama_model_default_params(); + const std::vector t_split (LLAMA_MAX_DEVICES, 0.0f); + model_params.n_gpu_layers = n_gpu_layers; + model_params.tensor_split = t_split.data(); llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 7f7186cde..97325b5bd 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -128,6 +128,25 @@ static std::string get_gpu_info() { // command line params enum output_formats {CSV, JSON, MARKDOWN, SQL}; +static const char * output_format_str(output_formats format) { + switch (format) { + case CSV: return "csv"; + case JSON: return "json"; + case MARKDOWN: return "md"; + case SQL: return "sql"; + default: GGML_ASSERT(!"invalid output format"); + } +} + +static const char * split_mode_str(llama_split_mode mode) { + switch (mode) { + case LLAMA_SPLIT_NONE: return "none"; + case LLAMA_SPLIT_LAYER: return "layer"; + case LLAMA_SPLIT_ROW: return "row"; + default: GGML_ASSERT(!"invalid split mode"); + } +} + struct cmd_params { std::vector model; std::vector n_prompt; @@ -137,6 +156,7 @@ struct cmd_params { std::vector type_v; std::vector n_threads; std::vector n_gpu_layers; + std::vector split_mode; std::vector main_gpu; std::vector no_kv_offload; std::vector mul_mat_q; @@ -155,6 +175,7 @@ static const cmd_params cmd_params_defaults = { /* type_v */ {GGML_TYPE_F16}, /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, + /* split_mode */ {LLAMA_SPLIT_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, @@ -169,21 +190,22 @@ static void print_usage(int /* argc */, char ** argv) { printf("\n"); printf("options:\n"); printf(" -h, --help\n"); - printf(" -m, --model (default: %s)\n", join(cmd_params_defaults.model, ",").c_str()); - printf(" -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); - printf(" -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); - printf(" -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); - printf(" -ctk , --cache-type-k (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_k, ggml_type_name), ",").c_str()); - printf(" -ctv , --cache-type-v (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_v, ggml_type_name), ",").c_str()); - printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); - printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); - printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); - printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); - printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); - printf(" -ts, --tensor_split \n"); - printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); - printf(" -o, --output (default: %s)\n", cmd_params_defaults.output_format == CSV ? "csv" : cmd_params_defaults.output_format == JSON ? "json" : cmd_params_defaults.output_format == MARKDOWN ? 
"md" : "sql"); - printf(" -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); + printf(" -m, --model (default: %s)\n", join(cmd_params_defaults.model, ",").c_str()); + printf(" -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); + printf(" -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); + printf(" -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); + printf(" -ctk , --cache-type-k (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_k, ggml_type_name), ",").c_str()); + printf(" -ctv , --cache-type-v (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_v, ggml_type_name), ",").c_str()); + printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); + printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); + printf(" -sm, --split-mode (default: %s)\n", join(transform_to_str(cmd_params_defaults.split_mode, split_mode_str), ",").c_str()); + printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); + printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); + printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); + printf(" -ts, --tensor_split (default: 0)\n"); + printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); + printf(" -o, --output (default: %s)\n", output_format_str(cmd_params_defaults.output_format)); + printf(" -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); printf("\n"); printf("Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times.\n"); } @@ -306,6 +328,28 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.n_gpu_layers.insert(params.n_gpu_layers.end(), p.begin(), p.end()); + } else if (arg == "-sm" || arg == "--split-mode") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + std::vector modes; + for (const auto & m : p) { + llama_split_mode mode; + if (m == "none") { + mode = LLAMA_SPLIT_NONE; + } else if (m == "layer") { + mode = LLAMA_SPLIT_LAYER; + } else if (m == "row") { + mode = LLAMA_SPLIT_ROW; + } else { + invalid_param = true; + break; + } + modes.push_back(mode); + } + params.split_mode.insert(params.split_mode.end(), modes.begin(), modes.end()); } else if (arg == "-mg" || arg == "--main-gpu") { if (++i >= argc) { invalid_param = true; @@ -392,6 +436,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.type_k.empty()) { params.type_k = cmd_params_defaults.type_k; } if (params.type_v.empty()) { params.type_v = cmd_params_defaults.type_v; } if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; } + if (params.split_mode.empty()) { params.split_mode = cmd_params_defaults.split_mode; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } @@ -410,6 +455,7 @@ struct cmd_params_instance { ggml_type type_v; int n_threads; int n_gpu_layers; + llama_split_mode split_mode; int main_gpu; bool no_kv_offload; bool mul_mat_q; @@ -419,6 +465,7 @@ struct 
cmd_params_instance { llama_model_params mparams = llama_model_default_params(); mparams.n_gpu_layers = n_gpu_layers; + mparams.split_mode = split_mode; mparams.main_gpu = main_gpu; mparams.tensor_split = tensor_split.data(); @@ -428,6 +475,7 @@ struct cmd_params_instance { bool equal_mparams(const cmd_params_instance & other) const { return model == other.model && n_gpu_layers == other.n_gpu_layers && + split_mode == other.split_mode && main_gpu == other.main_gpu && tensor_split == other.tensor_split; } @@ -446,45 +494,13 @@ struct cmd_params_instance { } }; -static std::vector get_cmd_params_instances_int(const cmd_params & params, int n_gen, int n_prompt) { - std::vector instances; - - for (const auto & m : params.model) - for (const auto & nl : params.n_gpu_layers) - for (const auto & mg : params.main_gpu) - for (const auto & ts : params.tensor_split) - for (const auto & nb : params.n_batch) - for (const auto & tk : params.type_k) - for (const auto & tv : params.type_v) - for (const auto & mmq : params.mul_mat_q) - for (const auto & nkvo : params.no_kv_offload) - for (const auto & nt : params.n_threads) { - cmd_params_instance instance = { - /* .model = */ m, - /* .n_prompt = */ n_prompt, - /* .n_gen = */ n_gen, - /* .n_batch = */ nb, - /* .type_k = */ tk, - /* .type_v = */ tv, - /* .n_threads = */ nt, - /* .n_gpu_layers = */ nl, - /* .main_gpu = */ mg, - /* .no_kv_offload= */ nkvo, - /* .mul_mat_q = */ mmq, - /* .tensor_split = */ ts, - }; - instances.push_back(instance); - } - return instances; -} - static std::vector get_cmd_params_instances(const cmd_params & params) { std::vector instances; -#if 1 // this ordering minimizes the number of times that each model needs to be reloaded for (const auto & m : params.model) for (const auto & nl : params.n_gpu_layers) + for (const auto & sm : params.split_mode) for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) for (const auto & nb : params.n_batch) @@ -506,6 +522,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, + /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, @@ -527,6 +544,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, + /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, @@ -535,24 +553,6 @@ static std::vector get_cmd_params_instances(const cmd_param instances.push_back(instance); } } -#else - // this ordering separates the prompt and generation tests - for (const auto & n_prompt : params.n_prompt) { - if (n_prompt == 0) { - continue; - } - auto instances_prompt = get_cmd_params_instances_int(params, 0, n_prompt); - instances.insert(instances.end(), instances_prompt.begin(), instances_prompt.end()); - } - - for (const auto & n_gen : params.n_gen) { - if (n_gen == 0) { - continue; - } - auto instances_gen = get_cmd_params_instances_int(params, n_gen, 0); - instances.insert(instances.end(), instances_gen.begin(), instances_gen.end()); - } -#endif return instances; } @@ -576,6 +576,7 @@ struct test { ggml_type type_k; ggml_type type_v; int n_gpu_layers; + llama_split_mode split_mode; int main_gpu; bool no_kv_offload; bool mul_mat_q; @@ -597,6 +598,7 @@ struct test { type_k = inst.type_k; type_v = inst.type_v; n_gpu_layers = inst.n_gpu_layers; + split_mode = inst.split_mode; main_gpu = inst.main_gpu; no_kv_offload = inst.no_kv_offload; mul_mat_q 
= inst.mul_mat_q; @@ -660,7 +662,8 @@ struct test { "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "no_kv_offload", + "n_gpu_layers", "split_mode", + "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", @@ -711,7 +714,8 @@ struct test { cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), - std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(no_kv_offload), + std::to_string(n_gpu_layers), split_mode_str(split_mode), + std::to_string(main_gpu), std::to_string(no_kv_offload), std::to_string(mul_mat_q), tensor_split_str, std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), @@ -867,6 +871,9 @@ struct markdown_printer : public printer { if (field == "n_gpu_layers") { return "ngl"; } + if (field == "split_mode") { + return "sm"; + } if (field == "n_threads") { return "threads"; } @@ -907,6 +914,9 @@ struct markdown_printer : public printer { if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { fields.push_back("main_gpu"); } + if (params.split_mode.size() > 1 || params.split_mode != cmd_params_defaults.split_mode) { + fields.push_back("split_mode"); + } if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { fields.push_back("mul_mat_q"); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1d30a15a6..c1ab8f9dc 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2005,12 +2005,15 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, #ifdef LLAMA_SUPPORTS_GPU_OFFLOAD printf(" -ngl N, --n-gpu-layers N\n"); printf(" number of layers to store in VRAM\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); - printf(" -nommq, --no-mul-mat-q\n"); - printf(" use cuBLAS instead of custom mul_mat_q CUDA kernels.\n"); - printf(" Not recommended since this is both slower and uses more VRAM.\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 
3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); #endif printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); @@ -2253,6 +2256,33 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, "See main README.md for information on enabling GPU BLAS support", {{"n_gpu_layers", params.n_gpu_layers}}); #endif + } + else if (arg == "--split-mode" || arg == "-sm") + { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string arg_next = argv[i]; + if (arg_next == "none") + { + params.split_mode = LLAMA_SPLIT_NONE; + } + else if (arg_next == "layer") + { + params.split_mode = LLAMA_SPLIT_LAYER; + } + else if (arg_next == "row") + { + params.split_mode = LLAMA_SPLIT_ROW; + } + else { + invalid_param = true; + break; + } +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the split mode has no effect.\n"); +#endif // GGML_USE_CUBLAS } else if (arg == "--tensor-split" || arg == "-ts") { diff --git a/ggml-alloc.c b/ggml-alloc.c index a27dd54b0..89b85d348 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -102,8 +102,6 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { } } - AT_PRINTF("block %d\n", best_fit_block); - if (best_fit_block == -1) { // the last block is our last resort struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; @@ -117,6 +115,7 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { return; } } + struct free_block * block = &alloc->free_blocks[best_fit_block]; void * addr = block->addr; block->addr = (char*)block->addr + size; @@ -129,6 +128,8 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { } } + AT_PRINTF("block %d, addr %p\n", best_fit_block, addr); + tensor->data = addr; tensor->buffer = alloc->buffer; if (!alloc->measure) { @@ -229,6 +230,7 @@ void ggml_tallocr_reset(ggml_tallocr_t alloc) { alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows } else { alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; + ggml_backend_buffer_reset(alloc->buffer); } } @@ -263,9 +265,9 @@ ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment) { return alloc; } -ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { +ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft) { // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, 1); + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, 1); // TODO: move alloc initialization to a common ggml_tallocr_new_impl function ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); @@ -275,13 +277,22 @@ ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backe return alloc; } -ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { - ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, size); +ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { + return ggml_tallocr_new_measure_from_buft(ggml_backend_get_default_buffer_type(backend)); +} + +ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t 
size) { + // create a backend buffer to get the correct tensor allocation sizes + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, size); ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); alloc->buffer_owned = true; return alloc; } +ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { + return ggml_tallocr_new_from_buft(ggml_backend_get_default_buffer_type(backend), size); +} + ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer) { ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); @@ -779,10 +790,21 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte if (nbytes == 0) { // all the tensors in the context are already allocated +#ifndef NDEBUG + fprintf(stderr, "%s: all tensors in the context are already allocated\n", __func__); +#endif return NULL; } ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, nbytes); + if (buffer == NULL) { + // failed to allocate buffer +#ifndef NDEBUG + fprintf(stderr, "%s: failed to allocate buffer\n", __func__); +#endif + return NULL; + } + ggml_tallocr_t tallocr = ggml_tallocr_new_from_buffer(buffer); for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { diff --git a/ggml-alloc.h b/ggml-alloc.h index 64a412468..4e5997521 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -52,8 +52,10 @@ typedef struct ggml_tallocr * ggml_tallocr_t; GGML_API ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment); GGML_API ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); +GGML_API ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size); GGML_API ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer +GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); +GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft); GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend); GGML_API struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t talloc); diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index ca21b4743..1db32901f 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -16,9 +16,10 @@ extern "C" { typedef void * ggml_backend_buffer_type_context_t; struct ggml_backend_buffer_type_i { + const char * (*get_name) (ggml_backend_buffer_type_t buft); ggml_backend_buffer_t (*alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment - size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding + size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend // check if tensor data is in host memory // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) @@ -34,16 +35,15 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - void (*free_buffer) 
(ggml_backend_buffer_t buffer); - //void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - // (optional) copy tensor between different buffer-type, allow for single-copy tranfers - void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); + const char * (*get_name) (ggml_backend_buffer_t buffer); + void (*free_buffer)(ggml_backend_buffer_t buffer); + void * (*get_base) (ggml_backend_buffer_t buffer); + void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer + void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); + void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras }; struct ggml_backend_buffer { @@ -51,6 +51,7 @@ extern "C" { ggml_backend_buffer_type_t buft; ggml_backend_buffer_context_t context; size_t size; + enum ggml_backend_buffer_usage usage; }; ggml_backend_buffer_t ggml_backend_buffer_init( @@ -59,6 +60,8 @@ extern "C" { ggml_backend_buffer_context_t context, size_t size); + // do not use directly, use ggml_backend_tensor_copy instead + bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst); // // Backend @@ -74,22 +77,20 @@ extern "C" { // buffer allocation ggml_backend_buffer_type_t (*get_default_buffer_type)(ggml_backend_t backend); - // (optional) asynchroneous tensor data access + // (optional) asynchronous tensor data access void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); - // (optional) asynchroneous tensor copy - void (*cpy_tensor_from_async)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to_async) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - + // (optional) complete all pending operations void (*synchronize)(ggml_backend_t backend); // compute graph with a plan - ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph); + ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); void (*graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); 
void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); - // compute graph without a plan + // compute graph without a plan (async) bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation @@ -102,7 +103,6 @@ extern "C" { ggml_backend_context_t context; }; - // // Backend registry // diff --git a/ggml-backend.c b/ggml-backend.c index 53e741cb8..4c2d8b0b2 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -15,6 +15,10 @@ // backend buffer type +const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { + return buft->iface.get_name(buft); +} + ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { return buft->iface.alloc_buffer(buft, size); } @@ -58,11 +62,16 @@ ggml_backend_buffer_t ggml_backend_buffer_init( /* .buft = */ buft, /* .context = */ context, /* .size = */ size, + /* .usage = */ GGML_BACKEND_BUFFER_USAGE_ANY }; return buffer; } +const char * ggml_backend_buffer_name(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name(buffer); +} + void ggml_backend_buffer_free(ggml_backend_buffer_t buffer) { if (buffer == NULL) { return; @@ -94,11 +103,11 @@ void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_t } size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer) { - return ggml_backend_buft_get_alignment(ggml_backend_buffer_type(buffer)); + return ggml_backend_buft_get_alignment(ggml_backend_buffer_get_type(buffer)); } size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { - return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type(buffer), tensor); + return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_get_type(buffer), tensor); } void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { @@ -106,13 +115,31 @@ void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { } bool ggml_backend_buffer_is_host(ggml_backend_buffer_t buffer) { - return ggml_backend_buft_is_host(ggml_backend_buffer_type(buffer)); + return ggml_backend_buft_is_host(ggml_backend_buffer_get_type(buffer)); } -ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer) { +void ggml_backend_buffer_set_usage(ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage) { + buffer->usage = usage; +} + +ggml_backend_buffer_type_t ggml_backend_buffer_get_type(ggml_backend_buffer_t buffer) { return buffer->buft; } +void ggml_backend_buffer_reset(ggml_backend_buffer_t buffer) { + if (buffer->iface.reset) { + buffer->iface.reset(buffer); + } +} + +bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst) { + ggml_backend_buffer_t dst_buf = dst->view_src ? 
dst->view_src->buffer : dst->buffer; + if (dst_buf->iface.cpy_tensor) { + return src->buffer->iface.cpy_tensor(dst_buf, src, dst); + } + return false; +} + // backend const char * ggml_backend_name(ggml_backend_t backend) { @@ -146,30 +173,42 @@ void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - backend->iface.set_tensor_async(backend, tensor, data, offset, size); + if (backend->iface.set_tensor_async == NULL) { + ggml_backend_tensor_set(tensor, data, offset, size); + } else { + backend->iface.set_tensor_async(backend, tensor, data, offset, size); + } } void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - backend->iface.get_tensor_async(backend, tensor, data, offset, size); + if (backend->iface.get_tensor_async == NULL) { + ggml_backend_tensor_get(tensor, data, offset, size); + } else { + backend->iface.get_tensor_async(backend, tensor, data, offset, size); + } } void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); + GGML_ASSERT(buf != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - tensor->buffer->iface.set_tensor(tensor->buffer, tensor, data, offset, size); + tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? 
tensor->view_src->buffer : tensor->buffer; + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - tensor->buffer->iface.get_tensor(tensor->buffer, tensor, data, offset, size); + tensor->buffer->iface.get_tensor(buf, tensor, data, offset, size); } void ggml_backend_synchronize(ggml_backend_t backend) { @@ -190,19 +229,10 @@ void ggml_backend_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_pla void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { backend->iface.graph_plan_compute(backend, plan); - - // TODO: optional sync - ggml_backend_synchronize(backend); } bool ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { - if (!backend->iface.graph_compute(backend, cgraph)) { - return false; - } - - // TODO: optional sync - ggml_backend_synchronize(backend); - return true; + return backend->iface.graph_compute(backend, cgraph); } bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -227,28 +257,20 @@ static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml } void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst) { - //printf("src: %s ne: [%d %d %d %d] nb: [%d %d %d %d]\n", src->name, (int)src->ne[0], (int)src->ne[1], (int)src->ne[2], (int)src->ne[3], (int)src->nb[0], (int)src->nb[1], (int)src->nb[2], (int)src->nb[3]); - //printf("dst: %s ne: [%d %d %d %d] nb: [%d %d %d %d]\n", dst->name, (int)dst->ne[0], (int)dst->ne[1], (int)dst->ne[2], (int)dst->ne[3], (int)dst->nb[0], (int)dst->nb[1], (int)dst->nb[2], (int)dst->nb[3]); GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); - // fprintf(stderr, "cpy tensor %s from %s to %s (%lu bytes)\n", src->name, ggml_backend_name(src->backend), ggml_backend_name(dst->backend), ggml_nbytes(src)); - if (src == dst) { return; } - // TODO: allow backends to support copy to/from same backend - - if (dst->buffer->iface.cpy_tensor_from != NULL) { - dst->buffer->iface.cpy_tensor_from(dst->buffer, src, dst); - } else if (src->buffer->iface.cpy_tensor_to != NULL) { - src->buffer->iface.cpy_tensor_to(src->buffer, src, dst); - } else { - // shouldn't be hit when copying from/to CPU - #ifndef NDEBUG - fprintf(stderr, "ggml_backend_tensor_copy: neither cpy_tensor_from nor cpy_tensor_to " - "are implemented for %s and %s, falling back to get/set\n", src->name, dst->name); - #endif + if (ggml_backend_buffer_is_host(src->buffer)) { + ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); + } else if (ggml_backend_buffer_is_host(dst->buffer)) { + ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); + } else if (!ggml_backend_buffer_copy_tensor(src, dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: warning: slow copy from %s to %s\n", __func__, ggml_backend_buffer_name(src->buffer), ggml_backend_buffer_name(dst->buffer)); +#endif size_t nbytes = ggml_nbytes(src); void * data = malloc(nbytes); ggml_backend_tensor_get(src, data, 0, nbytes); @@ -257,6 +279,31 @@ void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst } } +void ggml_backend_tensor_copy_async(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { + GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); + + if (src == dst) { + return; + } + + if 
(ggml_backend_buft_supports_backend(src->buffer->buft, backend) && ggml_backend_buft_supports_backend(dst->buffer->buft, backend)) { + if (backend->iface.cpy_tensor_async != NULL) { + if (backend->iface.cpy_tensor_async(backend, src, dst)) { + return; + } + } + } + + size_t nbytes = ggml_nbytes(src); + if (ggml_backend_buffer_is_host(src->buffer)) { + ggml_backend_tensor_set_async(backend, dst, src->data, 0, nbytes); + } + else { + ggml_backend_tensor_copy(src, dst); + } +} + + // backend registry #define GGML_MAX_BACKENDS_REG 16 @@ -392,6 +439,12 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU +static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { + return "CPU"; + + GGML_UNUSED(buffer); +} + static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { return (void *)buffer->context; } @@ -412,14 +465,12 @@ static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, con GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - GGML_UNUSED(buffer); -} - -static void ggml_backend_cpu_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); +static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; GGML_UNUSED(buffer); } @@ -429,30 +480,38 @@ static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t } static struct ggml_backend_buffer_i cpu_backend_buffer_i = { + /* .get_name = */ ggml_backend_cpu_buffer_name, /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, /* .get_base = */ ggml_backend_cpu_buffer_get_base, /* .init_tensor = */ NULL, // no initialization required /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, }; // for buffers from ptr, free is not called static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { + /* .get_name = */ ggml_backend_cpu_buffer_name, /* .free_buffer = */ NULL, // ptr is not owned by the buffer, so it does not need to be freed /* .get_base = */ ggml_backend_cpu_buffer_get_base, /* .init_tensor = */ NULL, // no initialization required /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, }; static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 +static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU"; + + GGML_UNUSED(buft); +} + static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t 
buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? @@ -483,6 +542,7 @@ static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes @@ -501,6 +561,18 @@ ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { #include +static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU_HBM"; + + GGML_UNUSED(buft); +} + +static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { + return "CPU_HBM"; + + GGML_UNUSED(buf); +} + static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { hbw_free(buffer->context); } @@ -514,17 +586,18 @@ static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_ return NULL; } - // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); buffer->buft = buft; + buffer->iface.get_name = ggml_backend_cpu_hbm_buffer_get_name; buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; return buffer; } -ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type() { +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_hbm_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes @@ -568,7 +641,7 @@ struct ggml_backend_plan_cpu { struct ggml_cgraph cgraph; }; -static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); @@ -634,8 +707,7 @@ static struct ggml_backend_i cpu_backend_i = { /* .get_default_buffer_type = */ ggml_backend_cpu_get_default_buffer_type, /* .set_tensor_async = */ NULL, /* .get_tensor_async = */ NULL, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ NULL, /* .synchronize = */ NULL, /* .graph_plan_create = */ ggml_backend_cpu_graph_plan_create, /* .graph_plan_free = */ ggml_backend_cpu_graph_plan_free, @@ -661,7 +733,7 @@ ggml_backend_t ggml_backend_cpu_init(void) { } bool ggml_backend_is_cpu(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_cpu_name; + return backend && backend->iface.get_name == ggml_backend_cpu_name; } void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { @@ -685,7 +757,7 @@ static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user // scheduler -#define GGML_MAX_BACKENDS 4 +#define 
GGML_MAX_BACKENDS 16 #define GGML_MAX_SPLITS 256 #define GGML_MAX_SPLIT_INPUTS 16 @@ -695,21 +767,29 @@ struct ggml_backend_sched_split { int i_end; struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; int n_inputs; + // graph view of this split struct ggml_cgraph graph; }; struct ggml_backend_sched { + bool is_reset; // true if the scheduler has been reset since the last graph split + int n_backends; ggml_backend_t backends[GGML_MAX_BACKENDS]; + ggml_backend_buffer_type_t bufts[GGML_MAX_BACKENDS]; ggml_tallocr_t tallocs[GGML_MAX_BACKENDS]; ggml_gallocr_t galloc; + // hash keys of the nodes in the graph struct ggml_hash_set hash_set; - ggml_tallocr_t * node_talloc; // [hash_set.size] - struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // [hash_set.size][GGML_MAX_BACKENDS] + // hash values (arrays of [hash_set.size]) + ggml_tallocr_t * node_talloc; // tallocr assigned to each node (indirectly this is the backend) + struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // copies of each node for each destination backend + // copy of the graph with modified inputs struct ggml_cgraph * graph; + struct ggml_backend_sched_split splits[GGML_MAX_SPLITS]; int n_splits; @@ -750,14 +830,22 @@ static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) return INT_MAX; } -static ggml_backend_t get_buffer_backend(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { +static ggml_tallocr_t sched_allocr_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { if (buffer == NULL) { return NULL; } + + // check if this is already allocate in a allocr buffer (from user manual allocations) + for (int i = 0; i < sched->n_backends; i++) { + if (ggml_tallocr_get_buffer(sched->tallocs[i]) == buffer) { + return sched->tallocs[i]; + } + } + // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (ggml_backend_buft_supports_backend(buffer->buft, sched->backends[i])) { - return sched->backends[i]; + return sched->tallocs[i]; } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); @@ -767,7 +855,6 @@ static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_talloc if (allocr == NULL) { return NULL; } - // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (sched->tallocs[i] == allocr) { return sched->backends[i]; @@ -777,7 +864,7 @@ static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_talloc } #if 0 -static char causes[GGML_DEFAULT_GRAPH_SIZE*8 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug, remove +static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug only #define SET_CAUSE(node, ...) sprintf(causes[hash_id(node)], __VA_ARGS__) #define GET_CAUSE(node) causes[hash_id(node)] #else @@ -786,45 +873,37 @@ static char causes[GGML_DEFAULT_GRAPH_SIZE*8 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_IN #endif // returns the backend that should be used for the node based on the current locations -static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { - // if the dst tensor is already allocated in a buffer, we must assume that it is critical to keep it there - // ie. kv cache updates - // note that this doesn't allow fallback to CPU. need to add output tensors to the splits to copy the data back to the original backend. 
+static ggml_tallocr_t sched_allocr_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { + // assign pre-allocated nodes to their backend // dst - ggml_backend_t cur_backend = get_buffer_backend(sched, node->buffer); - if (cur_backend != NULL) { + ggml_tallocr_t cur_allocr = sched_allocr_from_buffer(sched, node->buffer); + if (cur_allocr != NULL) { SET_CAUSE(node, "1.dst"); - return cur_backend; + return cur_allocr; } - // view_src - if (node->view_src != NULL && get_buffer_backend(sched, node->view_src->buffer) != NULL) { - SET_CAUSE(node, "1.vsrc"); - return get_buffer_backend(sched, node->view_src->buffer); + if (node->view_src != NULL) { + cur_allocr = sched_allocr_from_buffer(sched, node->view_src->buffer); + if (cur_allocr != NULL) { + SET_CAUSE(node, "1.vsrc"); + return cur_allocr; + } } - - // src - int cur_prio = INT_MAX; - size_t cur_size = 0; - + // assign nodes that use weights to the backend of the weights for (int i = 0; i < GGML_MAX_SRC; i++) { const struct ggml_tensor * src = node->src[i]; if (src == NULL) { break; } - ggml_backend_t src_backend = get_buffer_backend(sched, src->buffer); - if (src_backend != NULL) { - int src_prio = sched_backend_prio(sched, src_backend); - size_t src_size = ggml_nbytes(src); - if (src_prio < cur_prio && src_size >= cur_size) { - cur_prio = src_prio; - cur_size = src_size; - cur_backend = src_backend; - SET_CAUSE(node, "1.src%d", i); - } + if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { + ggml_tallocr_t src_allocr = sched_allocr_from_buffer(sched, src->buffer); + // operations with weights are always run on the same backend as the weights + SET_CAUSE(node, "1.wgt%d", i); + return src_allocr; } } - return cur_backend; + + return NULL; } static char * fmt_size(size_t size) { @@ -857,7 +936,7 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } ggml_tallocr_t node_allocr = node_allocr(node); ggml_backend_t node_backend = node_allocr ? get_allocr_backend(sched, node_allocr) : NULL; // FIXME: - fprintf(stderr, "node #%3d (%10.10s): %20.20s (%4.4s) [%4.4s %8.8s]:", i, ggml_op_name(node->op), node->name, + fprintf(stderr, "node #%3d (%10.10s): %20.20s (%5.5s) [%5.5s %8.8s]:", i, ggml_op_name(node->op), node->name, fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", GET_CAUSE(node)); for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; @@ -866,7 +945,7 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } ggml_tallocr_t src_allocr = node_allocr(src); ggml_backend_t src_backend = src_allocr ? get_allocr_backend(sched, src_allocr) : NULL; - fprintf(stderr, " %20.20s (%4.4s) [%4.4s %8.8s]", src->name, + fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? 
ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); } fprintf(stderr, "\n"); @@ -882,15 +961,17 @@ static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, co return dup; } + +//#define DEBUG_PASS1 +//#define DEBUG_PASS2 +//#define DEBUG_PASS3 +//#define DEBUG_PASS4 + // assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend -// TODO: merge passes static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { - // reset state - size_t hash_size = sched->hash_set.size; - memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); - memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); - memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); + // reset splits sched->n_splits = 0; + sched->is_reset = false; struct ggml_init_params params = { /* .mem_size = */ sizeof(sched->context_buffer), @@ -898,26 +979,22 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g /* .no_alloc = */ true }; - if (sched->ctx != NULL) { - ggml_free(sched->ctx); - } + ggml_free(sched->ctx); sched->ctx = ggml_init(params); + if (sched->ctx == NULL) { + fprintf(stderr, "%s: failed to initialize context\n", __func__); + GGML_ASSERT(false); + } - // pass 1: assign backends to ops with allocated inputs + // pass 1: assign backends to ops with pre-allocated inputs for (int i = 0; i < graph->n_leafs; i++) { struct ggml_tensor * leaf = graph->leafs[i]; if (node_allocr(leaf) != NULL) { // do not overwrite user assignments continue; } - ggml_backend_t leaf_backend = get_buffer_backend(sched, leaf->buffer); - if (leaf_backend == NULL && leaf->view_src != NULL) { - leaf_backend = get_buffer_backend(sched, leaf->view_src->buffer); - } - if (leaf_backend != NULL) { - node_allocr(leaf) = ggml_backend_sched_get_tallocr(sched, leaf_backend); - } + node_allocr(leaf) = sched_allocr_from_cur(sched, leaf); } for (int i = 0; i < graph->n_nodes; i++) { @@ -926,50 +1003,102 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // do not overwrite user assignments continue; } - ggml_backend_t node_backend = sched_backend_from_cur(sched, node); - if (node_backend != NULL) { - node_allocr(node) = ggml_backend_sched_get_tallocr(sched, node_backend); + node_allocr(node) = sched_allocr_from_cur(sched, node); + // src + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + if (node_allocr(src) == NULL) { + node_allocr(src) = sched_allocr_from_cur(sched, src); + } } } - //printf("PASS 1 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#ifdef DEBUG_PASS1 + fprintf(stderr, "PASS 1 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif - // pass 2: assign backends to ops from current assignments - // TODO: - // - reuse sched_backend_from_cur - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr == NULL) { - int cur_prio = INT_MAX; - size_t cur_size = 0; - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * src = node->src[j]; - if (src == NULL) { - break; - } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != NULL) { - int src_prio = sched_allocr_prio(sched, src_allocr); - size_t src_size = ggml_nbytes(src); - if (src_prio < cur_prio && src_size >= cur_size) { - cur_prio = src_prio; - cur_size = src_size; - node_allocr = 
src_allocr; - SET_CAUSE(node, "2.src%d", j); - } - } + // pass 2: expand current backend assignments + // assign the same backend to adjacent nodes + // expand gpu backends (i.e. non last prio) up and down, ignoring cpu (the lowest priority backend) + // thus, cpu will never be used unless weights are on cpu, or there are no gpu ops between cpu ops + + // pass 2.1 expand gpu up + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; } + ggml_tallocr_t node_allocr = node_allocr(node); if (node_allocr != NULL) { - node_allocr(node) = node_allocr; + if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_allocr = NULL; + } else { + cur_allocr = node_allocr; + } + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.1"); } } } - //printf("PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); - // pass 3: assign backends to remaining src from dst (should only be leafs) + // pass 2.2 expand gpu down + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_allocr = NULL; + } else { + cur_allocr = node_allocr; + } + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.2"); + } + } + } + + // pass 2.3 expand rest up + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + cur_allocr = node_allocr; + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.3"); + } + } + } +#ifdef DEBUG_PASS2 + fprintf(stderr, "PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif + + // pass 3: assign backends to remaining src from dst and view_src for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); + ggml_tallocr_t cur_allocr = node_allocr(node); + if (node->view_src != NULL && cur_allocr == NULL) { + cur_allocr = node_allocr(node) = node_allocr(node->view_src); + SET_CAUSE(node, "3.vsrc"); + } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { @@ -977,81 +1106,105 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g } ggml_tallocr_t src_allocr = node_allocr(src); if (src_allocr == NULL) { - node_allocr(src) = node_allocr; + if (src->view_src != NULL) { + // views are always on the same backend as the source + node_allocr(src) = node_allocr(src->view_src); + SET_CAUSE(src, "3.vsrc"); + } else { + node_allocr(src) = cur_allocr; + SET_CAUSE(src, "3.cur"); + } } } } - //printf("PASS 3 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#ifdef DEBUG_PASS3 + fprintf(stderr, "PASS 3 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif // pass 4: split graph, find tensors that need to be copied - // TODO: - // - when switching from a less preferred backend to a more preferred backend, check if it is possible to move the switch to an earlier point for the same cost - // find first 
backend - int cur_split = 0; - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - if (node->view_src == NULL) { - sched->splits[0].tallocr = node_allocr(node); - break; - } - } - sched->splits[0].i_start = 0; - sched->splits[0].n_inputs = 0; - memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK - ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; - size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - - if (ggml_is_view_op(node->op)) { - continue; - } - - ggml_tallocr_t node_allocr = node_allocr(node); - - if (node_allocr != cur_allocr) { - sched->splits[cur_split].i_end = i; - cur_split++; - GGML_ASSERT(cur_split < GGML_MAX_SPLITS); - sched->splits[cur_split].tallocr = node_allocr; - sched->splits[cur_split].i_start = i; - sched->splits[cur_split].n_inputs = 0; - memset(sched->splits[cur_split].inputs, 0, sizeof(sched->splits[cur_split].inputs)); //HACK - cur_allocr = node_allocr; - cur_backend_id = sched_allocr_prio(sched, cur_allocr); - } - - // find inputs that are not on the same backend - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * src = node->src[j]; - if (src == NULL) { + { + int cur_split = 0; + // find the backend of the first split, skipping view ops + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (!ggml_is_view_op(node->op)) { + sched->splits[0].tallocr = node_allocr(node); break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != node_allocr) { - int n_inputs = sched->splits[cur_split].n_inputs++; - GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); - sched->splits[cur_split].inputs[n_inputs] = (struct ggml_tensor *)src; + } + sched->splits[0].i_start = 0; + sched->splits[0].n_inputs = 0; + memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK + ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; + size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; - // create copies - size_t id = hash_id(src); - if (sched->node_copies[id][cur_backend_id] == NULL) { - struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); - sched->node_copies[id][cur_backend_id] = tensor_copy; - node_allocr(tensor_copy) = cur_allocr; - ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); - ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); + if (ggml_is_view_op(node->op)) { + continue; + } + + ggml_tallocr_t node_allocr = node_allocr(node); + + if (node_allocr != cur_allocr) { + sched->splits[cur_split].i_end = i; + cur_split++; + GGML_ASSERT(cur_split < GGML_MAX_SPLITS); + sched->splits[cur_split].tallocr = node_allocr; + sched->splits[cur_split].i_start = i; + sched->splits[cur_split].n_inputs = 0; + cur_allocr = node_allocr; + cur_backend_id = sched_allocr_prio(sched, cur_allocr); + } + + // find inputs that are not on the same backend + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + GGML_ASSERT(src_allocr != NULL); // all inputs should be assigned by now + if (src_allocr != node_allocr) { + // check if the input is already in the split + bool found = false; + for (int k = 0; k < sched->splits[cur_split].n_inputs; k++) { + if 
(sched->splits[cur_split].inputs[k] == src) { + found = true; + break; + } + } + + if (!found) { + int n_inputs = sched->splits[cur_split].n_inputs++; + //printf("split %d input %d: %s (%s)\n", cur_split, n_inputs, src->name, ggml_backend_name(get_allocr_backend(sched, src_allocr))); + GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); + sched->splits[cur_split].inputs[n_inputs] = src; + } + + // create a copy of the input in the split's backend + size_t id = hash_id(src); + if (sched->node_copies[id][cur_backend_id] == NULL) { + ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); + struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); + ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); + + sched->node_copies[id][cur_backend_id] = tensor_copy; + node_allocr(tensor_copy) = cur_allocr; + SET_CAUSE(tensor_copy, "4.cpy"); + } + node->src[j] = sched->node_copies[id][cur_backend_id]; } - node->src[j] = sched->node_copies[id][cur_backend_id]; } } + sched->splits[cur_split].i_end = graph->n_nodes; + sched->n_splits = cur_split + 1; } - sched->splits[cur_split].i_end = graph->n_nodes; - sched->n_splits = cur_split + 1; +#ifdef DEBUG_PASS4 + fprintf(stderr, "PASS 4 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif - //fprintf(stderr, "PASS 4 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); fflush(stdout); - -#if 1 +#ifndef NDEBUG // sanity check: all sources should have the same backend as the node for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1059,6 +1212,11 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (node_allocr == NULL) { fprintf(stderr, "!!!!!!! %s has no backend\n", node->name); } + if (node->view_src != NULL && node_allocr != node_allocr(node->view_src)) { + fprintf(stderr, "!!!!!!! %s has backend %s, view_src %s has backend %s\n", + node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", + node->view_src->name, node_allocr(node->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(node->view_src))) : "NULL"); + } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { @@ -1070,8 +1228,14 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", j, src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL"); } + if (src->view_src != NULL && src_allocr != node_allocr(src->view_src)) { + fprintf(stderr, "!!!!!!! [src] %s has backend %s, view_src %s has backend %s\n", + src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL", + src->view_src->name, node_allocr(src->view_src) ? 
ggml_backend_name(get_allocr_backend(sched, node_allocr(src->view_src))) : "NULL"); + } } } + fflush(stderr); #endif // create copies of the graph for each split @@ -1085,6 +1249,8 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_allocr_prio(sched, split->tallocr)]; + // add a dependency to the input source so that it is not freed before the copy is done + GGML_ASSERT(input_cpy->src[0] == NULL || input_cpy->src[0] == input); input_cpy->src[0] = input; graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; } @@ -1119,24 +1285,16 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t copy_start_us = ggml_time_us(); for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_backend_prio(sched, split_backend)]; - if (input->buffer == NULL) { - if (input->view_src == NULL) { - fprintf(stderr, "input %s has no buffer and no view_src\n", input->name); - exit(1); - } - // FIXME: may need to use the sched buffer instead - ggml_backend_view_init(input->view_src->buffer, input); - } - if (input_cpy->buffer == NULL) { - fprintf(stderr, "input_cpy %s has no buffer\n", input_cpy->name); - exit(1); - } - //GGML_ASSERT(input->buffer->backend != input_cpy->buffer->backend); - //GGML_ASSERT(input_cpy->buffer->backend == split_backend); - ggml_backend_tensor_copy(input, input_cpy); + struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][split_backend_id]; + + GGML_ASSERT(input->buffer != NULL); + GGML_ASSERT(input_cpy->buffer != NULL); + + // TODO: avoid this copy if it was already copied in a previous split, and the input didn't change + // this is important to avoid copying constants such as KQ_mask and inp_pos multiple times + ggml_backend_tensor_copy_async(split_backend, input, input_cpy); } - // ggml_backend_synchronize(split_backend); + //ggml_backend_synchronize(split_backend); // necessary to measure copy time int64_t copy_end_us = ggml_time_us(); copy_us[split_backend_id] += copy_end_us - copy_start_us; @@ -1148,7 +1306,7 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t compute_start_us = ggml_time_us(); ggml_backend_graph_compute(split_backend, &split->graph); - // ggml_backend_synchronize(split_backend); + //ggml_backend_synchronize(split_backend); // necessary to measure compute time uint64_t compute_end_us = ggml_time_us(); compute_us[split_backend_id] += compute_end_us - compute_start_us; } @@ -1168,26 +1326,41 @@ static void sched_reset(ggml_backend_sched_t sched) { for (int i = 0; i < sched->n_backends; i++) { ggml_tallocr_reset(sched->tallocs[i]); } + // reset state for the next run + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); + memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); + memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); + + sched->is_reset = true; } -ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends) { +ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size) { + GGML_ASSERT(n_backends > 0); GGML_ASSERT(n_backends <= GGML_MAX_BACKENDS); - struct ggml_backend_sched * sched = 
malloc(sizeof(struct ggml_backend_sched)); - memset(sched, 0, sizeof(struct ggml_backend_sched)); + struct ggml_backend_sched * sched = calloc(sizeof(struct ggml_backend_sched), 1); + + // initialize hash table + sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + sched->node_talloc = calloc(sizeof(sched->node_talloc[0]) * sched->hash_set.size, 1); + sched->node_copies = calloc(sizeof(sched->node_copies[0]) * sched->hash_set.size, 1); sched->n_backends = n_backends; for (int i = 0; i < n_backends; i++) { sched->backends[i] = backends[i]; + sched->bufts[i] = bufts ? bufts[i] : ggml_backend_get_default_buffer_type(backends[i]); } sched->galloc = ggml_gallocr_new(); // init measure allocs for each backend for (int i = 0; i < n_backends; i++) { - sched->tallocs[i] = ggml_tallocr_new_measure_from_backend(backends[i]); + sched->tallocs[i] = ggml_tallocr_new_measure_from_buft(sched->bufts[i]); } + sched_reset(sched); + return sched; } @@ -1199,6 +1372,7 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { ggml_tallocr_free(sched->tallocs[i]); } ggml_gallocr_free(sched->galloc); + ggml_free(sched->ctx); free(sched->hash_set.keys); free(sched->node_talloc); free(sched->node_copies); @@ -1206,12 +1380,7 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { } void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { - // initialize hash tables - size_t hash_size = measure_graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS; - sched->hash_set.size = hash_size; - sched->hash_set.keys = malloc(sizeof(sched->hash_set.keys[0]) * hash_size); - sched->node_talloc = malloc(sizeof(sched->node_talloc[0]) * hash_size); - sched->node_copies = malloc(sizeof(sched->node_copies[0]) * hash_size); + GGML_ASSERT(ggml_tallocr_is_measure(sched->tallocs[0])); // can only be initialized once sched_split_graph(sched, measure_graph); sched_alloc_splits(sched); @@ -1220,28 +1389,41 @@ void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgr for (int i = 0; i < sched->n_backends; i++) { size_t size = ggml_tallocr_max_size(sched->tallocs[i]); ggml_tallocr_free(sched->tallocs[i]); - sched->tallocs[i] = ggml_tallocr_new_from_backend(sched->backends[i], size); + sched->tallocs[i] = ggml_tallocr_new_from_buft(sched->bufts[i], size); } sched_reset(sched); } void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { - GGML_ASSERT(sched->hash_set.size >= graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + + if (!sched->is_reset) { + sched_reset(sched); + } sched_split_graph(sched, graph); sched_alloc_splits(sched); sched_compute_splits(sched); +} + +void ggml_backend_sched_reset(ggml_backend_sched_t sched) { sched_reset(sched); } +int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched) { + return sched->n_splits; +} + ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend) { int backend_index = sched_backend_prio(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); return sched->tallocs[backend_index]; } ggml_backend_buffer_t ggml_backend_sched_get_buffer(ggml_backend_sched_t sched, ggml_backend_t backend) { int backend_index = sched_backend_prio(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); return 
ggml_tallocr_get_buffer(sched->tallocs[backend_index]); } @@ -1251,10 +1433,19 @@ void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml node_allocr(node) = sched->tallocs[backend_index]; } +ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node) { + ggml_tallocr_t allocr = node_allocr(node); + if (allocr == NULL) { + return NULL; + } + return get_allocr_backend(sched, allocr); +} + // utils + void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - //GGML_ASSERT(tensor->data == NULL); // views of pre-allocted tensors may have the data set, but still need to be initialized + //GGML_ASSERT(tensor->data == NULL); // views of pre-allocated tensors may have the data set in ggml_new_tensor, but still need to be initialized by the backend GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); @@ -1320,6 +1511,7 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor struct ggml_tensor * dst = node_copies[id]; if (dst->view_src != NULL) { + graph_init_tensor(hash_set, node_copies, node_init, src->view_src); ggml_backend_view_init(dst->view_src->buffer, dst); } else { @@ -1353,6 +1545,21 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s struct ggml_context * ctx_allocated = ggml_init(params); struct ggml_context * ctx_unallocated = ggml_init(params); + if (ctx_allocated == NULL || ctx_unallocated == NULL) { + fprintf(stderr, "failed to allocate context for graph copy\n"); + free(hash_set.keys); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } + // dup nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1361,6 +1568,20 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // allocate nodes ggml_backend_buffer_t buffer = ggml_backend_alloc_ctx_tensors(ctx_allocated, backend); + if (buffer == NULL) { + fprintf(stderr, "failed to allocate buffer for graph copy\n"); + free(hash_set.keys); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } //printf("copy buffer size: %zu MB\n", ggml_backend_buffer_get_size(buffer) / 1024 / 1024); @@ -1397,8 +1618,12 @@ void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy) { ggml_free(copy.ctx_unallocated); } -void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data) { +bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data) { struct ggml_backend_graph_copy copy = ggml_backend_graph_copy(backend2, graph); + if (copy.buffer == NULL) { + return false; + } + struct ggml_cgraph * g1 = graph; struct ggml_cgraph * g2 = copy.graph; @@ -1428,4 +1653,6 @@ void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t } 
ggml_backend_graph_copy_free(copy); + + return true; } diff --git a/ggml-backend.h b/ggml-backend.h index 85ff67b0e..4eb244af1 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -17,22 +17,31 @@ extern "C" { // // buffer type - GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size); - GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); - GGML_API size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); - GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); - GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); + GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); + GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer - GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); - GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); - GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); - GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer); + enum ggml_backend_buffer_usage { + GGML_BACKEND_BUFFER_USAGE_ANY = 0, + GGML_BACKEND_BUFFER_USAGE_WEIGHTS = 1, + }; + + GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); + GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); // // Backend @@ -140,23 +149,24 @@ extern "C" { typedef struct ggml_backend_sched * ggml_backend_sched_t; // Initialize a backend scheduler - GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends); - - GGML_API 
void ggml_backend_sched_free(ggml_backend_sched_t sched); - + GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); + GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); // Initialize backend buffers from a measure graph - GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + // Get the number of splits of the last graph + GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); GGML_API ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend); GGML_API ggml_backend_buffer_t ggml_backend_sched_get_buffer (ggml_backend_sched_t sched, ggml_backend_t backend); - GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); - // Allocate a graph on the backend scheduler - GGML_API void ggml_backend_sched_graph_compute( - ggml_backend_sched_t sched, - struct ggml_cgraph * graph); + // Allocate and compute graph on the backend scheduler + GGML_API void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + // Reset all assignments and allocators - must be called before using the sched allocators to allocate inputs + GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); // // Utils @@ -176,7 +186,7 @@ extern "C" { typedef bool (*ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); // Compare the output of two backends - GGML_API void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); + GGML_API bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); // Tensor initialization GGML_API void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index a345b0c4a..2db50437c 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8,8 +8,13 @@ #include #include #include +#include #include - +#include +#include +#include "ggml-cuda.h" +#include "ggml.h" +#include "ggml-backend-impl.h" #if defined(GGML_USE_HIPBLAS) #include @@ -77,6 +82,7 @@ #define cudaMemcpyKind hipMemcpyKind #define cudaMemset hipMemset #define cudaMemsetAsync hipMemsetAsync +#define cudaMemGetInfo hipMemGetInfo #define cudaOccupancyMaxPotentialBlockSize hipOccupancyMaxPotentialBlockSize #define cudaSetDevice hipSetDevice #define cudaStreamCreateWithFlags hipStreamCreateWithFlags @@ -112,10 +118,6 @@ #endif // defined(GGML_USE_HIPBLAS) -#include "ggml-cuda.h" -#include "ggml.h" -#include "ggml-backend-impl.h" - #define CUDART_HMAX 11070 // CUDA 11.7, min. ver. 
for which __hmax and __hmax2 are known to work (may be higher than needed) #define CC_PASCAL 600 @@ -564,7 +566,7 @@ static void ggml_cuda_set_device(const int device) { static int g_device_count = -1; static int g_main_device = 0; -static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; +static std::array g_default_tensor_split = {}; struct cuda_device_capabilities { int cc; // compute capability @@ -575,10 +577,6 @@ struct cuda_device_capabilities { static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, 0, false, 0} }; -static void * g_scratch_buffer = nullptr; -static size_t g_scratch_size = 0; // disabled by default -static size_t g_scratch_offset = 0; - static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr}; [[noreturn]] @@ -7548,8 +7546,9 @@ void ggml_init_cublas() { CUDA_CHECK(cudaGetDeviceProperties(&prop, id)); fprintf(stderr, " Device %d: %s, compute capability %d.%d, VMM: %s\n", id, prop.name, prop.major, prop.minor, device_vmm ? "yes" : "no"); - g_tensor_split[id] = total_vram; + g_default_tensor_split[id] = total_vram; total_vram += prop.totalGlobalMem; + #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) g_device_caps[id].cc = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; #else @@ -7558,7 +7557,7 @@ void ggml_init_cublas() { g_device_caps[id].smpb = prop.sharedMemPerBlock; } for (int id = 0; id < g_device_count; ++id) { - g_tensor_split[id] /= total_vram; + g_default_tensor_split[id] /= total_vram; } for (int id = 0; id < g_device_count; ++id) { @@ -7582,30 +7581,6 @@ void ggml_init_cublas() { } } -void ggml_cuda_set_tensor_split(const float * tensor_split) { - if (tensor_split == nullptr) { - return; - } - bool all_zero = true; - for (int i = 0; i < g_device_count; ++i) { - if (tensor_split[i] != 0.0f) { - all_zero = false; - break; - } - } - if (all_zero) { - return; - } - float split_sum = 0.0f; - for (int i = 0; i < g_device_count; ++i) { - g_tensor_split[i] = split_sum; - split_sum += tensor_split[i]; - } - for (int i = 0; i < g_device_count; ++i) { - g_tensor_split[i] /= split_sum; - } -} - void * ggml_cuda_host_malloc(size_t size) { if (getenv("GGML_CUDA_NO_PINNED") != nullptr) { return nullptr; @@ -8057,11 +8032,11 @@ static void ggml_cuda_op_mul_mat_q( (void) src1_ddf_i; } -static int64_t get_row_rounding(ggml_type type) { +static int64_t get_row_rounding(ggml_type type, const std::array & tensor_split) { int64_t min_compute_capability = INT_MAX; int64_t max_compute_capability = INT_MIN; for (int id = 0; id < g_device_count; ++id) { - if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { + if (tensor_split[id] < (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) { if (min_compute_capability > g_device_caps[id].cc) { min_compute_capability = g_device_caps[id].cc; } @@ -8122,6 +8097,21 @@ static int64_t get_row_rounding(ggml_type type) { #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } +static void get_row_split(int64_t * row_low, int64_t * row_high, const ggml_tensor * tensor, const std::array & tensor_split, int id) { + const int64_t nrows = ggml_nrows(tensor); + const int64_t rounding = get_row_rounding(tensor->type, tensor_split); + + *row_low = id == 0 ? 
0 : nrows*tensor_split[id]; + *row_low -= *row_low % rounding; + + if (id == g_device_count - 1) { + *row_high = nrows; + } else { + *row_high = nrows*tensor_split[id + 1]; + *row_high -= *row_high % rounding; + } +} + static void ggml_cuda_op_mul_mat_vec_q( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, @@ -8739,6 +8729,11 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { peer_access_enabled = enable_peer_access; } +// FIXME: move this somewhere else +struct ggml_backend_cuda_split_buffer_type_context { + std::array tensor_split; +}; + static void ggml_cuda_op_mul_mat( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_cuda_op_mul_mat_t op, const bool convert_src1_to_q8_1) { @@ -8790,6 +8785,14 @@ static void ggml_cuda_op_mul_mat( GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); + std::array tensor_split; + if (split) { + // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_GPU_SPLIT check + // GGML_ASSERT(src0->buffer != nullptr && src0->buffer->buft == ...); + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; + tensor_split = buft_ctx->tensor_split; + } + struct dev_data { cuda_pool_alloc src0_dd_alloc; cuda_pool_alloc src1_ddf_alloc; @@ -8817,17 +8820,17 @@ static void ggml_cuda_op_mul_mat( // for multi GPU, get the row boundaries from tensor split // and round to mul_mat_q tile sizes if (split) { - const int64_t rounding = get_row_rounding(src0->type); + const int64_t rounding = get_row_rounding(src0->type, tensor_split); if (id != 0) { - dev[id].row_low = ne01*g_tensor_split[id]; + dev[id].row_low = ne01*tensor_split[id]; if (dev[id].row_low < ne01) { dev[id].row_low -= dev[id].row_low % rounding; } } if (id != g_device_count - 1) { - dev[id].row_high = ne01*g_tensor_split[id + 1]; + dev[id].row_high = ne01*tensor_split[id + 1]; if (dev[id].row_high < ne01) { dev[id].row_high -= dev[id].row_high % rounding; } @@ -9373,10 +9376,17 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; - for (int id = 0; id < g_device_count; ++id) { - if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - min_compute_capability = g_device_caps[id].cc; + + if (split) { + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; + auto & tensor_split = buft_ctx->tensor_split; + for (int id = 0; id < g_device_count; ++id) { + if (min_compute_capability > g_device_caps[id].cc && tensor_split[id] < (id + 1 < g_device_count ? 
tensor_split[id + 1] : 1.0f)) { + min_compute_capability = g_device_caps[id].cc; + } } + } else { + min_compute_capability = g_device_caps[g_main_device].cc; } #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) @@ -9415,7 +9425,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } else if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { @@ -9877,247 +9887,7 @@ static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_spl return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); } -void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { - const int64_t nrows = ggml_nrows(tensor); - - const int64_t ne0 = tensor->ne[0]; - - const size_t nb1 = tensor->nb[1]; - - ggml_backend_type backend = tensor->backend; - ggml_tensor_extra_gpu * extra = new struct ggml_tensor_extra_gpu; - memset(extra, 0, sizeof(*extra)); - - for (int id = 0; id < g_device_count; ++id) { - if (backend == GGML_BACKEND_GPU && id != g_main_device) { - continue; - } - - ggml_cuda_set_device(id); - - int64_t row_low, row_high; - if (backend == GGML_BACKEND_GPU) { - row_low = 0; - row_high = nrows; - } else if (backend == GGML_BACKEND_GPU_SPLIT) { - const int64_t rounding = get_row_rounding(tensor->type); - - row_low = id == 0 ? 
0 : nrows*g_tensor_split[id]; - row_low -= row_low % rounding; - - if (id == g_device_count - 1) { - row_high = nrows; - } else { - row_high = nrows*g_tensor_split[id + 1]; - row_high -= row_high % rounding; - } - } else { - GGML_ASSERT(false); - } - if (row_low == row_high) { - continue; - } - - int64_t nrows_split = row_high - row_low; - - const size_t offset_split = row_low*nb1; - size_t size = ggml_nbytes_split(tensor, nrows_split); - const size_t original_size = size; - - // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses - if (ne0 % MATRIX_ROW_PADDING != 0) { - size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); - } - - char * buf; - CUDA_CHECK(cudaMalloc(&buf, size)); - char * buf_host = (char *)data + offset_split; - - // set padding to 0 to avoid possible NaN values - if (size > original_size) { - CUDA_CHECK(cudaMemset(buf + original_size, 0, size - original_size)); - } - - CUDA_CHECK(cudaMemcpy(buf, buf_host, original_size, cudaMemcpyHostToDevice)); - - extra->data_device[id] = buf; - - if (backend == GGML_BACKEND_GPU_SPLIT) { - for (int64_t is = 0; is < MAX_STREAMS; ++is) { - CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); - } - } - } - - tensor->extra = extra; -} - -void ggml_cuda_free_data(struct ggml_tensor * tensor) { - if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { - return; - } - - ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - - for (int id = 0; id < g_device_count; ++id) { - ggml_cuda_set_device(id); - if (extra->data_device[id] != nullptr) { - CUDA_CHECK(cudaFree(extra->data_device[id])); - } - - for (int64_t is = 0; is < MAX_STREAMS; ++is) { - if (extra->events[id][is] != nullptr) { - CUDA_CHECK(cudaEventDestroy(extra->events[id][is])); - } - } - } - - delete extra; -} - -static ggml_tensor_extra_gpu * g_temp_tensor_extras = nullptr; -static size_t g_temp_tensor_extra_index = 0; - -static ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() { - if (g_temp_tensor_extras == nullptr) { - g_temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_CUDA_MAX_NODES]; - } - - size_t alloc_index = g_temp_tensor_extra_index; - g_temp_tensor_extra_index = (g_temp_tensor_extra_index + 1) % GGML_CUDA_MAX_NODES; - ggml_tensor_extra_gpu * extra = &g_temp_tensor_extras[alloc_index]; - memset(extra, 0, sizeof(*extra)); - - return extra; -} - -static void ggml_cuda_assign_buffers_impl(struct ggml_tensor * tensor, bool scratch, bool force_inplace, bool no_alloc) { - if (scratch && g_scratch_size == 0) { - return; - } - - tensor->backend = GGML_BACKEND_GPU; - - // recursively assign CUDA buffers until a compute tensor is found - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_CPU) { - const ggml_op src0_op = tensor->src[0]->op; - if (src0_op == GGML_OP_RESHAPE || src0_op == GGML_OP_TRANSPOSE || src0_op == GGML_OP_VIEW || src0_op == GGML_OP_PERMUTE) { - ggml_cuda_assign_buffers_impl(tensor->src[0], scratch, force_inplace, no_alloc); - } - } - if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_CPU) { - ggml_cuda_assign_buffers_impl(tensor->src[1], scratch, force_inplace, no_alloc); - } - - if (scratch && no_alloc) { - return; - } - - ggml_tensor_extra_gpu * extra; - - const bool inplace = (tensor->src[0] != nullptr && tensor->src[0]->data == tensor->data) || - tensor->op == GGML_OP_VIEW || - force_inplace; - const size_t size = 
ggml_nbytes(tensor); - - ggml_cuda_set_device(g_main_device); - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; - size_t offset = 0; - if (tensor->op == GGML_OP_VIEW) { - memcpy(&offset, tensor->op_params, sizeof(size_t)); - } - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = src0_ddc + offset; - } else if (tensor->op == GGML_OP_CPY) { - ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu * ) tensor->src[1]->extra; - void * src1_ddv = src1_extra->data_device[g_main_device]; - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = src1_ddv; - } else if (scratch) { - GGML_ASSERT(size <= g_scratch_size); - if (g_scratch_offset + size > g_scratch_size) { - g_scratch_offset = 0; - } - - char * data = (char *) g_scratch_buffer; - if (data == nullptr) { - CUDA_CHECK(cudaMalloc(&data, g_scratch_size)); - g_scratch_buffer = data; - } - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = data + g_scratch_offset; - - g_scratch_offset += size; - - GGML_ASSERT(g_scratch_offset <= g_scratch_size); - } else { // allocate new buffers outside of scratch - void * data; - CUDA_CHECK(cudaMalloc(&data, size)); - CUDA_CHECK(cudaMemset(data, 0, size)); - extra = new ggml_tensor_extra_gpu; - memset(extra, 0, sizeof(*extra)); - extra->data_device[g_main_device] = data; - } - - tensor->extra = extra; -} - -void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset) { - if (g_scratch_size == 0) { - return; - } - if (g_scratch_buffer == nullptr) { - ggml_cuda_set_device(g_main_device); - CUDA_CHECK(cudaMalloc(&g_scratch_buffer, g_scratch_size)); - } - - ggml_tensor_extra_gpu * extra = ggml_cuda_alloc_temp_tensor_extra(); - - const bool inplace = tensor->view_src != nullptr; - - if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; - size_t view_offset = 0; - if (tensor->op == GGML_OP_VIEW) { - memcpy(&view_offset, tensor->op_params, sizeof(size_t)); - } - extra->data_device[g_main_device] = src0_ddc + view_offset; - } else { - extra->data_device[g_main_device] = (char *) g_scratch_buffer + offset; - } - - tensor->extra = extra; -} - -void ggml_cuda_copy_to_device(struct ggml_tensor * tensor) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - GGML_ASSERT(ggml_is_contiguous(tensor)); - - ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_cuda_set_device(g_main_device); - CUDA_CHECK(cudaMemcpy(extra->data_device[g_main_device], tensor->data, ggml_nbytes(tensor), cudaMemcpyHostToDevice)); -} - -void ggml_cuda_assign_buffers(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, true, false, false); -} - -void ggml_cuda_assign_buffers_no_alloc(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, true, false, true); -} - -void ggml_cuda_assign_buffers_no_scratch(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, false, false, false); -} - -void ggml_cuda_assign_buffers_force_inplace(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, false, true, 
false); -} - -void ggml_cuda_set_main_device(const int main_device) { +static void ggml_cuda_set_main_device(const int main_device) { if (main_device >= g_device_count) { fprintf(stderr, "warning: cannot set main_device=%d because there are only %d devices. Using device %d instead.\n", main_device, g_device_count, g_main_device); @@ -10126,30 +9896,12 @@ void ggml_cuda_set_main_device(const int main_device) { if (g_main_device != main_device && g_device_count > 1) { g_main_device = main_device; - cudaDeviceProp prop; - CUDA_CHECK(cudaGetDeviceProperties(&prop, g_main_device)); - fprintf(stderr, "%s: using device %d (%s) as main device\n", __func__, g_main_device, prop.name); + //cudaDeviceProp prop; + //CUDA_CHECK(cudaGetDeviceProperties(&prop, g_main_device)); + //fprintf(stderr, "%s: using device %d (%s) as main device\n", __func__, g_main_device, prop.name); } } -void ggml_cuda_set_scratch_size(const size_t scratch_size) { - // this is a hack to not completely break llama.cpp when using multiple models or contexts simultaneously - // it still won't always work as expected, but it's better than nothing - if (scratch_size > g_scratch_size) { - ggml_cuda_free_scratch(); - } - g_scratch_size = std::max(g_scratch_size, scratch_size); -} - -void ggml_cuda_free_scratch() { - if (g_scratch_buffer == nullptr) { - return; - } - - CUDA_CHECK(cudaFree(g_scratch_buffer)); - g_scratch_buffer = nullptr; -} - bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { if (!g_cublas_loaded) return false; @@ -10328,21 +10080,31 @@ void ggml_cuda_get_device_description(int device, char * description, size_t des #define UNUSED GGML_UNUSED +struct ggml_backend_cuda_context { + int device; + std::string name; +}; + // cuda buffer -struct ggml_backend_buffer_context_cuda { +struct ggml_backend_cuda_buffer_context { int device; void * dev_ptr = nullptr; ggml_tensor_extra_gpu * temp_tensor_extras = nullptr; size_t temp_tensor_extra_index = 0; + std::string name; - ggml_backend_buffer_context_cuda(int device, void * dev_ptr) : device(device), dev_ptr(dev_ptr) {} + ggml_backend_cuda_buffer_context(int device, void * dev_ptr) : + device(device), dev_ptr(dev_ptr), + name(GGML_CUDA_NAME + std::to_string(device)) { + } - ~ggml_backend_buffer_context_cuda() { + ~ggml_backend_cuda_buffer_context() { delete[] temp_tensor_extras; } ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() { + // TODO: remove GGML_CUDA_MAX_NODES, allocate dynamically and reuse in backend_buffer_reset if (temp_tensor_extras == nullptr) { temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_CUDA_MAX_NODES]; } @@ -10356,19 +10118,28 @@ struct ggml_backend_buffer_context_cuda { } }; +static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; + return ctx->name.c_str(); +} + +static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name == ggml_backend_cuda_buffer_get_name; +} + static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; CUDA_CHECK(cudaFree(ctx->dev_ptr)); delete ctx; } static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = 
(ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; return ctx->dev_ptr; } static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { assert(tensor->view_src->buffer->buft == buffer->buft); @@ -10397,14 +10168,12 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g CUDA_CHECK(cudaMemsetAsync((char *)tensor->data + original_size, 0, padded_size - original_size, g_cudaStreams[ctx->device][0])); } } - - UNUSED(buffer); } static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; ggml_cuda_set_device(ctx->device); CUDA_CHECK(cudaDeviceSynchronize()); @@ -10415,49 +10184,82 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; ggml_cuda_set_device(ctx->device); CUDA_CHECK(cudaDeviceSynchronize()); - CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); + CUDA_CHECK(cudaDeviceSynchronize()); +} + +static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { + if (ggml_backend_buffer_is_cuda(src->buffer)) { + ggml_backend_cuda_buffer_context * src_ctx = (ggml_backend_cuda_buffer_context *)src->buffer->context; + ggml_backend_cuda_buffer_context * dst_ctx = (ggml_backend_cuda_buffer_context *)buffer->context; + + ggml_cuda_set_device(src_ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + ggml_cuda_set_device(dst_ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + CUDA_CHECK(cudaMemcpy((char *)dst->data, (const char *)src->data, ggml_nbytes(src), cudaMemcpyDeviceToDevice)); + CUDA_CHECK(cudaDeviceSynchronize()); + + return true; + } + return false; } static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; ggml_cuda_set_device(ctx->device); CUDA_CHECK(cudaDeviceSynchronize()); - CUDA_CHECK(cudaMemset(ctx->dev_ptr, value, buffer->size)); + CUDA_CHECK(cudaDeviceSynchronize()); } -static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { +static ggml_backend_buffer_i ggml_backend_cuda_buffer_interface = { + /* .get_name = */ ggml_backend_cuda_buffer_get_name, /* .free_buffer = */ ggml_backend_cuda_buffer_free_buffer, /* .get_base = */ ggml_backend_cuda_buffer_get_base, /* .init_tensor = */ 
ggml_backend_cuda_buffer_init_tensor, /* .set_tensor = */ ggml_backend_cuda_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cuda_buffer_get_tensor, - /* .cpy_tensor_from = */ NULL, - /* .cpy_tensor_to = */ NULL, + /* .cpy_tensor = */ ggml_backend_cuda_buffer_cpy_tensor, /* .clear = */ ggml_backend_cuda_buffer_clear, + /* .reset = */ NULL, }; // cuda buffer type -static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - int device = (int) (intptr_t) buft->context; +struct ggml_backend_cuda_buffer_type_context { + int device; + std::string name; +}; - ggml_cuda_set_device(device); +static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { + ggml_backend_cuda_buffer_type_context * ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + + return ctx->name.c_str(); +} + +static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + + ggml_cuda_set_device(buft_ctx->device); size = std::max(size, (size_t)1); // cudaMalloc returns null for size 0 void * dev_ptr; - CUDA_CHECK(cudaMalloc(&dev_ptr, size)); + cudaError_t err = cudaMalloc(&dev_ptr, size); + if (err != cudaSuccess) { + fprintf(stderr, "%s: allocating %.2f MiB on device %d: cudaMalloc failed: %s\n", __func__, size/1024.0/1024.0, buft_ctx->device, cudaGetErrorString(err)); + return nullptr; + } - ggml_backend_buffer_context_cuda * ctx = new ggml_backend_buffer_context_cuda(device, dev_ptr); + ggml_backend_cuda_buffer_context * ctx = new ggml_backend_cuda_buffer_context(buft_ctx->device, dev_ptr); - return ggml_backend_buffer_init(buft, cuda_backend_buffer_interface, ctx, size); + return ggml_backend_buffer_init(buft, ggml_backend_cuda_buffer_interface, ctx, size); } static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { @@ -10466,7 +10268,7 @@ static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_ty UNUSED(buft); } -static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, ggml_tensor * tensor) { +static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); int64_t nrows_split = row_high - row_low; @@ -10487,21 +10289,32 @@ static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_t } static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { - return ggml_backend_is_cuda(backend); + if (!ggml_backend_is_cuda(backend)) { + return false; + } - UNUSED(buft); + ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; + + return buft_ctx->device == cuda_ctx->device; } static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { + /* .get_name = */ ggml_backend_cuda_buffer_type_name, /* .alloc_buffer = */ ggml_backend_cuda_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cuda_buffer_type_get_alignment, /* .get_alloc_size = */ ggml_backend_cuda_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_cuda_buffer_type_supports_backend, - /* .is_host = */ nullptr, + /* .is_host = */ NULL, }; 
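For context, a minimal usage sketch of the per-device buffer type defined above, assuming the public ggml-backend helpers ggml_backend_buft_alloc_buffer and ggml_backend_buffer_free; the helper name is hypothetical and the snippet is illustrative only, not part of the patch:

// Hypothetical helper: probe whether a dedicated buffer of the given size can
// be allocated on one CUDA device through the buffer type defined above.
#include "ggml-backend.h"
#include "ggml-cuda.h"
#include <cstdio>

static bool cuda_device_can_alloc(int device, size_t size) {
    ggml_backend_buffer_type_t buft = ggml_backend_cuda_buffer_type(device);
    if (buft == nullptr) {
        return false; // device index out of range
    }
    // alloc_buffer now returns nullptr on cudaMalloc failure instead of aborting
    ggml_backend_buffer_t buf = ggml_backend_buft_alloc_buffer(buft, size);
    if (buf == nullptr) {
        fprintf(stderr, "failed to allocate %zu bytes on device %d\n", size, device);
        return false;
    }
    ggml_backend_buffer_free(buf);
    return true;
}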
ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { - static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES]; + // FIXME: this is not thread safe + if (device >= ggml_backend_cuda_get_device_count()) { + return nullptr; + } + + static ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES]; static bool ggml_backend_cuda_buffer_type_initialized = false; @@ -10509,7 +10322,7 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { for (int i = 0; i < GGML_CUDA_MAX_DEVICES; i++) { ggml_backend_cuda_buffer_types[i] = { /* .iface = */ ggml_backend_cuda_buffer_type_interface, - /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i, + /* .context = */ new ggml_backend_cuda_buffer_type_context{i, GGML_CUDA_NAME + std::to_string(i)}, }; } ggml_backend_cuda_buffer_type_initialized = true; @@ -10518,8 +10331,306 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { return &ggml_backend_cuda_buffer_types[device]; } +// cuda split buffer + +struct ggml_backend_cuda_split_buffer_context { + ~ggml_backend_cuda_split_buffer_context() { + for (ggml_tensor_extra_gpu * extra : tensor_extras) { + for (int id = 0; id < g_device_count; ++id) { + for (int64_t is = 0; is < MAX_STREAMS; ++is) { + if (extra->events[id][is] != nullptr) { + CUDA_CHECK(cudaEventDestroy(extra->events[id][is])); + } + } + if (extra->data_device[id] != nullptr) { + CUDA_CHECK(cudaFree(extra->data_device[id])); + } + } + delete extra; + } + } + + std::vector tensor_extras; +}; + +static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) { + return GGML_CUDA_NAME "_Split"; + + UNUSED(buffer); +} + +// unused at the moment +//static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) { +// return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; +//} + +static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; + delete ctx; +} + +static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) { + // the pointers are stored in the tensor extras, this is just a dummy address and never dereferenced + return (void *)0x1000; + + UNUSED(buffer); +} + +static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { + GGML_ASSERT(tensor->view_src == nullptr); // views of split tensors are not supported + + ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + + ggml_tensor_extra_gpu * extra = new ggml_tensor_extra_gpu{}; + + ctx->tensor_extras.push_back(extra); + + for (int id = 0; id < g_device_count; ++id) { + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + // FIXME: do not crash if 
cudaMalloc fails + // currently, init_tensor cannot fail, it needs to be fixed in ggml-backend first + ggml_cuda_set_device(id); + char * buf; + CUDA_CHECK(cudaMalloc(&buf, size)); + + // set padding to 0 to avoid possible NaN values + if (size > original_size) { + CUDA_CHECK(cudaMemset(buf + original_size, 0, size - original_size)); + } + + extra->data_device[id] = buf; + + for (int64_t is = 0; is < MAX_STREAMS; ++is) { + CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); + } + } + tensor->backend = GGML_BACKEND_GPU_SPLIT; + tensor->extra = extra; +} + +static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int id = 0; id < g_device_count; ++id) { + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + const char * buf_host = (const char *)data + offset_split; + CUDA_CHECK(cudaMemcpy(extra->data_device[id], buf_host, original_size, cudaMemcpyHostToDevice)); + } +} + +static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int id = 0; id < g_device_count; ++id) { + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + char * buf_host = (char *)data + offset_split; + CUDA_CHECK(cudaMemcpy(buf_host, extra->data_device[id], original_size, cudaMemcpyDeviceToHost)); + } +} + +static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + UNUSED(buffer); + UNUSED(value); +} + +static struct ggml_backend_buffer_i ggml_backend_cuda_split_buffer_interface = { + /* .get_name = */ ggml_backend_cuda_split_buffer_get_name, + /* 
.free_buffer  = */ ggml_backend_cuda_split_buffer_free_buffer,
+    /* .get_base     = */ ggml_backend_cuda_split_buffer_get_base,
+    /* .init_tensor  = */ ggml_backend_cuda_split_buffer_init_tensor,
+    /* .set_tensor   = */ ggml_backend_cuda_split_buffer_set_tensor,
+    /* .get_tensor   = */ ggml_backend_cuda_split_buffer_get_tensor,
+    /* .cpy_tensor   = */ NULL,
+    /* .clear        = */ ggml_backend_cuda_split_buffer_clear,
+    /* .reset        = */ NULL,
+};
+
+// cuda split buffer type
+
+static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) {
+    return GGML_CUDA_NAME "_Split";
+
+    UNUSED(buft);
+}
+
+static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) {
+    // since we don't know the exact split after rounding, we cannot allocate the device buffers at this point
+    // instead, we allocate them for each tensor separately in init_tensor
+    // however, the size still represents the maximum cumulative size of all the device buffers after the tensors are allocated,
+    // as returned by get_alloc_size. this limit is enforced during tensor allocation by ggml-alloc, so it must be correct.
+    ggml_backend_cuda_split_buffer_context * ctx = new ggml_backend_cuda_split_buffer_context();
+
+    return ggml_backend_buffer_init(buft, ggml_backend_cuda_split_buffer_interface, ctx, size);
+}
+
+static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) {
+    return 128;
+
+    UNUSED(buft);
+}
+
+static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) {
+    ggml_backend_cuda_split_buffer_type_context * ctx = (ggml_backend_cuda_split_buffer_type_context *)buft->context;
+
+    size_t total_size = 0;
+
+    const int64_t ne0 = tensor->ne[0];
+
+    for (int id = 0; id < g_device_count; ++id) {
+        int64_t row_low, row_high;
+        get_row_split(&row_low, &row_high, tensor, ctx->tensor_split, id);
+
+        int64_t nrows_split = row_high - row_low;
+        if (nrows_split == 0) {
+            continue;
+        }
+
+        total_size += ggml_nbytes_split(tensor, nrows_split);
+
+        // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses
+        if (ne0 % MATRIX_ROW_PADDING != 0) {
+            total_size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING);
+        }
+    }
+
+    return total_size;
+}
+
+static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) {
+    return ggml_backend_is_cuda(backend);
+
+    UNUSED(buft);
+}
+
+static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) {
+    return false;
+
+    UNUSED(buft);
+}
+
+static ggml_backend_buffer_type_i ggml_backend_cuda_split_buffer_type_interface = {
+    /* .get_name         = */ ggml_backend_cuda_split_buffer_type_name,
+    /* .alloc_buffer     = */ ggml_backend_cuda_split_buffer_type_alloc_buffer,
+    /* .get_alignment    = */ ggml_backend_cuda_split_buffer_type_get_alignment,
+    /* .get_alloc_size   = */ ggml_backend_cuda_split_buffer_type_get_alloc_size,
+    /* .supports_backend = */ ggml_backend_cuda_split_buffer_type_supports_backend,
+    /* .is_host          = */ ggml_backend_cuda_split_buffer_type_is_host,
+};
+
+ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) {
+    // FIXME: this is not thread safe
+    static std::map<std::array<float, GGML_CUDA_MAX_DEVICES>, struct ggml_backend_buffer_type> buft_map;
+
+    std::array<float, GGML_CUDA_MAX_DEVICES> tensor_split_arr = {};
+
+    bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + GGML_CUDA_MAX_DEVICES, 
[](float x) { return x == 0.0f; }); + if (all_zero) { + tensor_split_arr = g_default_tensor_split; + } else { + float split_sum = 0.0f; + for (int i = 0; i < g_device_count; ++i) { + tensor_split_arr[i] = split_sum; + split_sum += tensor_split[i]; + } + for (int i = 0; i < g_device_count; ++i) { + tensor_split_arr[i] /= split_sum; + } + } + + auto it = buft_map.find(tensor_split_arr); + if (it != buft_map.end()) { + return &it->second; + } + + struct ggml_backend_buffer_type buft { + /* .iface = */ ggml_backend_cuda_split_buffer_type_interface, + /* .context = */ new ggml_backend_cuda_split_buffer_type_context{tensor_split_arr}, + }; + + auto result = buft_map.emplace(tensor_split_arr, buft); + return &result.first->second; +} + // host buffer type +static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { + return GGML_CUDA_NAME "_Host"; + + UNUSED(buft); +} + +static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { + return GGML_CUDA_NAME "_Host"; + + UNUSED(buffer); +} + static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_cuda_host_free(buffer->context); } @@ -10532,9 +10643,9 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); } - // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); buffer->buft = buft; + buffer->iface.get_name = ggml_backend_cuda_host_buffer_name; buffer->iface.free_buffer = ggml_backend_cuda_host_buffer_free_buffer; return buffer; @@ -10543,6 +10654,7 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { /* .iface = */ { + /* .get_name = */ ggml_backend_cuda_host_buffer_type_name, /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, @@ -10557,31 +10669,27 @@ ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { // backend -struct ggml_backend_context_cuda { - int device; -}; - static const char * ggml_backend_cuda_name(ggml_backend_t backend) { - return GGML_CUDA_NAME; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; - UNUSED(backend); + return cuda_ctx->name.c_str(); } static void ggml_backend_cuda_free(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; delete cuda_ctx; delete backend; } static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return ggml_backend_cuda_buffer_type(cuda_ctx->device); } static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context 
*)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -10590,7 +10698,7 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens } static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -10598,39 +10706,27 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } +static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; + + if (dst->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && ggml_backend_buffer_is_cuda(src->buffer)) { + CUDA_CHECK(cudaMemcpyAsync(dst->data, src->data, ggml_nbytes(dst), cudaMemcpyDeviceToDevice, g_cudaStreams[cuda_ctx->device][0])); + return true; + } + + return false; +} + static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[cuda_ctx->device][0])); UNUSED(backend); } -static ggml_backend_graph_plan_t ggml_backend_cuda_graph_plan_create(ggml_backend_t backend, ggml_cgraph * cgraph) { - GGML_ASSERT(!"not implemented"); - - return nullptr; - - UNUSED(backend); - UNUSED(cgraph); -} - -static void ggml_backend_cuda_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - -static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -10640,53 +10736,31 @@ static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; - if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) + if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE || node->op == GGML_OP_NONE) { continue; + } - assert(node->backend == GGML_BACKEND_GPU); +#ifndef NDEBUG + assert(node->backend == GGML_BACKEND_GPU || node->backend == GGML_BACKEND_GPU_SPLIT); assert(node->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if 
(node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU); + assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->src[j]->extra != nullptr); } } +#endif bool ok = ggml_cuda_compute_forward(¶ms, node); if (!ok) { fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } GGML_ASSERT(ok); - -#if 0 - if (node->type == GGML_TYPE_F32) { - cudaDeviceSynchronize(); - std::vector tmp(ggml_nelements(node), 0.0f); - cudaMemcpy(tmp.data(), node->data, ggml_nelements(node)*sizeof(float), cudaMemcpyDeviceToHost); - printf("\n%s (%s) (%s %s) (%s %s): ", node->name, ggml_op_name(node->op), - ggml_type_name(node->src[0]->type), - node->src[1] ? ggml_type_name(node->src[1]->type) : "none", - node->src[0]->name, - node->src[1] ? node->src[1]->name : "none"); - double sum = 0.0; - double sq_sum = 0.0; - for (int i = 0; i < ggml_nelements(node); i++) { - printf("%f ", tmp[i]); - sum += tmp[i]; - sq_sum += tmp[i]*tmp[i]; - } - printf("\n"); - printf("sum: %f, ", sum); - printf("sq_sum: %f\n", sq_sum); - } -#endif } - UNUSED(backend); - return true; } @@ -10801,18 +10875,17 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten UNUSED(backend); } -static ggml_backend_i cuda_backend_i = { +static ggml_backend_i ggml_backend_cuda_interface = { /* .get_name = */ ggml_backend_cuda_name, /* .free = */ ggml_backend_cuda_free, /* .get_default_buffer_type = */ ggml_backend_cuda_get_default_buffer_type, /* .set_tensor_async = */ ggml_backend_cuda_set_tensor_async, /* .get_tensor_async = */ ggml_backend_cuda_get_tensor_async, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ ggml_backend_cuda_cpy_tensor_async, /* .synchronize = */ ggml_backend_cuda_synchronize, - /* .graph_plan_create = */ ggml_backend_cuda_graph_plan_create, - /* .graph_plan_free = */ ggml_backend_cuda_graph_plan_free, - /* .graph_plan_compute = */ ggml_backend_cuda_graph_plan_compute, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, /* .graph_compute = */ ggml_backend_cuda_graph_compute, /* .supports_op = */ ggml_backend_cuda_supports_op, }; @@ -10828,12 +10901,13 @@ ggml_backend_t ggml_backend_cuda_init(int device) { // not strictly necessary, but it may reduce the overhead of the first graph_compute ggml_cuda_set_main_device(device); - ggml_backend_context_cuda * ctx = new ggml_backend_context_cuda { - /* .device = */ device + ggml_backend_cuda_context * ctx = new ggml_backend_cuda_context { + /* .device = */ device, + /* .name = */ GGML_CUDA_NAME + std::to_string(device), }; ggml_backend_t cuda_backend = new ggml_backend { - /* .interface = */ cuda_backend_i, + /* .interface = */ ggml_backend_cuda_interface, /* .context = */ ctx }; @@ -10841,9 +10915,24 @@ ggml_backend_t ggml_backend_cuda_init(int device) { } bool ggml_backend_is_cuda(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_cuda_name; + return backend && backend->iface.get_name == ggml_backend_cuda_name; } +int ggml_backend_cuda_get_device_count() { + return ggml_cuda_get_device_count(); +} + +void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { + ggml_cuda_get_device_description(device, description, description_size); +} + +void 
ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { + ggml_cuda_set_device(device); + + CUDA_CHECK(cudaMemGetInfo(free, total)); +} + +// backend registry static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { ggml_backend_t cuda_backend = ggml_backend_cuda_init((int) (intptr_t) user_data); return cuda_backend; diff --git a/ggml-cuda.h b/ggml-cuda.h index cdb0c0c41..d19cbf3fd 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -27,22 +27,6 @@ GGML_API void * ggml_cuda_host_malloc(size_t size); GGML_API void ggml_cuda_host_free(void * ptr); GGML_API bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API void ggml_cuda_set_tensor_split(const float * tensor_split); -GGML_API void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor); -GGML_API void ggml_cuda_free_data(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_assign_buffers(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_buffers_no_scratch(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_buffers_force_inplace(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_assign_buffers_no_alloc(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset); -GGML_API void ggml_cuda_copy_to_device(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_set_main_device(int main_device); -GGML_API void ggml_cuda_set_mul_mat_q(bool mul_mat_q); -GGML_API void ggml_cuda_set_scratch_size(size_t scratch_size); -GGML_API void ggml_cuda_free_scratch(void); GGML_API bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); GGML_API int ggml_cuda_get_device_count(void); @@ -52,13 +36,17 @@ GGML_API void ggml_cuda_get_device_description(int device, char * description, GGML_API ggml_backend_t ggml_backend_cuda_init(int device); GGML_API bool ggml_backend_is_cuda(ggml_backend_t backend); -GGML_API int ggml_backend_cuda_get_device(ggml_backend_t backend); GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); - -// pinned host buffer for use with CPU backend for faster copies between CPU and GPU +// split tensor buffer that splits matrices by rows across multiple devices +GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); +// pinned host buffer for use with the CPU backend for faster copies between CPU and GPU GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); +GGML_API int ggml_backend_cuda_get_device_count(void); +GGML_API void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); + #ifdef __cplusplus } #endif diff --git a/ggml-impl.h b/ggml-impl.h index 2faced080..2c58075ac 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -228,6 +228,8 @@ inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { #define GGML_HASHTABLE_FULL ((size_t)-1) #define GGML_HASHTABLE_ALREADY_EXISTS ((size_t)-2) +struct ggml_hash_set ggml_hash_set_new(size_t size); + bool ggml_hash_contains (const struct ggml_hash_set hash_set, struct ggml_tensor * key); // returns GGML_HASHTABLE_FULL if table is full, otherwise the current index of the key or where it should be inserted diff --git a/ggml-metal.m b/ggml-metal.m index 6e5594432..c03624073 100644 --- a/ggml-metal.m +++ 
b/ggml-metal.m @@ -2520,10 +2520,10 @@ static void ggml_backend_metal_free_device(void) { } } -static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { - struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; +static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { + return "Metal"; - return ctx->all_data; + UNUSED(buffer); } static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { @@ -2541,6 +2541,12 @@ static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) free(ctx); } +static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + return ctx->all_data; +} + static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); @@ -2553,14 +2559,12 @@ static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, c UNUSED(buffer); } -static void ggml_backend_metal_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - UNUSED(buffer); -} - -static void ggml_backend_metal_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); +static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; UNUSED(buffer); } @@ -2572,18 +2576,25 @@ static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_ } static struct ggml_backend_buffer_i ggml_backend_metal_buffer_i = { + /* .get_name = */ ggml_backend_metal_buffer_get_name, /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, /* .get_base = */ ggml_backend_metal_buffer_get_base, /* .init_tensor = */ NULL, /* .set_tensor = */ ggml_backend_metal_buffer_set_tensor, /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_metal_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_metal_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_metal_buffer_cpy_tensor, /* .clear = */ ggml_backend_metal_buffer_clear, + /* .reset = */ NULL, }; // default buffer type +static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "Metal"; + + UNUSED(buft); +} + static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2656,6 +2667,7 @@ static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t bu ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { /* .iface = */ { + /* .get_name = */ ggml_backend_metal_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_metal_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // 
defaults to ggml_nbytes @@ -2679,6 +2691,14 @@ ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t siz ctx->n_buffers = 0; const size_t size_page = sysconf(_SC_PAGESIZE); + + // page-align the data ptr + { + const uintptr_t offs = (uintptr_t) data % size_page; + data = (void *) ((char *) data - offs); + size += offs; + } + size_t size_aligned = size; if ((size_aligned % size_page) != 0) { size_aligned += (size_page - (size_aligned % size_page)); @@ -2779,14 +2799,13 @@ static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct UNUSED(backend); } -static struct ggml_backend_i metal_backend_i = { +static struct ggml_backend_i ggml_backend_metal_i = { /* .get_name = */ ggml_backend_metal_name, /* .free = */ ggml_backend_metal_free, /* .get_default_buffer_type = */ ggml_backend_metal_get_default_buffer_type, /* .set_tensor_async = */ NULL, /* .get_tensor_async = */ NULL, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ NULL, /* .synchronize = */ NULL, /* .graph_plan_create = */ NULL, /* .graph_plan_free = */ NULL, @@ -2805,7 +2824,7 @@ ggml_backend_t ggml_backend_metal_init(void) { ggml_backend_t metal_backend = malloc(sizeof(struct ggml_backend)); *metal_backend = (struct ggml_backend) { - /* .interface = */ metal_backend_i, + /* .interface = */ ggml_backend_metal_i, /* .context = */ ctx, }; @@ -2813,7 +2832,7 @@ ggml_backend_t ggml_backend_metal_init(void) { } bool ggml_backend_is_metal(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_metal_name; + return backend && backend->iface.get_name == ggml_backend_metal_name; } void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 496f9cdca..2bb93638f 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-opencl.h" +#include "ggml-backend-impl.h" #include #include @@ -10,7 +11,7 @@ #include #include -#define CL_TARGET_OPENCL_VERSION 110 +#define CL_TARGET_OPENCL_VERSION 120 #include #if defined(_MSC_VER) @@ -929,6 +930,12 @@ static cl_program build_program_from_source(cl_context ctx, cl_device_id dev, co } void ggml_cl_init(void) { + static bool initialized = false; + if (initialized) { + return; + } + initialized = true; + cl_int err; struct cl_device; @@ -1483,8 +1490,8 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } else { d_X = ggml_cl_pool_malloc(sizeof(float) * x_ne, &x_size); } - cl_mem d_Y = ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); - cl_mem d_D = ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); + cl_mem d_Y = src1->backend == GGML_BACKEND_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); + cl_mem d_D = dst->backend == GGML_BACKEND_GPU ? 
(cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); size_t x_offset = 0; @@ -1501,7 +1508,9 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // copy src1 to device - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); + if (src1->backend == GGML_BACKEND_CPU) { + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); + } CL_CHECK(clFinish(queue)); @@ -1522,8 +1531,10 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); + if (dst->backend == GGML_BACKEND_CPU) { + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); + } } } } @@ -1532,8 +1543,12 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr if (src0->backend != GGML_BACKEND_GPU) { ggml_cl_pool_free(d_X, x_size); } - ggml_cl_pool_free(d_Y, y_size); - ggml_cl_pool_free(d_D, d_size); + if (src1->backend != GGML_BACKEND_GPU) { + ggml_cl_pool_free(d_Y, y_size); + } + if (dst->backend != GGML_BACKEND_GPU) { + ggml_cl_pool_free(d_D, d_size); + } } static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, void * wdata, size_t wsize) { @@ -1598,6 +1613,8 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); } + // FIXME: convert on device + for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // convert src1 to fp16 // TODO: use multiple threads @@ -1643,11 +1660,13 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host, then convert to float - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); - - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - - ggml_fp16_to_fp32_row(tmp, d, d_ne); + if (dst->backend == GGML_BACKEND_CPU) { + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + ggml_fp16_to_fp32_row(tmp, d, d_ne); + } else { + // FIXME: convert dst to fp32 on device + } } } } @@ -1801,7 +1820,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * } -bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, const struct ggml_tensor * dst) { const int64_t ne10 = src1->ne[0]; const int64_t ne0 = dst->ne[0]; @@ -1895,3 +1914,291 @@ void ggml_cl_transform_tensor(void * data, ggml_tensor * tensor) { tensor->extra = dst; GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); } + +// ggml-backend + +// buffer + +struct ggml_backend_opencl_buffer_context { + ~ggml_backend_opencl_buffer_context() { + if (buffer) { + clReleaseMemObject(buffer); + } + for (auto * sub_buffer : sub_buffers) { + clReleaseMemObject(sub_buffer); + } + } + + cl_mem buffer; + std::vector sub_buffers; +}; + +static void * const cl_ptr_base = (void *)(uintptr_t) 0x1000; + +static const char * 
ggml_backend_opencl_buffer_get_name(ggml_backend_buffer_t buffer) { + return "OpenCL"; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + delete ctx; +} + +static void * ggml_backend_opencl_buffer_get_base(ggml_backend_buffer_t buffer) { + return cl_ptr_base; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { + if (tensor->view_src != NULL && tensor->view_offs == 0) { + tensor->extra = tensor->view_src->extra; + } else { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + cl_buffer_region region = {(size_t)((char *)tensor->data - (char *)cl_ptr_base), ggml_nbytes(tensor)}; + cl_int err; + cl_mem sub_buffer = clCreateSubBuffer(ctx->buffer, CL_MEM_READ_WRITE, CL_BUFFER_CREATE_TYPE_REGION, ®ion, &err); + CL_CHECK(err); + ctx->sub_buffers.push_back(sub_buffer); + tensor->extra = sub_buffer; + } + tensor->backend = GGML_BACKEND_GPU; +} + +static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + cl_mem tensor_buffer = (cl_mem) tensor->extra; + CL_CHECK(clEnqueueWriteBuffer(queue, tensor_buffer, true, offset, size, data, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + cl_mem tensor_buffer = (cl_mem) tensor->extra; + CL_CHECK(clEnqueueReadBuffer(queue, tensor_buffer, true, offset, size, data, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + CL_CHECK(clEnqueueFillBuffer(queue, ctx->buffer, &value, sizeof(value), 0, buffer->size, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); +} + +static void ggml_backend_opencl_buffer_reset(ggml_backend_buffer_t buffer) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + for (auto * sub_buffer : ctx->sub_buffers) { + clReleaseMemObject(sub_buffer); + } + ctx->sub_buffers.clear(); +} + +static ggml_backend_buffer_i ggml_backend_opencl_buffer_interface = { + /* .get_name = */ ggml_backend_opencl_buffer_get_name, + /* .free_buffer = */ ggml_backend_opencl_buffer_free_buffer, + /* .get_base = */ ggml_backend_opencl_buffer_get_base, + /* .init_tensor = */ ggml_backend_opencl_buffer_init_tensor, + /* .set_tensor = */ ggml_backend_opencl_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_opencl_buffer_get_tensor, + /* .cpy_tensor = */ NULL, + /* .clear = */ ggml_backend_opencl_buffer_clear, + /* .reset = */ ggml_backend_opencl_buffer_reset, +}; + +// buffer type + +static const char * ggml_backend_opencl_buffer_type_name(ggml_backend_buffer_type_t buffer_type) { + return "OpenCL"; + + GGML_UNUSED(buffer_type); +} + +static ggml_backend_buffer_t ggml_backend_opencl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buffer_type, size_t size) { + ggml_cl_init(); + + cl_int err; + cl_mem mem = clCreateBuffer(context, CL_MEM_READ_WRITE, size, NULL, &err); + if (err != CL_SUCCESS) { + fprintf(stderr, "%s: failed to allocate %.2f 
MiB\n", __func__, size / 1024.0 / 1024.0); + return nullptr; + } + + ggml_backend_opencl_buffer_context * ctx = new ggml_backend_opencl_buffer_context{mem, {}}; + + return ggml_backend_buffer_init(buffer_type, ggml_backend_opencl_buffer_interface, ctx, size); +} + +static size_t ggml_backend_opencl_buffer_type_get_alignment(ggml_backend_buffer_type_t buffer_type) { + // FIXME: not thread safe, device may not be initialized yet + static cl_uint alignment = -1; + if (alignment == (cl_uint)-1) { + ggml_cl_init(); + clGetDeviceInfo(device, CL_DEVICE_MEM_BASE_ADDR_ALIGN, sizeof(cl_uint), &alignment, NULL); + } + return alignment; + + GGML_UNUSED(buffer_type); +} + +static bool ggml_backend_opencl_buffer_type_supports_backend(ggml_backend_buffer_type_t buffer_type, ggml_backend_t backend) { + //return ggml_backend_is_opencl(backend); // opencl must be used through the cpu backend + return ggml_backend_is_cpu(backend); + + GGML_UNUSED(buffer_type); +} + +static ggml_backend_buffer_type_i ggml_backend_opencl_buffer_type_interface = { + /* .get_name = */ ggml_backend_opencl_buffer_type_name, + /* .alloc_buffer = */ ggml_backend_opencl_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_opencl_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, + /* .supports_backend = */ ggml_backend_opencl_buffer_type_supports_backend, + /* .is_host = */ NULL, +}; + + +ggml_backend_buffer_type_t ggml_backend_opencl_buffer_type() { + static ggml_backend_buffer_type buffer_type = { + /* .iface = */ ggml_backend_opencl_buffer_type_interface, + /* .context = */ nullptr, + }; + + return &buffer_type; +} + +#if 0 +// host buffer type + +static const char * ggml_backend_opencl_host_buffer_type_name(ggml_backend_buffer_type_t buft) { + return "CL_Host"; + + GGML_UNUSED(buft); +} + +static const char * ggml_backend_opencl_host_buffer_name(ggml_backend_buffer_t buffer) { + return "CL_Host"; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_cl_host_free(buffer->context); +} + +static ggml_backend_buffer_t ggml_backend_opencl_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + void * ptr = ggml_cl_host_malloc(size); + + if (ptr == nullptr) { + // fallback to cpu buffer + return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); + } + + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.get_name = ggml_backend_opencl_host_buffer_name; + buffer->iface.free_buffer = ggml_backend_opencl_host_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_opencl_host_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_opencl_buffer_type_host = { + /* .iface = */ { + /* .get_name = */ ggml_backend_opencl_host_buffer_type_name, + /* .alloc_buffer = */ ggml_backend_opencl_host_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, + /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, + /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type()->iface.is_host, + }, + /* .context = */ nullptr, + }; + + return &ggml_backend_opencl_buffer_type_host; +} + +// backend + +static const char * ggml_backend_opencl_name(ggml_backend_t backend) { + return "OpenCL"; + + GGML_UNUSED(backend); +} + +static void ggml_backend_opencl_free(ggml_backend_t backend) { + 
GGML_UNUSED(backend); +} + +static ggml_backend_buffer_type_t ggml_backend_opencl_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_opencl_buffer_type(); + + GGML_UNUSED(backend); +} + +static bool ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggml_cgraph * graph) { + for (int i = 0; i < graph->n_nodes; ++i) { + ggml_tensor * node = graph->nodes[i]; + switch (node->op) { + case GGML_OP_MUL_MAT: + ggml_cl_mul_mat(node->src[0], node->src[1], node, nullptr, 0); + break; + case GGML_OP_MUL: + ggml_cl_mul(node->src[0], node->src[1], node); + break; + default: + GGML_ASSERT(false); + } + } + + return true; + + GGML_UNUSED(backend); +} + +static bool ggml_backend_opencl_supports_op(ggml_backend_t backend, const ggml_tensor * op) { + switch (op->op) { + case GGML_OP_MUL_MAT: + return ggml_cl_can_mul_mat(op->src[0], op->src[1], op); + case GGML_OP_MUL: + // return ggml_can_repeat_rows(op->src[1], op->src[0]); + return true; + default: + return false; + } + + GGML_UNUSED(backend); +} + +static ggml_backend_i opencl_backend_i = { + /* .get_name = */ ggml_backend_opencl_name, + /* .free = */ ggml_backend_opencl_free, + /* .get_default_buffer_type = */ ggml_backend_opencl_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_from_async = */ NULL, + /* .cpy_tensor_to_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, + /* .graph_compute = */ ggml_backend_opencl_graph_compute, + /* .supports_op = */ ggml_backend_opencl_supports_op, +}; + +ggml_backend_t ggml_backend_opencl_init() { + ggml_backend_t backend = new ggml_backend { + /* .interface = */ opencl_backend_i, + /* .context = */ nullptr + }; + + return backend; +} + +bool ggml_backend_is_opencl(ggml_backend_t backend) { + return backend && backend->iface.get_name == ggml_backend_opencl_name; +} +#endif diff --git a/ggml-opencl.h b/ggml-opencl.h index 44d05bd64..919b00d63 100644 --- a/ggml-opencl.h +++ b/ggml-opencl.h @@ -1,6 +1,7 @@ #pragma once #include "ggml.h" +#include "ggml-backend.h" #ifdef __cplusplus extern "C" { @@ -9,17 +10,26 @@ extern "C" { GGML_API void ggml_cl_init(void); GGML_API void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, const struct ggml_tensor * dst); GGML_API size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); GGML_API void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); -GGML_API void * ggml_cl_host_malloc(size_t size); -GGML_API void ggml_cl_host_free(void * ptr); +// GGML_API void * ggml_cl_host_malloc(size_t size); +// GGML_API void ggml_cl_host_free(void * ptr); GGML_API void ggml_cl_free_data(const struct ggml_tensor* tensor); GGML_API void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); +// backend API + +// GGML_API ggml_backend_t ggml_backend_opencl_init(void); + +// GGML_API bool ggml_backend_is_opencl(ggml_backend_t backend); + +GGML_API ggml_backend_buffer_type_t ggml_backend_opencl_buffer_type(void); +// GGML_API ggml_backend_buffer_type_t 
ggml_backend_opencl_host_buffer_type(void); + #ifdef __cplusplus } #endif diff --git a/ggml.c b/ggml.c index f5caeba08..6dbd7626c 100644 --- a/ggml.c +++ b/ggml.c @@ -2354,6 +2354,10 @@ struct ggml_context * ggml_init(struct ggml_init_params params) { } void ggml_free(struct ggml_context * ctx) { + if (ctx == NULL) { + return; + } + // make this function thread safe ggml_critical_section_start(); @@ -4362,6 +4366,23 @@ struct ggml_tensor * ggml_cpy( return ggml_cpy_impl(ctx, a, b); } +struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type) { + bool is_node = false; + + struct ggml_tensor * result = ggml_new_tensor(ctx, type, GGML_MAX_DIMS, a->ne); + ggml_format_name(result, "%s (copy)", a->name); + + result->op = GGML_OP_CPY; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + result->src[1] = result; + + return result; +} + // ggml_cont static struct ggml_tensor * ggml_cont_impl( @@ -14871,7 +14892,7 @@ size_t ggml_hash_find_or_insert(struct ggml_hash_set hash_set, struct ggml_tenso return i; } -static struct ggml_hash_set ggml_hash_set_new(size_t size) { +struct ggml_hash_set ggml_hash_set_new(size_t size) { size = ggml_hash_size(size); struct ggml_hash_set result; result.size = size; @@ -16620,7 +16641,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { return GGML_EXIT_SUCCESS; } -struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { +struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threads) { if (n_threads <= 0) { n_threads = GGML_DEFAULT_N_THREADS; } @@ -16682,14 +16703,15 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_MUL_MAT_ID: { + cur = 0; const struct ggml_tensor * src0 = node->src[2]; const struct ggml_tensor * src1 = node->src[1]; const enum ggml_type vec_dot_type = type_traits[src0->type].vec_dot_type; if (src1->type != vec_dot_type) { - cur = ggml_row_size(vec_dot_type, ggml_nelements(src1)); + cur += ggml_row_size(vec_dot_type, ggml_nelements(src1)); } const int n_as = ggml_get_op_params_i32(node, 1); - cur = GGML_PAD(cur, sizeof(int64_t)); // align + cur += GGML_PAD(cur, sizeof(int64_t)); // align cur += n_as * sizeof(int64_t); // matrix_row_counts cur += n_as * src1->ne[1] * sizeof(int64_t); // matrix_rows } break; diff --git a/ggml.h b/ggml.h index 4c2ff6c66..b18ba7812 100644 --- a/ggml.h +++ b/ggml.h @@ -1165,6 +1165,11 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + GGML_API struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type); + // make contiguous GGML_API struct ggml_tensor * ggml_cont( struct ggml_context * ctx, @@ -1842,8 +1847,8 @@ extern "C" { // ggml_graph_plan() has to be called before ggml_graph_compute() // when plan.work_size > 0, caller must allocate memory for plan.work_data - GGML_API struct ggml_cplan ggml_graph_plan (struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); - GGML_API int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); + GGML_API struct ggml_cplan ggml_graph_plan (const struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); + GGML_API int ggml_graph_compute( struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); // same as ggml_graph_compute() but the work data is allocated as a part of the context // note: the drawback of this API is that you must have ensured that the context has enough 
memory for the work data diff --git a/llama.cpp b/llama.cpp index ce413f605..fe1d8947c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1,5 +1,4 @@ #define LLAMA_API_INTERNAL -//#define LLAMA_GGML_BACKEND_CUDA_TEST // for testing only - enables ggml-cuda through ggml-backend, disables partial offloading #include "llama.h" #include "unicode.h" @@ -152,10 +151,6 @@ static bool is_float_close(float a, float b, float abs_tol) { return std::fabs(b - a) <= abs_tol; } -#ifdef GGML_USE_CPU_HBM -#include -#endif - static void zeros(std::ofstream & file, size_t n) { char zero = 0; for (size_t i = 0; i < n; ++i) { @@ -1190,12 +1185,6 @@ struct llama_mlock { #endif }; -typedef void (*offload_func_t)(struct ggml_tensor * tensor); - -static void ggml_offload_nop(struct ggml_tensor * tensor) { - (void) tensor; -} - static std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { std::vector result(8, 0); const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); @@ -1211,19 +1200,14 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ return std::string(result.data(), result.size()); } -static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { +static ggml_backend_buffer_type_t llama_default_buffer_type_cpu(bool host_buffer) { ggml_backend_buffer_type_t buft = nullptr; -#ifdef GGML_USE_METAL - if (n_gpu_layers > 0) { - buft = ggml_backend_metal_buffer_type(); +#if defined(GGML_USE_CUBLAS) + // host buffers should only be used when data is expected to be copied to/from the GPU + if (host_buffer) { + buft = ggml_backend_cuda_host_buffer_type(); } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (n_gpu_layers > 0) { - buft = ggml_backend_cuda_buffer_type(0); - } -#elif defined(GGML_USE_CUBLAS) - buft = ggml_backend_cuda_host_buffer_type(); #elif defined(GGML_USE_CPU_HBM) buft = ggml_backend_cpu_hbm_buffer_type(); #endif @@ -1231,10 +1215,45 @@ static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { if (buft == nullptr) { buft = ggml_backend_cpu_buffer_type(); } - return buft; - GGML_UNUSED(n_gpu_layers); + GGML_UNUSED(host_buffer); +} + +static ggml_backend_buffer_type_t llama_default_buffer_type_offload(int gpu) { + ggml_backend_buffer_type_t buft = nullptr; + +#ifdef GGML_USE_METAL + buft = ggml_backend_metal_buffer_type(); +#elif defined(GGML_USE_CUBLAS) + buft = ggml_backend_cuda_buffer_type(gpu); +#elif defined(GGML_USE_CLBLAST) + buft = ggml_backend_opencl_buffer_type(); +#endif + + if (buft == nullptr) { + buft = llama_default_buffer_type_cpu(true); + } + return buft; + + GGML_UNUSED(gpu); +} + +static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_gpu, const float * tensor_split) { + ggml_backend_buffer_type_t buft = nullptr; + +#ifdef GGML_USE_CUBLAS + if (ggml_backend_cuda_get_device_count() > 1) { + buft = ggml_backend_cuda_split_buffer_type(tensor_split); + } +#endif + + if (buft == nullptr) { + buft = llama_default_buffer_type_offload(fallback_gpu); + } + return buft; + + GGML_UNUSED(tensor_split); } // @@ -1445,24 +1464,24 @@ struct llama_kv_cache { std::vector k_l; // per layer std::vector v_l; - struct ggml_context * ctx = NULL; + std::vector ctxs; + std::vector bufs; - ggml_backend_buffer_t buf = NULL; + size_t total_size() const { + size_t size = 0; + for (ggml_backend_buffer_t buf : bufs) { + size += ggml_backend_buffer_get_size(buf); + } + return size; + } ~llama_kv_cache() { -#if 
defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - for (size_t i = 0; i < k_l.size(); ++i) { - ggml_cuda_free_data(k_l[i]); - ggml_cuda_free_data(v_l[i]); - } - } -#endif - if (ctx) { + for (struct ggml_context * ctx : ctxs) { ggml_free(ctx); } - - ggml_backend_buffer_free(buf); + for (ggml_backend_buffer_t buf : bufs) { + ggml_backend_buffer_free(buf); + } } }; @@ -1539,16 +1558,32 @@ struct llama_model { std::vector layers; + llama_split_mode split_mode; + int main_gpu; int n_gpu_layers; // gguf metadata std::unordered_map gguf_kv; - // context - struct ggml_context * ctx = NULL; + // layer -> buffer type mapping + struct layer_buft { + layer_buft() : buft_matrix(nullptr), buft(nullptr) {} + layer_buft(ggml_backend_buffer_type_t matrix) : buft_matrix(matrix), buft(matrix) {} + layer_buft(ggml_backend_buffer_type_t matrix, ggml_backend_buffer_type_t other) : buft_matrix(matrix), buft(other) {} - // the model memory buffer - ggml_backend_buffer_t buf = NULL; + ggml_backend_buffer_type_t buft_matrix; // matrices only - used by split buffers and backends that support only matrix multiplication + ggml_backend_buffer_type_t buft; // everything else + }; + + layer_buft buft_input; + layer_buft buft_output; + std::vector buft_layer; + + // contexts where the model tensors metadata is stored + std::vector ctxs; + + // the model memory buffers for the tensor data + std::vector bufs; // model memory mapped file std::unique_ptr mapping; @@ -1564,39 +1599,32 @@ struct llama_model { int64_t t_start_us = 0; ~llama_model() { -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - for (size_t i = 0; i < tensors_by_name.size(); ++i) { - ggml_cuda_free_data(tensors_by_name[i].second); - } - ggml_cuda_free_scratch(); - } -#endif - -#if defined(GGML_USE_CLBLAST) - for (size_t i = 0; i < tensors_by_name.size(); ++i) { - ggml_cl_free_data(tensors_by_name[i].second); - } -#endif - if (ctx) { + for (struct ggml_context * ctx : ctxs) { ggml_free(ctx); } - - ggml_backend_buffer_free(buf); + for (ggml_backend_buffer_t buf : bufs) { + ggml_backend_buffer_free(buf); + } } }; struct llama_context { llama_context(const llama_model & model) : model(model), t_start_us(model.t_start_us), t_load_us(model.t_load_us) {} ~llama_context() { - ggml_allocr_free(alloc); - ggml_backend_buffer_free(buf_alloc); - ggml_backend_free(backend); + ggml_backend_sched_free(sched); + + for (ggml_backend_t backend : backends) { + ggml_backend_free(backend); + } } llama_cparams cparams; - ggml_backend_t backend = nullptr; + std::vector backends; +#ifdef GGML_USE_METAL + ggml_backend_t backend_metal = nullptr; +#endif + ggml_backend_t backend_cpu = nullptr; const llama_model & model; @@ -1630,8 +1658,9 @@ struct llama_context { // memory buffers used to evaluate the model std::vector buf_compute_meta; - ggml_backend_buffer_t buf_alloc = NULL; - ggml_allocr * alloc = NULL; + ggml_backend_sched_t sched = nullptr; + // allocator for the input tensors + ggml_tallocr * alloc = nullptr; // temporary buffer for copying data to/from the backend std::vector> buf_copy; @@ -1646,16 +1675,17 @@ struct llama_context { // static bool llama_kv_cache_init( - const struct llama_hparams & hparams, struct llama_kv_cache & cache, + const llama_model & model, ggml_type ktype, ggml_type vtype, uint32_t n_ctx, - int n_gpu_layers, bool offload) { + const struct llama_hparams & hparams = model.hparams; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const uint32_t 
n_embd_v_gqa = hparams.n_embd_v_gqa(); - const uint32_t n_layer = hparams.n_layer; + const int64_t n_layer = hparams.n_layer; cache.has_shift = false; @@ -1666,62 +1696,65 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - struct ggml_init_params params; - params.mem_size = 2u*n_layer*ggml_tensor_overhead(); - params.mem_buffer = NULL; - params.no_alloc = true; +#ifdef GGML_USE_CLBLAST + offload = false; +#endif - cache.ctx = ggml_init(params); + // count used buffer types + std::map buft_layer_count; + if (offload) { + for (int64_t i = 0; i < n_layer; ++i) { + buft_layer_count[model.buft_layer[i].buft]++; + } + } else { + buft_layer_count[llama_default_buffer_type_cpu(true)] = n_layer; + } - size_t vram_kv_cache = 0; - - if (!cache.ctx) { - LLAMA_LOG_ERROR("%s: failed to allocate memory for kv cache\n", __func__); - return false; + // create a context for each buffer type + std::map ctx_map; + for (auto & it : buft_layer_count) { + int n_layers = it.second; + struct ggml_init_params params = { + /*.mem_size =*/ 2u*n_layers*ggml_tensor_overhead(), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, + }; + ggml_context * ctx = ggml_init(params); + if (!ctx) { + LLAMA_LOG_ERROR("%s: failed to allocate context for kv cache\n", __func__); + return false; + } + ctx_map[it.first] = ctx; + cache.ctxs.push_back(ctx); } cache.k_l.reserve(n_layer); cache.v_l.reserve(n_layer); - const int i_gpu_start = (int) n_layer - n_gpu_layers; - for (int i = 0; i < (int) n_layer; i++) { - ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd_k_gqa*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd_v_gqa*n_ctx); + struct ggml_context * ctx = offload ? ctx_map.at(model.buft_layer[i].buft) : cache.ctxs.front(); + ggml_tensor * k = ggml_new_tensor_1d(ctx, ktype, n_embd_k_gqa*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(ctx, vtype, n_embd_v_gqa*n_ctx); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); cache.v_l.push_back(v); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (i >= i_gpu_start) { - if (offload) { - ggml_cuda_assign_buffers_no_scratch(k); - ggml_cuda_assign_buffers_no_scratch(v); - vram_kv_cache += ggml_nbytes(k); - vram_kv_cache += ggml_nbytes(v); - // HACK: mark tensor as allocated - k->data = v->data = (void *)(uintptr_t)1; - } + } + + // allocate tensors and initialize the buffers to avoid NaNs in the padding + for (auto it : ctx_map) { + ggml_backend_buffer_type_t buft = it.first; + ggml_context * ctx = it.second; + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, buft); + if (!buf) { + LLAMA_LOG_ERROR("%s: failed to allocate buffer for kv cache\n", __func__); + return false; } -#endif // GGML_USE_CUBLAS + ggml_backend_buffer_clear(buf, 0); + LLAMA_LOG_INFO("%s: %10s KV buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf)/1024.0/1024.0); + cache.bufs.push_back(buf); } - // allocate tensors - cache.buf = ggml_backend_alloc_ctx_tensors_from_buft(cache.ctx, llama_default_buffer_type(n_gpu_layers)); - - // buf may be NULL with full offload - if (cache.buf) { - // initialize the buffer to avoid NaNs in the padding - ggml_backend_buffer_clear(cache.buf, 0); - } - - if (vram_kv_cache > 0) { - LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); - } - - GGML_UNUSED(i_gpu_start); - GGML_UNUSED(offload); - return true; } @@ -2354,9 +2387,8 @@ struct 
llama_model_loader { return get_tensor_meta(get_tensor_name(i)); } - struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta, ggml_backend_type backend) { + struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta) { struct ggml_tensor * tensor = ggml_dup_tensor(ctx, meta); - tensor->backend = backend; // TODO: ggml_set_backend ggml_set_name(tensor, ggml_get_name(meta)); n_created++; @@ -2364,7 +2396,7 @@ struct llama_model_loader { return tensor; } - struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend, bool required = true) { + struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, bool required = true) { struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, name.c_str()); if (cur == NULL) { @@ -2374,12 +2406,6 @@ struct llama_model_loader { throw std::runtime_error(format("%s: tensor '%s' not found", __func__, name.c_str())); } - if (backend == GGML_BACKEND_GPU_SPLIT) { - if (ne.size() == 1) { - throw std::runtime_error(format("%s: 1-dimensional tensor '%s' cannot be split on the GPU", __func__, name.c_str())); - } - } - { bool is_ok = true; for (size_t i = 0; i < ne.size(); ++i) { @@ -2397,7 +2423,7 @@ struct llama_model_loader { } } - return create_tensor_for(ctx, cur, backend); + return create_tensor_for(ctx, cur); } void done_getting_tensors() const { @@ -2416,26 +2442,36 @@ struct llama_model_loader { return gguf_get_data_offset(ctx_gguf) + gguf_get_tensor_offset(ctx_gguf, idx); } - void init_mapping(bool prefetch = true) { - /* - // prefetch only CPU tensors - if (use_mmap) { - size_t size_pref = 0; // prefetch - - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - if (cur->backend == GGML_BACKEND_CPU) { - size_t tensor_end = gguf_get_tensor_offset(ctx_gguf, i) + ggml_nbytes(cur); - size_pref = std::max(size_pref, tensor_end); - } - } - mapping.reset(new llama_mmap(&file, gguf_get_data_offset(ctx_gguf) + size_pref, ggml_is_numa())); - } - */ + void init_mapping(bool prefetch = true, llama_mlock * lmlock = nullptr) { // prefetch the whole file - all the data is needed anyway if (use_mmap) { mapping.reset(new llama_mmap(&file, prefetch ? 
-1 : 0, ggml_is_numa())); } + + // compute the total size of all tensors for progress reporting + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { + struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + size_data += ggml_nbytes(cur); + } + + if (use_mmap && mapping) { + if (lmlock) { + lmlock->init(mapping->addr); + } + mmap_used_first = mapping->size; + } + } + + void get_mapping_range(size_t * first, size_t * last, ggml_context * ctx) const { + GGML_ASSERT(mapping); + + *first = mapping->size; + *last = 0; + for (ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor; tensor = ggml_get_next_tensor(ctx, tensor)) { + const size_t offs = file_offset(ggml_get_name(tensor)); + *first = std::min(*first, offs); + *last = std::max(*last, offs + ggml_nbytes(tensor)); + } } // for backwards compatibility, does not support ggml-backend @@ -2443,8 +2479,11 @@ struct llama_model_loader { const size_t offs = file_offset(ggml_get_name(cur)); if (use_mmap && mapping) { - GGML_ASSERT(cur->data == nullptr); - cur->data = (uint8_t *)mapping->addr + offs; + if (cur->data == nullptr) { + cur->data = (uint8_t *)mapping->addr + offs; + } else { + memcpy(cur->data, (uint8_t *)mapping->addr + offs, ggml_nbytes(cur)); + } } else { GGML_ASSERT(cur->data != nullptr); file.seek(offs, SEEK_SET); @@ -2452,37 +2491,23 @@ struct llama_model_loader { } } + size_t size_done = 0; + size_t size_data = 0; + size_t mmap_used_first = -1; + size_t mmap_used_last = 0; + // Returns false if cancelled by progress_callback - bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { - size_t size_data = 0; - - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - size_data += ggml_nbytes(cur); - } - - if (use_mmap && buf_mmap) { - if (lmlock) { - lmlock->init(mapping->addr); - } - } - -#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) - const bool legacy_offload = true; -#else - const bool legacy_offload = false; -#endif + bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) { + GGML_ASSERT(size_data != 0 && "call init_mapping() first"); std::vector> read_buf; - size_t size_done = 0; - - size_t mmap_first = -1; - size_t mmap_last = 0; - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - GGML_ASSERT(cur); // unused tensors should have been caught by load_data already + if (!cur) { + // some tensors may be allocated in a different context + continue; + } if (progress_callback) { if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { @@ -2492,67 +2517,48 @@ struct llama_model_loader { const size_t offs = file_offset(ggml_get_name(cur)); - if (!legacy_offload || cur->backend == GGML_BACKEND_CPU) { - if (use_mmap && mapping) { - if (buf_mmap) { - ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); - if (lmlock) { - lmlock->grow_to(offs + ggml_nbytes(cur)); - } - mmap_first = std::min(mmap_first, offs); - mmap_last = std::max(mmap_last, offs + ggml_nbytes(cur)); - } else { - ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, 
ggml_nbytes(cur)); + if (use_mmap && mapping) { + if (buf_mmap && cur->data == nullptr) { + ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); + if (lmlock) { + lmlock->grow_to(offs + ggml_nbytes(cur)); } + mmap_used_first = std::min(mmap_used_first, offs); + mmap_used_last = std::max(mmap_used_last, offs + ggml_nbytes(cur)); } else { - if (ggml_backend_buffer_is_host(cur->buffer)) { - file.seek(offs, SEEK_SET); - file.read_raw(cur->data, ggml_nbytes(cur)); - } else { - read_buf.resize(ggml_nbytes(cur)); - file.seek(offs, SEEK_SET); - file.read_raw(read_buf.data(), ggml_nbytes(cur)); - ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); - } + ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, ggml_nbytes(cur)); } } else { - // HACK: mark tensor as allocated - cur->data = (void *)(uintptr_t)1; - void * data; - if (use_mmap && mapping) { - data = (uint8_t *) mapping->addr + offs; + if (ggml_backend_buffer_is_host(cur->buffer)) { + file.seek(offs, SEEK_SET); + file.read_raw(cur->data, ggml_nbytes(cur)); } else { read_buf.resize(ggml_nbytes(cur)); file.seek(offs, SEEK_SET); file.read_raw(read_buf.data(), ggml_nbytes(cur)); - data = read_buf.data(); + ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); } - -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - ggml_cuda_transform_tensor(data, cur); -#elif defined(GGML_USE_CLBLAST) - GGML_ASSERT(cur->backend == GGML_BACKEND_GPU); - ggml_cl_transform_tensor(data, cur); -#else - GGML_ASSERT(!"GPU tensor without a GPU backend"); - GGML_UNUSED(data); -#endif } size_done += ggml_nbytes(cur); } - // unmap offloaded tensors and metadata - if (use_mmap && mapping) { - mapping->unmap_fragment(0, mmap_first); - mapping->unmap_fragment(mmap_last, mapping->size); + // check if this is the last call and do final cleanup + if (size_done >= size_data) { + // unmap offloaded tensors and metadata + if (use_mmap && mapping) { + mapping->unmap_fragment(0, mmap_used_first); + if (mmap_used_last != 0) { + mapping->unmap_fragment(mmap_used_last, mapping->size); + } + } + if (progress_callback) { + // Even though the model is done loading, we still honor + // cancellation since we need to free allocations. + return progress_callback(1.0f, progress_callback_user_data); + } } - if (progress_callback) { - // Even though the model is done loading, we still honor - // cancellation since we need to free allocations. 
- return progress_callback(1.0f, progress_callback_user_data); - } return true; } }; @@ -3181,6 +3187,7 @@ static bool llm_load_tensors( llama_model_loader & ml, llama_model & model, int n_gpu_layers, + enum llama_split_mode split_mode, int main_gpu, const float * tensor_split, bool use_mlock, @@ -3188,702 +3195,563 @@ static bool llm_load_tensors( void * progress_callback_user_data) { model.t_start_us = ggml_time_us(); - auto & ctx = model.ctx; auto & hparams = model.hparams; + model.split_mode = split_mode; + model.main_gpu = main_gpu; model.n_gpu_layers = n_gpu_layers; - size_t ctx_size = ggml_tensor_overhead() * ml.n_tensors; + const int64_t n_layer = hparams.n_layer; + const int64_t i_gpu_start = std::max((int64_t) hparams.n_layer - n_gpu_layers, (int64_t) 0); - LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); + // there is very little benefit to offloading the input layer, so always keep it on the CPU + model.buft_input = llama_default_buffer_type_cpu(true); - // create the ggml context + model.buft_layer.resize(n_layer); + + // assign cpu layers + for (int64_t i = 0; i < i_gpu_start; ++i) { + model.buft_layer[i] = llama_default_buffer_type_cpu(true); + } + +#ifdef GGML_USE_CUBLAS + if (split_mode == LLAMA_SPLIT_LAYER) { + // calculate the split points + int device_count = ggml_backend_cuda_get_device_count(); + bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; }); + float splits[GGML_CUDA_MAX_DEVICES]; + if (all_zero) { + // default split, by free memory + for (int i = 0; i < device_count; ++i) { + size_t total; + size_t free; + ggml_backend_cuda_get_device_memory(i, &total, &free); + splits[i] = free; + } + } else { + std::copy(tensor_split, tensor_split + device_count, splits); + } + + // sum and normalize the splits to get the split points + float split_sum = 0.0f; + for (int i = 0; i < device_count; ++i) { + split_sum += splits[i]; + splits[i] = split_sum; + } + for (int i = 0; i < device_count; ++i) { + splits[i] /= split_sum; + } + + // assign the repeating layers to the devices according to the splits + int act_gpu_layers = std::min(n_gpu_layers, (int)n_layer + 1); + for (int64_t i = i_gpu_start; i < n_layer; ++i) { + int layer_gpu = std::upper_bound(splits, splits + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits; + model.buft_layer[i] = llama_default_buffer_type_offload(layer_gpu); + } + // assign the output layer + if (n_gpu_layers > n_layer) { + int layer_gpu = std::upper_bound(splits, splits + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits; + model.buft_output = llama_default_buffer_type_offload(layer_gpu); + } else { + model.buft_output = llama_default_buffer_type_cpu(true); + } + } else +#endif { + ggml_backend_buffer_type_t split_buft; + if (split_mode == LLAMA_SPLIT_ROW) { + split_buft = llama_default_buffer_type_split(main_gpu, tensor_split); + } else { + // LLAMA_SPLIT_NONE or LLAMA_SPLIT_LAYER in backends where it is not supported + split_buft = llama_default_buffer_type_offload(main_gpu); + } + // assign the repeating layers + for (int64_t i = i_gpu_start; i < n_layer; ++i) { + model.buft_layer[i] = { + split_buft, + llama_default_buffer_type_offload(main_gpu) + }; + } + // assign the output layer + if (n_gpu_layers > n_layer) { + model.buft_output = { + split_buft, + llama_default_buffer_type_offload(main_gpu) + }; + } else { + model.buft_output = llama_default_buffer_type_cpu(true); + } + } + + // count used buffer 
types + std::map buft_layer_count; + buft_layer_count[model.buft_input.buft]++; + buft_layer_count[model.buft_input.buft_matrix]++; + buft_layer_count[model.buft_output.buft]++; + buft_layer_count[model.buft_output.buft_matrix]++; + for (int64_t i = 0; i < n_layer; ++i) { + buft_layer_count[model.buft_layer[i].buft]++; + buft_layer_count[model.buft_layer[i].buft_matrix]++; + } + + // create one context per buffer type + size_t ctx_size = ggml_tensor_overhead()*ml.n_tensors; + std::map ctx_map; + for (auto & it : buft_layer_count) { struct ggml_init_params params = { /*.mem_size =*/ ctx_size, /*.mem_buffer =*/ NULL, /*.no_alloc =*/ true, }; - - model.ctx = ggml_init(params); - if (!model.ctx) { - throw std::runtime_error(format("ggml_init() failed")); + ggml_context * ctx = ggml_init(params); + if (!ctx) { + throw std::runtime_error(format("failed to create context")); } + ctx_map[it.first] = ctx; + model.ctxs.push_back(ctx); } - (void) main_gpu; - - enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; - enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; - -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); - ggml_cuda_set_main_device(main_gpu); - - llama_backend_offload = GGML_BACKEND_GPU; - llama_backend_offload_split = GGML_BACKEND_GPU_SPLIT; - } -#elif defined(GGML_USE_CLBLAST) - LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); - llama_backend_offload = GGML_BACKEND_GPU; - llama_backend_offload_split = GGML_BACKEND_GPU; -#endif + LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, model.ctxs.size()*ctx_size/1024.0/1024.0); // create tensors for the weights { const int64_t n_embd = hparams.n_embd; const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); - const int64_t n_layer = hparams.n_layer; + const int64_t n_embd_gqa = n_embd_v_gqa; const int64_t n_vocab = hparams.n_vocab; + const int64_t n_ff = hparams.n_ff; + + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + + ggml_context * ctx_input = ctx_map.at(model.buft_input.buft); + ggml_context * ctx_output = ctx_map.at(model.buft_output.buft); + ggml_context * ctx_output_split = ctx_map.at(model.buft_output.buft_matrix); + auto ctx_for_layer = [&](int i) { return ctx_map.at(model.buft_layer[i].buft); }; + auto ctx_for_layer_split = [&](int i) { return ctx_map.at(model.buft_layer[i].buft_matrix); }; + + model.layers.resize(n_layer); const auto tn = LLM_TN(model.arch); switch (model.arch) { case LLM_ARCH_LLAMA: case LLM_ARCH_REFACT: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, 
tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); // optional bias tensors - layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend, false); - layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend, false); - layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend, false); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend, false); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, false); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, false); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, false); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, false); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate_inp = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_INP, "weight", i), {n_embd}, backend, false); + layer.ffn_gate_inp = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_GATE_INP, "weight", i), {n_embd}, false); if (layer.ffn_gate_inp == nullptr) { GGML_ASSERT(hparams.n_expert == 0); GGML_ASSERT(hparams.n_expert_used == 0); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, 
n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } else { GGML_ASSERT(hparams.n_expert > 0); GGML_ASSERT(hparams.n_expert_used > 0); // MoE branch for (uint32_t x = 0; x < hparams.n_expert; ++x) { - layer.ffn_gate_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); - layer.ffn_down_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN_EXP, "weight", i, x), { n_ff, n_embd}, backend_split); - layer.ffn_up_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); + layer.ffn_gate_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE_EXP, "weight", i, x), {n_embd, n_ff}); + layer.ffn_down_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN_EXP, "weight", i, x), { n_ff, n_embd}); + layer.ffn_up_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}); } } } } break; case LLM_ARCH_BAICHUAN: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_FALCON: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < 
n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); if (gguf_find_tensor(ml.ctx_gguf, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i).c_str()) >= 0) { - layer.attn_norm_2 = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}, backend); - layer.attn_norm_2_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}, backend); + layer.attn_norm_2 = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}); + layer.attn_norm_2_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}); } - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_STARCODER: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_PERSIMMON: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; 
- - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); - const int i_gpu_start = n_layer - n_gpu_layers; - model.layers.resize(n_layer); - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); - layer.attn_q_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}, backend); - layer.attn_q_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}, backend); - layer.attn_k_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "weight", i), {64}, backend); - layer.attn_k_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}, backend); + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, 
tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + + layer.attn_q_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}); + layer.attn_q_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}); + + layer.attn_k_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "weight", i), {64}); + layer.attn_k_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}); } } break; case LLM_ARCH_BLOOM: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); - model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.tok_norm = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_MPT: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); // AWQ ScaleActivation layer - layer.ffn_act = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, backend, false); + layer.ffn_act = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, false); } } break; case LLM_ARCH_STABLELM: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = 
n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - /* - llama_model_loader: - tensor 4: blk.0.attn_output.weight f16 [ 2560, 2560, 1, 1 ] - */ - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_QWEN: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } - if (n_gpu_layers > 
int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - } - - const uint32_t n_ff = hparams.n_ff / 2; - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd * 3}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd * 3}, backend); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd*3}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd*3}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff/2}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff/2, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff/2}); } } break; case LLM_ARCH_PHI2: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - model.output_b = ml.create_tensor(ctx, 
tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + model.output_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_PLAMO: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if 
(n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_GPT2: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); // output { - ggml_backend_type backend_norm; - 
ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = 
ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; default: @@ -3893,78 +3761,51 @@ static bool llm_load_tensors( ml.done_getting_tensors(); - ml.init_mapping(); + ml.init_mapping(true, use_mlock ? &model.mlock_mmap : nullptr); - // allocate tensors - size_t vram_weights = 0; - size_t buf_size = 0; + // create the backend buffers + std::vector> ctx_bufs; - ggml_backend_buffer_type_t buft = llama_default_buffer_type(n_gpu_layers); + for (auto & it : ctx_map) { + ggml_backend_buffer_type_t buft = it.first; + ggml_context * ctx = it.second; + ggml_backend_buffer_t buf = nullptr; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { - // GGML_BACKEND_GPU tensors are for CUDA and OpenCL only, which are handled separately without ggml-backend - if (t->backend == GGML_BACKEND_CPU) { - buf_size += GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), ggml_backend_buft_get_alignment(buft)); - } else { - vram_weights += ggml_nbytes(t); + // only the mmap region containing the tensors in the model is mapped to the backend buffer + // this is important for metal with apple silicon: if the entire model could be mapped to a metal buffer, then we could just use metal for all layers + // this allows using partial offloading when the model size exceeds the metal buffer size, but not the RAM size + if (ml.use_mmap && buft == llama_default_buffer_type_cpu(true)) { + size_t first, last; + ml.get_mapping_range(&first, &last, ctx); + buf = ggml_backend_cpu_buffer_from_ptr((char *) ml.mapping->addr + first, last - first); } - } - - // create backend buffer - ggml_backend_buffer_t buf_mmap = nullptr; - #ifdef GGML_USE_METAL - if (n_gpu_layers > 0) { - if (ml.use_mmap) { + else if (ml.use_mmap && buft == ggml_backend_metal_buffer_type()) { const size_t max_size = ggml_get_max_tensor_size(ctx); - model.buf = ggml_backend_metal_buffer_from_ptr(ml.mapping->addr, ml.mapping->size, max_size); - buf_mmap = model.buf; - } else { - model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_metal_buffer_type()); + size_t first, last; + ml.get_mapping_range(&first, &last, ctx); + buf = ggml_backend_metal_buffer_from_ptr((char *) ml.mapping->addr + first, last - first, max_size); } - } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - // for testing only - if (n_gpu_layers > 0) { - model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cuda_buffer_type(0)); - } #endif - - if (model.buf == nullptr) { - // CPU backend, and indirectly CUDA and OpenCL - if (ml.use_mmap) { - model.buf = ggml_backend_cpu_buffer_from_ptr(ml.mapping->addr, ml.mapping->size); - buf_mmap = model.buf; - } else { - // allocate only CPU tensors - model.buf = ggml_backend_buft_alloc_buffer(buft, buf_size); - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(model.buf); - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { - if (t->backend == GGML_BACKEND_CPU) { - ggml_tallocr_alloc(alloc, t); - } + else { + buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, buft); + if (buf != 
nullptr && use_mlock && ggml_backend_buffer_is_host(buf)) { + model.mlock_buf.init (ggml_backend_buffer_get_base(buf)); + model.mlock_buf.grow_to(ggml_backend_buffer_get_size(buf)); } - ggml_tallocr_free(alloc); } - } - - if (use_mlock && ggml_backend_buffer_is_host(model.buf)) { - model.mlock_buf.init (ggml_backend_buffer_get_base(model.buf)); - model.mlock_buf.grow_to(ggml_backend_buffer_get_size(model.buf)); + if (buf == nullptr) { + throw std::runtime_error("failed to allocate buffer"); + } + // indicate that this buffer contains weights + // this is used by ggml_backend_sched to improve op scheduling -> ops that use a weight are preferably scheduled to the backend that contains the weight + ggml_backend_buffer_set_usage(buf, GGML_BACKEND_BUFFER_USAGE_WEIGHTS); + model.bufs.push_back(buf); + ctx_bufs.emplace_back(ctx, buf); } // print memory requirements { - size_t sys_mem_required = ctx_size + buf_size; - - if (sys_mem_required > 0) { - LLAMA_LOG_INFO("%s: system memory used = %7.2f MiB\n", __func__, sys_mem_required / 1024.0 / 1024.0); - } - if (vram_weights > 0) { - LLAMA_LOG_INFO("%s: VRAM used = %7.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); - } - -#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); @@ -3976,23 +3817,26 @@ static bool llm_load_tensors( const int max_offloadable_layers = hparams.n_layer + 1; LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); -#endif // defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) - } -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - ggml_cuda_set_tensor_split(tensor_split); -#else - GGML_UNUSED(tensor_split); -#endif // GGML_USE_CUBLAS + for (ggml_backend_buffer_t buf : model.bufs) { + LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); + } + } // populate tensors_by_name - for (int i = 0; i < ml.n_tensors; ++i) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, ml.get_tensor_name(i)); - model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); + for (ggml_context * ctx : model.ctxs) { + for (auto * cur = ggml_get_first_tensor(ctx); cur != NULL; cur = ggml_get_next_tensor(ctx, cur)) { + model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); + } } - if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? &model.mlock_mmap : NULL)) { - return false; + // load tensor data + for (auto & it : ctx_bufs) { + ggml_context * ctx = it.first; + ggml_backend_buffer_t buf = it.second; + if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf, use_mlock ? 
&model.mlock_mmap : NULL)) { + return false; + } } model.mapping = std::move(ml.mapping); @@ -4026,13 +3870,13 @@ static int llama_model_load(const std::string & fname, llama_model & model, cons } if (!llm_load_tensors( - ml, model, params.n_gpu_layers, params.main_gpu, params.tensor_split, params.use_mlock, + ml, model, params.n_gpu_layers, params.split_mode, params.main_gpu, params.tensor_split, params.use_mlock, params.progress_callback, params.progress_callback_user_data )) { return -2; } } catch (const std::exception & err) { - LLAMA_LOG_ERROR("error loading model: %s\n", err.what()); + LLAMA_LOG_ERROR("%s: error loading model: %s\n", __func__, err.what()); return -1; } @@ -4476,8 +4320,6 @@ struct llm_build_context { do_rope_shift (worst_case || kv_self.has_shift), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { - GGML_ASSERT(!!kv_self.ctx); - // all initializations should be done in init() } @@ -4557,6 +4399,12 @@ struct llm_build_context { cb(Vcur, "Vcur", il); } + // these nodes are added to the graph together so that they are not reordered + // by doing so, the number of splits in the graph is reduced + ggml_build_forward_expand(gf, Qcur); + ggml_build_forward_expand(gf, Kcur); + ggml_build_forward_expand(gf, Vcur); + Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, @@ -6077,199 +5925,13 @@ struct llm_build_context { } }; -// -// tensor offloading helpers -// -// TODO: will be removed with backend v2 - -enum llm_offload_func_e { - OFFLOAD_FUNC_NOP, - OFFLOAD_FUNC, - OFFLOAD_FUNC_FRC, // force offload - OFFLOAD_FUNC_KQV, - OFFLOAD_FUNC_NR, - OFFLOAD_FUNC_EMB, // embeddings - OFFLOAD_FUNC_OUT, -}; - -// TODO: will be removed with backend v2 -struct llm_offload_trie { - struct node { - ~node() { - for (int i = 0; i < 256; ++i) { - if (children[i]) { - delete children[i]; - } - } - } - - node * children[256] = { nullptr }; - llm_offload_func_e func = OFFLOAD_FUNC_NOP; - }; - - llm_offload_trie() { - root = new node; - } - - llm_offload_trie(const std::unordered_map & map) { - root = new node; - - for (const auto & kv : map) { - add(kv.first, kv.second); - } - } - - ~llm_offload_trie() { - delete root; - } - - void add(const char * name, llm_offload_func_e func) { - node * cur = root; - - for (int i = 0; ; ++i) { - const uint8_t c = name[i]; - - if (!c) { - break; - } - - if (!cur->children[c]) { - cur->children[c] = new node; - } - - cur = cur->children[c]; - } - - cur->func = func; - } - - llm_offload_func_e find(const char * name) const { - const node * cur = root; - - for (int i = 0; ; ++i) { - const uint8_t c = name[i]; - - if (!c) { - break; - } - - if (!cur->children[c]) { - return OFFLOAD_FUNC_NOP; - } - - cur = cur->children[c]; - } - - return cur->func; - } - - node * root = nullptr; -}; - -// TODO: will be removed with backend v2 -static const std::unordered_map k_offload_map = { - //{ "inp_tokens", OFFLOAD_FUNC_NR }, // TODO: missing K-quants get_rows kernel - //{ "inp_embd", OFFLOAD_FUNC_NR }, // TODO: missing K-quants get_rows kernel - { "pos_embd", OFFLOAD_FUNC_NR }, - - { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) - { "KQ_mask", OFFLOAD_FUNC_FRC }, - { "K_shift", OFFLOAD_FUNC_FRC }, - - { "K_shifted", OFFLOAD_FUNC }, - - { "inp_norm", OFFLOAD_FUNC_NR }, - { "inp_norm_w", OFFLOAD_FUNC_NR }, - { "inp_norm_wb", OFFLOAD_FUNC_NR }, - - { "norm", OFFLOAD_FUNC }, - { "norm_w", OFFLOAD_FUNC }, - { "norm_wb", OFFLOAD_FUNC }, - - { "attn_norm", OFFLOAD_FUNC }, - { "attn_norm_2", OFFLOAD_FUNC }, - - { "wqkv", OFFLOAD_FUNC_KQV }, - { "bqkv", OFFLOAD_FUNC_KQV }, - { "wqkv_clamped", OFFLOAD_FUNC_KQV }, - - { "tmpk", OFFLOAD_FUNC_KQV }, - { "tmpq", OFFLOAD_FUNC_KQV }, - { "tmpv", OFFLOAD_FUNC_KQV }, - { "Kcur", OFFLOAD_FUNC_KQV }, - { "Qcur", OFFLOAD_FUNC_KQV }, - { "Vcur", OFFLOAD_FUNC_KQV }, - - { "krot", OFFLOAD_FUNC_KQV }, - { "qrot", OFFLOAD_FUNC_KQV }, - { "kpass", OFFLOAD_FUNC_KQV }, - { "qpass", OFFLOAD_FUNC_KQV }, - { "krotated", OFFLOAD_FUNC_KQV }, - { "qrotated", OFFLOAD_FUNC_KQV }, - - { "q", OFFLOAD_FUNC_KQV }, - { "k", OFFLOAD_FUNC_KQV }, - { "kq", OFFLOAD_FUNC_KQV }, - { "kq_scaled", OFFLOAD_FUNC_KQV }, - { "kq_scaled_alibi", OFFLOAD_FUNC_KQV }, - { "kq_masked", OFFLOAD_FUNC_KQV }, - { "kq_soft_max", OFFLOAD_FUNC_KQV }, - { "kq_soft_max_ext", OFFLOAD_FUNC_KQV }, - { "v", OFFLOAD_FUNC_KQV }, - { "kqv", OFFLOAD_FUNC_KQV }, - { "kqv_merged", OFFLOAD_FUNC_KQV }, - { "kqv_merged_cont", OFFLOAD_FUNC_KQV }, - { "kqv_wo", OFFLOAD_FUNC_KQV }, - { "kqv_out", OFFLOAD_FUNC_KQV }, - - { "ffn_inp", OFFLOAD_FUNC }, - { "ffn_norm", OFFLOAD_FUNC }, - - { "ffn_up", OFFLOAD_FUNC }, - { "ffn_up_b", OFFLOAD_FUNC }, - { "ffn_gate", OFFLOAD_FUNC }, - { "ffn_gate_b", OFFLOAD_FUNC }, - { "ffn_gate_par", OFFLOAD_FUNC }, - { "ffn_act", OFFLOAD_FUNC }, - { "ffn_down", OFFLOAD_FUNC }, - { "ffn_down_b", OFFLOAD_FUNC }, - { "ffn_out", OFFLOAD_FUNC }, - - { "ffn_silu", OFFLOAD_FUNC }, - { "ffn_gelu", OFFLOAD_FUNC }, - { "ffn_relu", OFFLOAD_FUNC }, - { "ffn_sqr(relu)", OFFLOAD_FUNC }, - - { "ffn_moe_logits", OFFLOAD_FUNC }, - { "ffn_moe_probs", OFFLOAD_FUNC }, - { "ffn_moe_argsort", OFFLOAD_FUNC }, - { "ffn_moe_weights", OFFLOAD_FUNC }, - { "ffn_moe_weights_sum", OFFLOAD_FUNC }, - { "ffn_moe_weights_norm", OFFLOAD_FUNC }, - { "ffn_moe_weighted", OFFLOAD_FUNC }, - { "ffn_moe_up", OFFLOAD_FUNC }, - { "ffn_moe_gate", OFFLOAD_FUNC }, - { "ffn_moe_silu", OFFLOAD_FUNC }, - { "ffn_moe_gate_par", OFFLOAD_FUNC }, - { "ffn_moe_down", OFFLOAD_FUNC }, - { "ffn_moe_out", OFFLOAD_FUNC }, - - { "l_out", OFFLOAD_FUNC }, - - { "result_norm", OFFLOAD_FUNC_EMB }, - { "result_output_no_bias", OFFLOAD_FUNC_EMB }, - { "result_output", OFFLOAD_FUNC_OUT }, -}; - -static llm_offload_trie k_offload_func_trie(k_offload_map); - static struct ggml_cgraph * llama_build_graph( llama_context & lctx, const llama_batch & batch) { const auto & model = lctx.model; // check if we should build the worst-case graph (for memory measurement) - const bool worst_case = ggml_allocr_is_measure(lctx.alloc); + const bool worst_case = ggml_tallocr_is_measure(lctx.alloc); // keep track of the input that has already been allocated bool alloc_inp_tokens = false; @@ -6278,16 +5940,8 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - const bool do_offload = true; -#else - const bool do_offload = true; // TODO: set to false after finishing refactoring -#endif - - int n_non_view = 0; // number of non-view tensors that have been processed by the callback - // this callback allows us to apply custom logic to each tensor (e.g. 
ggml-alloc, offloading, etc.) - // TODO: will be removed with backend v2 + // TODO: improve handling of input and output tensors, then replace this with ggml_set_name llm_build_cb cb = [&](struct ggml_tensor * cur, const char * name, int il) { if (il >= 0) { ggml_format_name(cur, "%s-%d", name, il); @@ -6298,12 +5952,11 @@ static struct ggml_cgraph * llama_build_graph( // // allocate input tensors and set input data // - // TODO: will be removed with backend v2 if (!alloc_inp_tokens && strcmp(name, "inp_tokens") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.token) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.token) { const int64_t n_tokens = cur->ne[0]; ggml_backend_tensor_set(cur, batch.token, 0, n_tokens*ggml_element_size(cur)); @@ -6312,10 +5965,10 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_tokens = true; } - if (!alloc_inp_embd && strcmp(name, "inp_embd") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + if (!alloc_inp_embd && strcmp(name, "inp_embd") == 0 && batch.embd) { + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.embd) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.embd) { const int64_t n_embd = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; @@ -6326,9 +5979,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_pos && strcmp(name, "inp_pos") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.pos) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.pos) { const int64_t n_tokens = cur->ne[0]; static_assert(std::is_same::value, "llama_pos must be int32_t"); @@ -6339,9 +5992,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_KQ_mask && strcmp(name, "KQ_mask") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc)) { + if (!ggml_tallocr_is_measure(lctx.alloc)) { const int64_t n_kv = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; @@ -6379,9 +6032,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_K_shift && strcmp(name, "K_shift") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc)) { + if (!ggml_tallocr_is_measure(lctx.alloc)) { const int64_t n_ctx = cur->ne[0]; int32_t * data; @@ -6403,136 +6056,6 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_K_shift = true; } - - // view tensors are not processed further - if (cur->view_src != nullptr) { - return; - } - - if (cur->op != GGML_OP_NONE) { - n_non_view++; - } - - // - // offload layers - // - // TODO: will be removed with backend v2 - -//#define LLAMA_OFFLOAD_DEBUG - - if (!do_offload) { - return; - } - - const int n_layer = model.hparams.n_layer; - - const int n_gpu_layers = model.n_gpu_layers; - const int i_gpu_start = n_layer - n_gpu_layers; - - // should we offload the final norm? 
yes if we are not computing embeddings - const bool offload_emb = lctx.embedding.empty(); - - static const std::unordered_map> k_offload_func_name = { - { OFFLOAD_FUNC_NOP, "CPU" }, - { OFFLOAD_FUNC_OUT, "CPU" }, -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - { OFFLOAD_FUNC, "GPU (CUDA)" }, - { OFFLOAD_FUNC_FRC, "GPU (CUDA) FRC" }, - { OFFLOAD_FUNC_KQV, "GPU (CUDA) KQV" }, - { OFFLOAD_FUNC_NR, "GPU (CUDA) NR" }, - { OFFLOAD_FUNC_EMB, "GPU (CUDA) EMB" }, -#else - { OFFLOAD_FUNC, "CPU" }, - { OFFLOAD_FUNC_FRC, "CPU" }, - { OFFLOAD_FUNC_KQV, "CPU" }, - { OFFLOAD_FUNC_NR, "CPU" }, - { OFFLOAD_FUNC_EMB, "CPU" }, -#endif // GGML_USE_CUBLAS - }; - - // check the global map for what offload function to use for this tensor - llm_offload_func_e func_e = k_offload_func_trie.find(name); - - if (func_e == OFFLOAD_FUNC_NOP) { -#ifdef LLAMA_OFFLOAD_DEBUG - // if a tensor hasn't been offloaded, we warn the user - if (worst_case) { - LLAMA_LOG_WARN("%s: %32s: not offloaded (ref: %s)\n", __func__, - cur->name, "https://github.com/ggerganov/llama.cpp/pull/3837"); - } -#endif - - return; - } - - // count the number of layers and respect the provided n_gpu_layers - switch (func_e) { - case OFFLOAD_FUNC_NOP: - case OFFLOAD_FUNC_OUT: - break; - case OFFLOAD_FUNC: - if (n_gpu_layers < n_layer) { - if (il < i_gpu_start) { - func_e = OFFLOAD_FUNC_NOP; - } - } - break; - case OFFLOAD_FUNC_FRC: - if (!lctx.cparams.offload_kqv) { - func_e = OFFLOAD_FUNC_NOP; - } break; - case OFFLOAD_FUNC_KQV: - if (!lctx.cparams.offload_kqv) { - func_e = OFFLOAD_FUNC_NOP; - } else { - if (n_gpu_layers < n_layer) { - if (il < i_gpu_start) { - func_e = OFFLOAD_FUNC_NOP; - } - } - } - break; - case OFFLOAD_FUNC_NR: - if (n_gpu_layers <= n_layer + 0) { - func_e = OFFLOAD_FUNC_NOP; - } - break; - case OFFLOAD_FUNC_EMB: - if (!offload_emb || n_gpu_layers < n_layer) { - func_e = OFFLOAD_FUNC_NOP; - } - break; - default: GGML_ASSERT(false); - } - - offload_func_t func = ggml_offload_nop; - - // this is needed for compatibility with Metal for example -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - static offload_func_t ggml_offload_gpu = ggml_cuda_assign_buffers_no_alloc; -#else - static offload_func_t ggml_offload_gpu = ggml_offload_nop; -#endif - - switch (func_e) { - case OFFLOAD_FUNC_NOP: - case OFFLOAD_FUNC_OUT: func = ggml_offload_nop; break; - case OFFLOAD_FUNC: - case OFFLOAD_FUNC_KQV: - case OFFLOAD_FUNC_FRC: - case OFFLOAD_FUNC_NR: - case OFFLOAD_FUNC_EMB: func = ggml_offload_gpu; break; - default: GGML_ASSERT(false); - } - - // apply offload function to the tensor - func(cur); - -#ifdef LLAMA_OFFLOAD_DEBUG - if (worst_case) { - LLAMA_LOG_INFO("%s: %32s: %s\n", __func__, cur->name, k_offload_func_name.at(func_e).c_str()); - } -#endif }; struct ggml_cgraph * result = NULL; @@ -6600,27 +6123,6 @@ static struct ggml_cgraph * llama_build_graph( llm.free(); - if (worst_case) { - int n_non_view_total = 0; - - for (int i = 0; i < result->n_nodes; ++i) { - if (result->nodes[i]->view_src == nullptr) { - n_non_view_total++; - } - } - - LLAMA_LOG_INFO("%s: non-view tensors processed: %d/%d\n", __func__, n_non_view, n_non_view_total); - - if (n_non_view != n_non_view_total) { - LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); - LLAMA_LOG_WARN("%s: not all non-view tensors have been processed with a callback\n", __func__); - LLAMA_LOG_WARN("%s: this can indicate an inefficiency in the graph implementation\n", __func__); - LLAMA_LOG_WARN("%s: 
build with LLAMA_OFFLOAD_DEBUG for more info\n", __func__); - LLAMA_LOG_WARN("%s: ref: https://github.com/ggerganov/llama.cpp/pull/3837\n", __func__); - LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); - } - } - return result; } @@ -6666,8 +6168,6 @@ static int llama_decode_internal( auto & kv_self = lctx.kv_self; - GGML_ASSERT(!!kv_self.ctx); - const int64_t n_embd = hparams.n_embd; const int64_t n_vocab = hparams.n_vocab; @@ -6721,12 +6221,10 @@ static int llama_decode_internal( //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); - ggml_allocr_reset(lctx.alloc); + ggml_backend_sched_reset(lctx.sched); ggml_cgraph * gf = llama_build_graph(lctx, batch); - ggml_allocr_alloc_graph(lctx.alloc, gf); - // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; GGML_ASSERT(strcmp(res->name, "result_output") == 0); @@ -6738,30 +6236,6 @@ static int llama_decode_internal( GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - char * buf_alloc_base = (char *)ggml_backend_buffer_get_base(lctx.buf_alloc); - for (int i = 0; i < gf->n_leafs; i++) { - ggml_tensor * node = gf->leafs[i]; - if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); - ggml_cuda_copy_to_device(node); - } - } - - for (int i = 0; i < gf->n_nodes; i++) { - ggml_tensor * node = gf->nodes[i]; - if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); - } - } - - // HACK: ggml-alloc may change the tensor backend when reusing a parent, so force output to be on the CPU here if needed - if (!lctx.embedding.empty()) { - embeddings->backend = GGML_BACKEND_CPU; - } - res->backend = GGML_BACKEND_CPU; -#endif - // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs); // for big prompts, if BLAS is enabled, it is better to use only one thread @@ -6784,15 +6258,17 @@ static int llama_decode_internal( #endif #ifdef GGML_USE_METAL - if (ggml_backend_is_metal(lctx.backend)) { - ggml_backend_metal_set_n_cb(lctx.backend, n_threads); + if (ggml_backend_is_metal(lctx.backend_metal)) { + ggml_backend_metal_set_n_cb(lctx.backend_metal, n_threads); } #endif - if (ggml_backend_is_cpu(lctx.backend)) { - ggml_backend_cpu_set_n_threads(lctx.backend, n_threads); + if (lctx.backend_cpu != nullptr) { + ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); } - ggml_backend_graph_compute(lctx.backend, gf); + ggml_backend_sched_graph_compute(lctx.sched, gf); + + // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); #ifdef GGML_USE_MPI ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); @@ -6840,30 +6316,33 @@ static int llama_decode_internal( logits_out.clear(); #endif + ggml_backend_t res_backend = ggml_backend_sched_get_node_backend(lctx.sched, res); + GGML_ASSERT(res_backend != nullptr); if (batch.logits) { logits_out.resize(n_vocab * n_tokens); for (uint32_t i = 0; i < n_tokens; i++) { if (batch.logits[i] == 0) { continue; } - ggml_backend_tensor_get(res, logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data() + (n_vocab*i), 
(n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[i] = true; #endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); - ggml_backend_tensor_get(res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); #ifndef NDEBUG std::fill(logits_valid.begin(), logits_valid.end(), true); #endif } else { logits_out.resize(n_vocab); - ggml_backend_tensor_get(res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[0] = true; #endif } + ggml_backend_synchronize(res_backend); } // extract embeddings @@ -6871,7 +6350,9 @@ static int llama_decode_internal( auto & embedding_out = lctx.embedding; embedding_out.resize(n_embd); - ggml_backend_tensor_get(embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_synchronize(embeddings_backend); } // measure the performance only for the single-token evals @@ -9347,48 +8828,23 @@ static int llama_apply_lora_from_file_internal( LLAMA_LOG_INFO("%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling); - // create a name -> tensor map of the model to accelerate lookups - // find the max tensor size to estimate the required temporary buffer size - size_t max_tensor_size = 0; - std::unordered_map model_tensors; - for (const auto & kv : model.tensors_by_name) { - model_tensors.insert(kv); - size_t f32_size = ggml_nelements(kv.second) * sizeof(float); - max_tensor_size = std::max(max_tensor_size, f32_size); - } - - // create a temporary ggml context to store the lora tensors - // TODO: use ggml-alloc - size_t lora_ctx_size = max_tensor_size * 3; - LLAMA_LOG_INFO("%s: allocating %.f MB for lora temporary buffer\n", __func__, lora_ctx_size / 1024.0 / 1024.0); - std::vector lora_buf(lora_ctx_size); - - struct ggml_init_params params; - params.mem_size = lora_buf.size(); - params.mem_buffer = lora_buf.data(); - params.no_alloc = false; - - using unique_context = std::unique_ptr; - - unique_context lora_ctx(nullptr, ggml_free); - lora_ctx.reset(ggml_init(params)); - std::unordered_map lora_tensors; - // load base model std::unique_ptr ml; - - if (path_base_model) { + if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ nullptr)); - ml->init_mapping(false); // no prefetching + ml->init_mapping(/*prefetch*/ false); // no prefetching } - // read tensors and apply - bool warned = false; - int n_tensors = 0; - - std::vector work_buffer; + struct tensor_meta { + std::string name; + ggml_type type; + int32_t ne[2]; + size_t offset; + }; + std::map tensor_meta_map; + // load all tensor meta while (true) { if (fin.tell() == fin.size) { // eof @@ -9401,7 +8857,7 @@ static int llama_apply_lora_from_file_internal( fin.read_raw(&n_dims, sizeof(n_dims)); fin.read_raw(&name_len, sizeof(name_len)); - fin.read_raw(&ftype, sizeof(ftype)); + fin.read_raw(&ftype, sizeof(ftype)); if (n_dims != 1 && 
n_dims != 2) { LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); @@ -9415,31 +8871,23 @@ static int llama_apply_lora_from_file_internal( std::string name; { - GGML_ASSERT(name_len <= 1024); - char buf[1024]; + GGML_ASSERT(name_len < GGML_MAX_NAME); + char buf[GGML_MAX_NAME]; fin.read_raw(buf, name_len); name = std::string(buf, name_len); } - // check for lora suffix and get the type of tensor - const std::string lora_suffix = ".lora"; - size_t pos = name.rfind(lora_suffix); - if (pos == std::string::npos) { + // check for lora suffix + std::string lora_suffix; + if (name.length() > 6) { + lora_suffix = name.substr(name.length() - 6); + } + if (lora_suffix != ".loraA" && lora_suffix != ".loraB") { LLAMA_LOG_ERROR("%s: error: '%s' is not a lora tensor\n", __func__, name.c_str()); return 1; } - std::string lora_type = name.substr(pos + lora_suffix.length()); - std::string base_name = name; - base_name.erase(pos); - // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(), base_name.c_str(), lora_type.c_str()); - - if (model_tensors.find(base_name) == model_tensors.end()) { - LLAMA_LOG_ERROR("%s: unknown tensor '%s' in lora adapter\n", __func__, name.data()); - return 1; - } - - // create ggml tensor + // tensor type ggml_type wtype; switch (ftype) { case 0: wtype = GGML_TYPE_F32; break; @@ -9451,122 +8899,177 @@ static int llama_apply_lora_from_file_internal( return false; } } - ggml_tensor * lora_tensor = ggml_new_tensor_2d(lora_ctx.get(), wtype, ne[0], ne[1]); - ggml_set_name(lora_tensor, name.c_str()); - // load tensor data + // data offset size_t offset = fin.tell(); - size_t tensor_data_size = ggml_nbytes(lora_tensor); offset = (offset + 31) & -32; - fin.seek(offset, SEEK_SET); - fin.read_raw(lora_tensor->data, tensor_data_size); - lora_tensors[name] = lora_tensor; + // skip tensor data + fin.seek(offset + ggml_row_size(wtype, ne[0]) * ne[1], SEEK_SET); - // check if we have both A and B tensors and apply - if (lora_tensors.find(base_name + ".loraA") != lora_tensors.end() && - lora_tensors.find(base_name + ".loraB") != lora_tensors.end()) { + tensor_meta_map.emplace(name, tensor_meta{ name, wtype, { ne[0], ne[1] }, offset }); + } - ggml_tensor * dest_t = model_tensors[base_name]; + bool warned = false; + int n_tensors = 0; - offload_func_t offload_func = ggml_offload_nop; - offload_func_t offload_func_force_inplace = ggml_offload_nop; + // apply + ggml_backend_t backend_cpu = ggml_backend_cpu_init(); + if (backend_cpu == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to initialize cpu backend\n", __func__); + return 1; + } + ggml_backend_cpu_set_n_threads(backend_cpu, n_threads); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { - if (dest_t->type != GGML_TYPE_F16) { - throw std::runtime_error(format( - "%s: error: the simultaneous use of LoRAs and GPU acceleration is only supported for f16 models. 
dest_t->type: %d", __func__, dest_t->type)); - } - offload_func = ggml_cuda_assign_buffers; - offload_func_force_inplace = ggml_cuda_assign_buffers_force_inplace; - } -#endif // GGML_USE_CUBLAS + std::vector> read_buf; + for (const auto & it : model.tensors_by_name) { + const std::string & base_name = it.first; + ggml_tensor * model_t = it.second; - ggml_tensor * base_t; - if (ml) { - struct gguf_context * ctx_gguf = ml->ctx_gguf; + if (tensor_meta_map.find(base_name + ".loraA") == tensor_meta_map.end() || + tensor_meta_map.find(base_name + ".loraB") == tensor_meta_map.end()) { + continue; + } - // load from base model - if (gguf_find_tensor(ctx_gguf, base_name.c_str()) < 0) { - LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); - return 1; - } + tensor_meta & metaA = tensor_meta_map.at(base_name + ".loraA"); + tensor_meta & metaB = tensor_meta_map.at(base_name + ".loraB"); - base_t = ml->get_tensor_meta(base_name.c_str()); - ml->load_data_for(base_t); - } else { - base_t = dest_t; - } + ggml_init_params lora_init_params = { + /* .mem_size */ ggml_tensor_overhead()*128 + ggml_graph_overhead(), + /* .mem_buffer */ nullptr, + /* .no_alloc */ true, + }; + ggml_context * lora_ctx = ggml_init(lora_init_params); + if (lora_ctx == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to initialize lora context\n", __func__); + ggml_backend_free(backend_cpu); + return 1; + } - if (ggml_is_quantized(base_t->type)) { - if (!warned) { - LLAMA_LOG_WARN("%s: warning: using a lora adapter with a quantized model may result in poor quality, " - "use a f16 or f32 base model with --lora-base\n", __func__); - warned = true; - } - } + // create tensors + ggml_tensor * loraA = ggml_new_tensor_2d(lora_ctx, metaA.type, metaA.ne[0], metaA.ne[1]); + ggml_tensor * loraB = ggml_new_tensor_2d(lora_ctx, metaB.type, metaB.ne[0], metaB.ne[1]); + ggml_set_name(loraA, metaA.name.c_str()); + ggml_set_name(loraB, metaB.name.c_str()); - ggml_tensor * loraA = lora_tensors[base_name + ".loraA"]; - GGML_ASSERT(loraA->type == GGML_TYPE_F32); - ggml_set_name(loraA, "loraA"); - - ggml_tensor * loraB = lora_tensors[base_name + ".loraB"]; - GGML_ASSERT(loraB->type == GGML_TYPE_F32); - ggml_set_name(loraB, "loraB"); - - if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) { - LLAMA_LOG_ERROR("%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");" - " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]); + ggml_tensor * base_t; + if (ml) { + if (gguf_find_tensor(ml->ctx_gguf, base_name.c_str()) < 0) { + LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); return 1; } + base_t = ggml_dup_tensor(lora_ctx, ml->get_tensor_meta(base_name.c_str())); + } else { + base_t = ggml_dup_tensor(lora_ctx, model_t); + } + ggml_set_name(base_t, base_name.c_str()); + // allocate in backend buffer + ggml_backend_buffer_t lora_buf = ggml_backend_alloc_ctx_tensors_from_buft(lora_ctx, ggml_backend_cpu_buffer_type()); + if (lora_buf == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to allocate lora tensors\n", __func__); + return 1; + } + + // load tensor data + auto load_tensor = [&read_buf, &fin](const tensor_meta & tensor_meta, ggml_tensor * tensor) { + read_buf.resize(ggml_nbytes(tensor)); + fin.seek(tensor_meta.offset, SEEK_SET); + fin.read_raw(read_buf.data(), ggml_nbytes(tensor)); + ggml_backend_tensor_set(tensor, read_buf.data(), 0, read_buf.size()); + }; + load_tensor(metaA, loraA); + 
load_tensor(metaB, loraB); + + // load base model tensor data + if (ml) { + ml->load_data_for(base_t); + } else { + ggml_backend_tensor_copy(model_t, base_t); + } + + if (ggml_is_quantized(base_t->type) && !warned) { + LLAMA_LOG_WARN("%s: warning: using a lora adapter with a quantized model may result in poor quality, " + "use a f16 or f32 base model with --lora-base\n", __func__); + warned = true; + } + + if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) { + LLAMA_LOG_ERROR("%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");" + " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]); + ggml_free(lora_ctx); + ggml_backend_buffer_free(lora_buf); + ggml_backend_free(backend_cpu); + return 1; + } + + auto build_lora_graph = [&]() { // w = w + BA*s - ggml_tensor * BA = ggml_mul_mat(lora_ctx.get(), loraA, loraB); - offload_func(BA); + ggml_tensor * BA = ggml_mul_mat(lora_ctx, loraA, loraB); ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - BA = ggml_scale_inplace(lora_ctx.get(), BA, scaling); - offload_func(BA); + BA = ggml_scale(lora_ctx, BA, scaling); ggml_set_name(BA, "BA_scaled"); } ggml_tensor * r; - if (base_t == dest_t) { - r = ggml_add_inplace(lora_ctx.get(), dest_t, BA); - offload_func_force_inplace(r); - ggml_set_name(r, "r_add_inplace"); - } - else { - r = ggml_add(lora_ctx.get(), base_t, BA); - offload_func(r); - ggml_set_name(r, "r_add"); + r = ggml_add_inplace(lora_ctx, base_t, BA); + ggml_set_name(r, "r_add"); - r = ggml_cpy(lora_ctx.get(), r, dest_t); - offload_func(r); - ggml_set_name(r, "r_cpy"); + if (base_t->type != model_t->type) { + // convert the result to the model type + r = ggml_cast(lora_ctx, r, model_t->type); + ggml_set_name(r, "r_cast"); } - struct ggml_cgraph * gf = ggml_new_graph(lora_ctx.get()); - ggml_build_forward_expand(gf, r); + return r; + }; - ggml_graph_compute_helper(work_buffer, gf, n_threads); + ggml_cgraph * gf = ggml_new_graph(lora_ctx); + ggml_tensor * r = build_lora_graph(); + ggml_build_forward_expand(gf, r); - // the tensors in the adapter must be sorted such that loraA and loraB of the same tensor are next to each other - GGML_ASSERT(lora_tensors.size() == 2); + ggml_backend_buffer_t graph_buf = ggml_backend_alloc_ctx_tensors_from_buft(lora_ctx, ggml_backend_cpu_buffer_type()); + if (graph_buf == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to allocate graph tensors\n", __func__); + ggml_free(lora_ctx); + ggml_backend_buffer_free(lora_buf); + ggml_backend_free(backend_cpu); + return 1; + } - // we won't need these tensors again, reset the context to save memory - lora_ctx.reset(ggml_init(params)); - lora_tensors.clear(); + ggml_backend_graph_compute(backend_cpu, gf); - n_tensors++; - if (n_tensors % 4 == 0) { - LLAMA_LOG_INFO("."); - } + ggml_backend_tensor_set(model_t, r->data, 0, ggml_nbytes(r)); + +#if 0 + // TODO: use scheduler with fallback to CPU for less copies between CPU and GPU + //ggml_backend_sched_t sched = ggml_backend_sched_new(backends.data(), backends.size(), GGML_DEFAULT_GRAPH_SIZE); + + // sched compute + ggml_build_forward_expand(gf, build_graph()); + ggml_backend_sched_init_measure(sched, gf); + + // create the graph again, since the previous one was destroyed by the measure + ggml_graph_clear(gf); + ggml_build_forward_expand(gf, build_graph()); + ggml_backend_sched_graph_compute(sched, gf); + ggml_backend_sched_free(sched); +#endif + + ggml_backend_buffer_free(lora_buf); + ggml_backend_buffer_free(graph_buf); + ggml_free(lora_ctx); + + n_tensors++; + 
if (n_tensors % 4 == 0) { + LLAMA_LOG_INFO("."); } } + ggml_backend_free(backend_cpu); + const int64_t t_lora_us = ggml_time_us() - t_start_lora_us; LLAMA_LOG_INFO(" done (%.2f ms)\n", t_lora_us / 1000.0); @@ -9579,6 +9082,7 @@ static int llama_apply_lora_from_file_internal( struct llama_model_params llama_model_default_params() { struct llama_model_params result = { /*.n_gpu_layers =*/ 0, + /*.split_mode =*/ LLAMA_SPLIT_LAYER, /*.main_gpu =*/ 0, /*.tensor_split =*/ nullptr, /*.progress_callback =*/ nullptr, @@ -9590,7 +9094,8 @@ struct llama_model_params llama_model_default_params() { }; #ifdef GGML_USE_METAL - result.n_gpu_layers = 1; + // note: we usually have plenty of VRAM, so by default offload all layers to the GPU + result.n_gpu_layers = 999; #endif return result; @@ -9780,41 +9285,53 @@ struct llama_context * llama_new_context_with_model( GGML_ASSERT(hparams.n_embd_head_k % ggml_blck_size(type_k) == 0); GGML_ASSERT(hparams.n_embd_head_v % ggml_blck_size(type_v) == 0); - // reserve memory for context buffers if (!hparams.vocab_only) { - // initialize backend + // initialize backends #ifdef GGML_USE_METAL if (model->n_gpu_layers > 0) { - ctx->backend = ggml_backend_metal_init(); - if (ctx->backend == nullptr) { + ctx->backend_metal = ggml_backend_metal_init(); + if (ctx->backend_metal == nullptr) { LLAMA_LOG_ERROR("%s: failed to initialize Metal backend\n", __func__); + llama_free(ctx); + return nullptr; } + ctx->backends.push_back(ctx->backend_metal); } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - // for testing only +#elif defined(GGML_USE_CUBLAS) if (model->n_gpu_layers > 0) { - ctx->backend = ggml_backend_cuda_init(0); - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize CUDA backend\n", __func__); + // with split_mode LLAMA_SPLIT_NONE or LLAMA_SPLIT_ROW, only the main GPU backend is used + if (model->split_mode == LLAMA_SPLIT_NONE || model->split_mode == LLAMA_SPLIT_ROW) { + ggml_backend_t backend = ggml_backend_cuda_init(model->main_gpu); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, model->main_gpu); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } else { + // LLAMA_SPLIT_LAYER requires a backend for each GPU + for (int device = 0; device < ggml_backend_cuda_get_device_count(); ++device) { + ggml_backend_t backend = ggml_backend_cuda_init(device); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, device); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } } } #endif - - if (ctx->backend == nullptr && ggml_backend_buffer_is_host(model->buf)) { - ctx->backend = ggml_backend_cpu_init(); - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); - } - } - - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize a backend\n", __func__); - delete ctx; + ctx->backend_cpu = ggml_backend_cpu_init(); + if (ctx->backend_cpu == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); + llama_free(ctx); return nullptr; } + ctx->backends.push_back(ctx->backend_cpu); - if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, - cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { + if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, + cparams.n_ctx, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention 
cache\n", __func__); llama_free(ctx); return nullptr; @@ -9850,11 +9367,22 @@ struct llama_context * llama_new_context_with_model( } { - // the compute buffer is used to store the tensor and graph structs, while the allocator buffer is used for the tensor data + // buffer types used for the compute buffer of each backend + std::vector backend_buft; + for (auto * backend : ctx->backends) { + if (ggml_backend_is_cpu(backend)) { + // use host buffers for the CPU backend compute buffer + backend_buft.push_back(llama_default_buffer_type_cpu(true)); + } else { + backend_buft.push_back(ggml_backend_get_default_buffer_type(backend)); + } + } + + // buffer used to store the computation graph and the tensor meta data ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); - // create measure allocator - ctx->alloc = ggml_allocr_new_measure_from_backend(ctx->backend); + ctx->sched = ggml_backend_sched_new(ctx->backends.data(), backend_buft.data(), ctx->backends.size(), LLAMA_MAX_NODES); + ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); @@ -9862,50 +9390,19 @@ struct llama_context * llama_new_context_with_model( llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); - // measure memory requirements for the graph - size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf); + // initialize scheduler with the worst-case graph + ggml_backend_sched_init_measure(ctx->sched, gf); + // note: the number of splits during measure is higher than during inference due to the kv shift + int n_splits = ggml_backend_sched_get_n_splits(ctx->sched); + LLAMA_LOG_INFO("%s: graph splits (measure): %d\n", __func__, n_splits); + ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); - LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute_meta.size() + alloc_size) / 1024.0 / 1024.0); - - // create allocator again with exact memory requirements - ggml_allocr_free(ctx->alloc); - - ctx->buf_alloc = ggml_backend_alloc_buffer(ctx->backend, alloc_size); - ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (model->n_gpu_layers > 0) { - // the CPU buffer adds this padding in case the malloc buffer is not aligned, so we need to do the same for the GPU buffer, since we use the same offsets - ggml_cuda_set_scratch_size(alloc_size + 64); - LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); - - // calculate total VRAM usage - auto add_tensor = [](const ggml_tensor * t, size_t & size) { - if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { - size += ggml_nbytes(t); - } - }; - size_t model_vram_size = 0; - for (const auto & kv : model->tensors_by_name) { - add_tensor(kv.second, model_vram_size); - } - - size_t kv_vram_size = 0; - for (auto & k : ctx->kv_self.k_l) { - add_tensor(k, kv_vram_size); - } - for (auto & v : ctx->kv_self.v_l) { - add_tensor(v, kv_vram_size); - } - - size_t ctx_vram_size = alloc_size + kv_vram_size; - size_t total_vram_size = model_vram_size + ctx_vram_size; - - LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, - 
total_vram_size / 1024.0 / 1024.0, - model_vram_size / 1024.0 / 1024.0, - ctx_vram_size / 1024.0 / 1024.0); + for (ggml_backend_t backend : ctx->backends) { + ggml_backend_buffer_t buf = ggml_backend_sched_get_buffer(ctx->sched, backend); + LLAMA_LOG_INFO("%s: %10s compute buffer size = %8.2f MiB\n", __func__, + ggml_backend_buffer_name(buf), + ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); } -#endif } } @@ -10002,9 +9499,8 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 } int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { - return snprintf(buf, buf_size, "%s %s%s %s", + return snprintf(buf, buf_size, "%s %s %s", llama_model_arch_name(model->arch).c_str(), - model->hparams.n_expert > 0 ? (std::to_string(model->hparams.n_expert) + "x").c_str() : "", llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } @@ -10026,7 +9522,14 @@ uint64_t llama_model_n_params(const struct llama_model * model) { } struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const char * name) { - return ggml_get_tensor(model->ctx, name); + auto it = std::find_if(model->tensors_by_name.begin(), model->tensors_by_name.end(), + [name](const std::pair & it) { + return it.first == name; + }); + if (it == model->tensors_by_name.end()) { + return nullptr; + } + return it->second; } uint32_t llama_model_quantize( @@ -10211,7 +9714,7 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_embedding = ctx->embedding.size() * sizeof(float); const size_t s_kv_size = sizeof(size_t); const size_t s_kv_ntok = sizeof(int); - const size_t s_kv = ggml_backend_buffer_get_size(ctx->kv_self.buf); + const size_t s_kv = ctx->kv_self.total_size(); const size_t s_total = ( + s_rng_size @@ -10340,7 +9843,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto n_embd_v_gqa = hparams.n_embd_v_gqa(); const auto n_ctx = cparams.n_ctx; - const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); + const size_t kv_buf_size = kv_self.total_size(); const uint32_t kv_head = kv_self.head; const uint32_t kv_size = kv_self.size; const uint32_t kv_used = kv_self.used; @@ -10353,46 +9856,19 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat if (kv_buf_size) { const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); - ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - - std::vector kout2d(n_layer); - std::vector vout2d(n_layer); - - for (int il = 0; il < (int) n_layer; ++il) { - kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); - vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); - - ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd_k_gqa, kv_head, - elt_size*n_embd_k_gqa, 0); - - ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd_v_gqa, - elt_size*n_ctx, 0); - - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d[il])); - } - - ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); - - ggml_backend_graph_compute(ctx->backend, gf); - std::vector tmp_buf; for (int il = 0; il < (int) n_layer; ++il) { - tmp_buf.resize(ggml_nbytes(kout2d[il])); - ggml_backend_tensor_get(kout2d[il], 
tmp_buf.data(), 0, tmp_buf.size()); + tmp_buf.resize(elt_size*n_embd_k_gqa*kv_head); + ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); - tmp_buf.resize(ggml_nbytes(vout2d[il])); - ggml_backend_tensor_get(vout2d[il], tmp_buf.data(), 0, tmp_buf.size()); - data_ctx->write(tmp_buf.data(), tmp_buf.size()); + // v is not contiguous, copy row by row + tmp_buf.resize(elt_size*kv_head); + for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*elt_size*n_ctx, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + } } - - ggml_free(cpy_ctx); - - ggml_backend_buffer_free(buf); } for (uint32_t i = 0; i < kv_size; ++i) { @@ -10491,48 +9967,22 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { memcpy(&kv_used, inp, sizeof(kv_used)); inp += sizeof(kv_used); if (kv_buf_size) { - GGML_ASSERT(ggml_backend_buffer_get_size(kv_self.buf) == kv_buf_size); + GGML_ASSERT(kv_self.total_size() == kv_buf_size); const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); - ggml_cgraph * gf = ggml_new_graph(cpy_ctx); + for (int il = 0; il < (int) n_layer; ++il) { + size_t k_size = elt_size*n_embd_k_gqa*kv_head; + ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); + inp += k_size; - std::vector kin2d(n_layer); - std::vector vin2d(n_layer); - - for (int il = 0; il < n_layer; ++il) { - kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); - vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); - - ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd_k_gqa, kv_head, - elt_size*n_embd_k_gqa, 0); - - ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd_v_gqa, - elt_size*n_ctx, 0); - - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d[il], v2d)); + // v is not contiguous, copy row by row + size_t v_row_size = elt_size*kv_head; + for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { + ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*elt_size*n_ctx, v_row_size); + inp += v_row_size; + } } - - ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); - - // load data into the tensors - for (int il = 0; il < n_layer; ++il) { - ggml_backend_tensor_set(kin2d[il], inp, 0, ggml_nbytes(kin2d[il])); - inp += ggml_nbytes(kin2d[il]); - - ggml_backend_tensor_set(vin2d[il], inp, 0, ggml_nbytes(vin2d[il])); - inp += ggml_nbytes(vin2d[il]); - } - - ggml_backend_graph_compute(ctx->backend, gf); - - ggml_free(cpy_ctx); - - ggml_backend_buffer_free(buf); } ctx->kv_self.head = kv_head; diff --git a/llama.h b/llama.h index 43d41b8f6..689e12d7c 100644 --- a/llama.h +++ b/llama.h @@ -118,6 +118,12 @@ extern "C" { LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, }; + enum llama_split_mode { + LLAMA_SPLIT_NONE = 0, // single GPU + LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs + LLAMA_SPLIT_ROW = 2, // split rows across GPUs + }; + typedef struct llama_token_data { llama_token id; // token id float logit; // log-odds of the token @@ -180,8 +186,16 @@ extern "C" { struct llama_model_params { int32_t n_gpu_layers; // number of layers to store in VRAM - int32_t main_gpu; // the GPU that is used for scratch and small tensors - const float * 
tensor_split; // how to split layers across multiple GPUs (size: LLAMA_MAX_DEVICES) + enum llama_split_mode split_mode; // how to split the model across multiple GPUs + + // main_gpu interpretation depends on split_mode: + // LLAMA_SPLIT_NONE: the GPU that is used for the entire model + // LLAMA_SPLIT_ROW: the GPU that is used for small tensors and intermediate results + // LLAMA_SPLIT_LAYER: ignored + int32_t main_gpu; + + // proportion of the model (layers or rows) to offload to each GPU, size: LLAMA_MAX_DEVICES + const float * tensor_split; // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. // If the provided progress_callback returns true, model loading continues. diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 7a60d7743..d9b8b106a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -376,6 +376,11 @@ struct test_case { // allocate ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend1); + if (buf == NULL) { + printf("failed to allocate tensors [%s] ", ggml_backend_name(backend1)); + ggml_free(ctx); + return false; + } // build graph ggml_build_forward_expand(gf, out); @@ -463,19 +468,23 @@ struct test_case { GGML_UNUSED(index); }; - ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); + const bool cmp_ok = ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); - if (ud.ok) { - printf("\033[1;32mOK\033[0m\n"); - } else { - printf("\033[1;31mFAIL\033[0m\n"); + if (!cmp_ok) { + printf("compare failed "); } ggml_backend_buffer_free(buf); ggml_free(ctx); - return ud.ok; + if (ud.ok && cmp_ok) { + printf("\033[1;32mOK\033[0m\n"); + return true; + } + + printf("\033[1;31mFAIL\033[0m\n"); + return false; } bool eval_perf(ggml_backend_t backend, const char * op_name) { @@ -519,6 +528,11 @@ struct test_case { // allocate ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend); + if (buf == NULL) { + printf("failed to allocate tensors\n"); + ggml_free(ctx); + return false; + } // randomize tensors initialize_tensors(ctx); From 3fe81781e3bf98b8e44946240a19f3a6ad08a11a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 12 Jan 2024 20:38:54 +0100 Subject: [PATCH 289/811] CUDA: faster q8_0 -> f16 dequantization (#4895) --- ggml-cuda.cu | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2db50437c..bd3814c72 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -523,6 +523,8 @@ static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16 #define CUDA_ACC_BLOCK_SIZE 256 #define CUDA_IM2COL_BLOCK_SIZE 256 +#define CUDA_Q8_0_NE_ALIGN 2048 + // dmmv = dequantize_mul_mat_vec #ifndef GGML_CUDA_DMMV_X #define GGML_CUDA_DMMV_X 32 @@ -2327,6 +2329,45 @@ static __global__ void convert_unary(const void * __restrict__ vx, dst_t * __res y[i] = x[i]; } +template +static __global__ void dequantize_block_q8_0_f16(const void * __restrict__ vx, half * __restrict__ y, const int k) { +#if __CUDA_ARCH__ >= CC_PASCAL + constexpr int nint = CUDA_Q8_0_NE_ALIGN/sizeof(int) + WARP_SIZE; + + const int i0 = CUDA_Q8_0_NE_ALIGN*blockIdx.x; + const int * x0 = ((int *) vx) + blockIdx.x * nint; + half2 * y2 = (half2 *) (y + i0); + + __shared__ int vals[nint]; + +#pragma unroll + for (int ix0 = 0; ix0 < nint; ix0 += WARP_SIZE) { + if (need_check && i0*sizeof(block_q8_0)/QK8_0 + sizeof(int)*(ix0 + threadIdx.x) >= k*sizeof(block_q8_0)/QK8_0) { + break; + } + 
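+        // stage the raw q8_0 data (fp16 scale + int8 quants) into shared memory, one int per thread per iteration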
+ const int ix = ix0 + threadIdx.x; + vals[ix] = x0[ix]; + } + +#pragma unroll + for (int iy = 0; iy < CUDA_Q8_0_NE_ALIGN; iy += 2*WARP_SIZE) { + if (need_check && i0 + iy + 2*threadIdx.x >= k) { + return; + } + + const half * b0 = ((const half *) vals) + (sizeof(block_q8_0)/sizeof(half)) * ((iy + 2*threadIdx.x)/QK8_0); + const half d = *b0; + const char2 qs = ((const char2 *) (b0 + 1))[threadIdx.x % (QK8_0/2)]; + + y2[iy/2 + threadIdx.x] = __hmul2(make_half2(qs.x, qs.y), __half2half2(d)); + } +#else + (void) vx; (void) y; (void) k; + bad_arch(); +#endif // __CUDA_ARCH__ >= CC_PASCAL +} + // VDR = vec dot ratio, how many contiguous integers each thread processes when the vec dot kernel is called // MMVQ = mul_mat_vec_q, MMQ = mul_mat_q @@ -6181,6 +6222,17 @@ static void dequantize_block_cuda(const void * __restrict__ vx, dst_t * __restri dequantize_block<<>>(vx, y, k); } +static void dequantize_block_q8_0_f16_cuda(const void * __restrict__ vx, half * __restrict__ y, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_Q8_0_NE_ALIGN - 1) / CUDA_Q8_0_NE_ALIGN; + if (k % CUDA_Q8_0_NE_ALIGN == 0) { + const bool need_check = false; + dequantize_block_q8_0_f16<<>>(vx, y, k); + } else { + const bool need_check = true; + dequantize_block_q8_0_f16<<>>(vx, y, k); + } +} + template static void dequantize_row_q2_K_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6246,6 +6298,7 @@ static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict_ } static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { + int id; switch (type) { case GGML_TYPE_Q4_0: return dequantize_block_cuda; @@ -6256,6 +6309,10 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { case GGML_TYPE_Q5_1: return dequantize_block_cuda; case GGML_TYPE_Q8_0: + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].cc >= CC_PASCAL) { + return dequantize_block_q8_0_f16_cuda; + } return dequantize_block_cuda; case GGML_TYPE_Q2_K: return dequantize_row_q2_K_cuda; From 52ee4540c0f2e11d52c839db6eb51d014ce060e1 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Fri, 12 Jan 2024 20:46:45 +0100 Subject: [PATCH 290/811] examples : add pydantic models to GBNF grammar generator (#4883) * Create pydantic-models-to-grammar.py * Added some comments for usage * Refactored Grammar Generator Added example and usage instruction. * Update pydantic_models_to_grammar.py * Update pydantic-models-to-grammar-examples.py * Renamed module and imported it. * Update pydantic-models-to-grammar.py * Renamed file and fixed grammar generator issue. --- .../pydantic-models-to-grammar-examples.py | 136 ++ examples/pydantic_models_to_grammar.py | 1151 +++++++++++++++++ 2 files changed, 1287 insertions(+) create mode 100644 examples/pydantic-models-to-grammar-examples.py create mode 100644 examples/pydantic_models_to_grammar.py diff --git a/examples/pydantic-models-to-grammar-examples.py b/examples/pydantic-models-to-grammar-examples.py new file mode 100644 index 000000000..a8a4919cf --- /dev/null +++ b/examples/pydantic-models-to-grammar-examples.py @@ -0,0 +1,136 @@ +# Function calling example using pydantic models. + +import json +from enum import Enum +from typing import Union, Optional + +import requests +from pydantic import BaseModel, Field + +import importlib +from pydantic_models_to_grammar import generate_gbnf_grammar_and_documentation + +# Function to get completion on the llama.cpp server with grammar. 
+def create_completion(prompt, grammar): + headers = {"Content-Type": "application/json"} + data = {"prompt": prompt, "grammar": grammar} + + response = requests.post("http://127.0.0.1:8080/completion", headers=headers, json=data) + data = response.json() + + print(data["content"]) + return data["content"] + + +# A function for the agent to send a message to the user. +class SendMessageToUser(BaseModel): + """ + Send a message to the User. + """ + chain_of_thought: str = Field(..., description="Your chain of thought while sending the message.") + message: str = Field(..., description="Message you want to send to the user.") + + def run(self): + print(self.message) + + +# Enum for the calculator function. +class MathOperation(Enum): + ADD = "add" + SUBTRACT = "subtract" + MULTIPLY = "multiply" + DIVIDE = "divide" + + +# Very simple calculator tool for the agent. +class Calculator(BaseModel): + """ + Perform a math operation on two numbers. + """ + number_one: Union[int, float] = Field(..., description="First number.") + operation: MathOperation = Field(..., description="Math operation to perform.") + number_two: Union[int, float] = Field(..., description="Second number.") + + def run(self): + if self.operation == MathOperation.ADD: + return self.number_one + self.number_two + elif self.operation == MathOperation.SUBTRACT: + return self.number_one - self.number_two + elif self.operation == MathOperation.MULTIPLY: + return self.number_one * self.number_two + elif self.operation == MathOperation.DIVIDE: + return self.number_one / self.number_two + else: + raise ValueError("Unknown operation.") + + +# Here the grammar gets generated by passing the available function models to generate_gbnf_grammar_and_documentation function. This also generates a documentation usable by the LLM. +# pydantic_model_list is the list of pydanitc models +# outer_object_name is an optional name for an outer object around the actual model object. Like a "function" object with "function_parameters" which contains the actual model object. If None, no outer object will be generated +# outer_object_content is the name of outer object content. +# model_prefix is the optional prefix for models in the documentation. (Default="Output Model") +# fields_prefix is the prefix for the model fields in the documentation. (Default="Output Fields") +gbnf_grammar, documentation = generate_gbnf_grammar_and_documentation( + pydantic_model_list=[SendMessageToUser, Calculator], outer_object_name="function", + outer_object_content="function_parameters", model_prefix="Function", fields_prefix="Parameters") + +print(gbnf_grammar) +print(documentation) + +system_message = "You are an advanced AI, tasked to assist the user by calling functions in JSON format. The following are the available functions and their parameters and types:\n\n" + documentation + +user_message = "What is 42 * 42?" 
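+# Build a ChatML-style prompt; the GBNF grammar passed alongside it constrains the server's
+# completion to JSON matching one of the function models defined above.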
+prompt = f"<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{user_message}<|im_end|>\n<|im_start|>assistant" + +text = create_completion(prompt=prompt, grammar=gbnf_grammar) +# This should output something like this: +# { +# "function": "calculator", +# "function_parameters": { +# "number_one": 42, +# "operation": "multiply", +# "number_two": 42 +# } +# } +function_dictionary = json.loads(text) +if function_dictionary["function"] == "calculator": + function_parameters = {**function_dictionary["function_parameters"]} + + print(Calculator(**function_parameters).run()) + # This should output: 1764 + + +# A example structured output based on pydantic models. The LLM will create an entry for a Book database out of an unstructured text. +class Category(Enum): + """ + The category of the book. + """ + Fiction = "Fiction" + NonFiction = "Non-Fiction" + + +class Book(BaseModel): + """ + Represents an entry about a book. + """ + title: str = Field(..., description="Title of the book.") + author: str = Field(..., description="Author of the book.") + published_year: Optional[int] = Field(..., description="Publishing year of the book.") + keywords: list[str] = Field(..., description="A list of keywords.") + category: Category = Field(..., description="Category of the book.") + summary: str = Field(..., description="Summary of the book.") + + +# We need no additional parameters other than our list of pydantic models. +gbnf_grammar, documentation = generate_gbnf_grammar_and_documentation([Book]) + +system_message = "You are an advanced AI, tasked to create a dataset entry in JSON for a Book. The following is the expected output model:\n\n" + documentation + +text = """The Feynman Lectures on Physics is a physics textbook based on some lectures by Richard Feynman, a Nobel laureate who has sometimes been called "The Great Explainer". The lectures were presented before undergraduate students at the California Institute of Technology (Caltech), during 1961–1963. The book's co-authors are Feynman, Robert B. Leighton, and Matthew Sands.""" +prompt = f"<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{text}<|im_end|>\n<|im_start|>assistant" + +text = create_completion(prompt=prompt, grammar=gbnf_grammar) + +json_data = json.loads(text) + +print(Book(**json_data)) diff --git a/examples/pydantic_models_to_grammar.py b/examples/pydantic_models_to_grammar.py new file mode 100644 index 000000000..41b98fdc1 --- /dev/null +++ b/examples/pydantic_models_to_grammar.py @@ -0,0 +1,1151 @@ +import inspect +import json +from copy import copy +from inspect import isclass, getdoc +from types import NoneType + +from pydantic import BaseModel, create_model, Field +from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias +from enum import Enum +from typing import get_type_hints, Callable +import re + + +class PydanticDataType(Enum): + """ + Defines the data types supported by the grammar_generator. + + Attributes: + STRING (str): Represents a string data type. + BOOLEAN (str): Represents a boolean data type. + INTEGER (str): Represents an integer data type. + FLOAT (str): Represents a float data type. + OBJECT (str): Represents an object data type. + ARRAY (str): Represents an array data type. + ENUM (str): Represents an enum data type. + CUSTOM_CLASS (str): Represents a custom class data type. 
+ """ + STRING = "string" + TRIPLE_QUOTED_STRING = "triple_quoted_string" + MARKDOWN_STRING = "markdown_string" + BOOLEAN = "boolean" + INTEGER = "integer" + FLOAT = "float" + OBJECT = "object" + ARRAY = "array" + ENUM = "enum" + ANY = "any" + NULL = "null" + CUSTOM_CLASS = "custom-class" + CUSTOM_DICT = "custom-dict" + SET = "set" + + +def map_pydantic_type_to_gbnf(pydantic_type: Type[Any]) -> str: + if isclass(pydantic_type) and issubclass(pydantic_type, str): + return PydanticDataType.STRING.value + elif isclass(pydantic_type) and issubclass(pydantic_type, bool): + return PydanticDataType.BOOLEAN.value + elif isclass(pydantic_type) and issubclass(pydantic_type, int): + return PydanticDataType.INTEGER.value + elif isclass(pydantic_type) and issubclass(pydantic_type, float): + return PydanticDataType.FLOAT.value + elif isclass(pydantic_type) and issubclass(pydantic_type, Enum): + return PydanticDataType.ENUM.value + + elif isclass(pydantic_type) and issubclass(pydantic_type, BaseModel): + return format_model_and_field_name(pydantic_type.__name__) + elif get_origin(pydantic_type) == list: + element_type = get_args(pydantic_type)[0] + return f"{map_pydantic_type_to_gbnf(element_type)}-list" + elif get_origin(pydantic_type) == set: + element_type = get_args(pydantic_type)[0] + return f"{map_pydantic_type_to_gbnf(element_type)}-set" + elif get_origin(pydantic_type) == Union: + union_types = get_args(pydantic_type) + union_rules = [map_pydantic_type_to_gbnf(ut) for ut in union_types] + return f"union-{'-or-'.join(union_rules)}" + elif get_origin(pydantic_type) == Optional: + element_type = get_args(pydantic_type)[0] + return f"optional-{map_pydantic_type_to_gbnf(element_type)}" + elif isclass(pydantic_type): + return f"{PydanticDataType.CUSTOM_CLASS.value}-{format_model_and_field_name(pydantic_type.__name__)}" + elif get_origin(pydantic_type) == dict: + key_type, value_type = get_args(pydantic_type) + return f"custom-dict-key-type-{format_model_and_field_name(map_pydantic_type_to_gbnf(key_type))}-value-type-{format_model_and_field_name(map_pydantic_type_to_gbnf(value_type))}" + else: + return "unknown" + + +def format_model_and_field_name(model_name: str) -> str: + parts = re.findall('[A-Z][^A-Z]*', model_name) + if not parts: # Check if the list is empty + return model_name.lower().replace("_", "-") + return '-'.join(part.lower().replace("_", "-") for part in parts) + + +def generate_list_rule(element_type): + """ + Generate a GBNF rule for a list of a given element type. + + :param element_type: The type of the elements in the list (e.g., 'string'). + :return: A string representing the GBNF rule for a list of the given type. 
+ """ + rule_name = f"{map_pydantic_type_to_gbnf(element_type)}-list" + element_rule = map_pydantic_type_to_gbnf(element_type) + list_rule = fr'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"' + return list_rule + + +def get_members_structure(cls, rule_name): + if issubclass(cls, Enum): + # Handle Enum types + members = [f'\"\\\"{member.value}\\\"\"' for name, member in cls.__members__.items()] + return f"{cls.__name__.lower()} ::= " + " | ".join(members) + if cls.__annotations__ and cls.__annotations__ != {}: + result = f'{rule_name} ::= "{{"' + type_list_rules = [] + # Modify this comprehension + members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param_type)}' + for name, param_type in cls.__annotations__.items() + if name != 'self'] + + result += '"," '.join(members) + result += ' "}"' + return result, type_list_rules + elif rule_name == "custom-class-any": + result = f'{rule_name} ::= ' + result += 'value' + type_list_rules = [] + return result, type_list_rules + else: + init_signature = inspect.signature(cls.__init__) + parameters = init_signature.parameters + result = f'{rule_name} ::= "{{"' + type_list_rules = [] + # Modify this comprehension too + members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param.annotation)}' + for name, param in parameters.items() + if name != 'self' and param.annotation != inspect.Parameter.empty] + + result += '", "'.join(members) + result += ' "}"' + return result, type_list_rules + + +def regex_to_gbnf(regex_pattern: str) -> str: + """ + Translate a basic regex pattern to a GBNF rule. + Note: This function handles only a subset of simple regex patterns. + """ + gbnf_rule = regex_pattern + + # Translate common regex components to GBNF + gbnf_rule = gbnf_rule.replace('\\d', '[0-9]') + gbnf_rule = gbnf_rule.replace('\\s', '[ \t\n]') + + # Handle quantifiers and other regex syntax that is similar in GBNF + # (e.g., '*', '+', '?', character classes) + + return gbnf_rule + + +def generate_gbnf_integer_rules(max_digit=None, min_digit=None): + """ + + Generate GBNF Integer Rules + + Generates GBNF (Generalized Backus-Naur Form) rules for integers based on the given maximum and minimum digits. + + Parameters: + max_digit (int): The maximum number of digits for the integer. Default is None. + min_digit (int): The minimum number of digits for the integer. Default is None. + + Returns: + integer_rule (str): The identifier for the integer rule generated. + additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. + + """ + additional_rules = [] + + # Define the rule identifier based on max_digit and min_digit + integer_rule = "integer-part" + if max_digit is not None: + integer_rule += f"-max{max_digit}" + if min_digit is not None: + integer_rule += f"-min{min_digit}" + + # Handling Integer Rules + if max_digit is not None or min_digit is not None: + # Start with an empty rule part + integer_rule_part = '' + + # Add mandatory digits as per min_digit + if min_digit is not None: + integer_rule_part += '[0-9] ' * min_digit + + # Add optional digits up to max_digit + if max_digit is not None: + optional_digits = max_digit - (min_digit if min_digit is not None else 0) + integer_rule_part += ''.join(['[0-9]? 
' for _ in range(optional_digits)]) + + # Trim the rule part and append it to additional rules + integer_rule_part = integer_rule_part.strip() + if integer_rule_part: + additional_rules.append(f'{integer_rule} ::= {integer_rule_part}') + + return integer_rule, additional_rules + + +def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None, min_precision=None): + """ + Generate GBNF float rules based on the given constraints. + + :param max_digit: Maximum number of digits in the integer part (default: None) + :param min_digit: Minimum number of digits in the integer part (default: None) + :param max_precision: Maximum number of digits in the fractional part (default: None) + :param min_precision: Minimum number of digits in the fractional part (default: None) + :return: A tuple containing the float rule and additional rules as a list + + Example Usage: + max_digit = 3 + min_digit = 1 + max_precision = 2 + min_precision = 1 + generate_gbnf_float_rules(max_digit, min_digit, max_precision, min_precision) + + Output: + ('float-3-1-2-1', ['integer-part-max3-min1 ::= [0-9] [0-9] [0-9]?', 'fractional-part-max2-min1 ::= [0-9] [0-9]?', 'float-3-1-2-1 ::= integer-part-max3-min1 "." fractional-part-max2-min + *1']) + + Note: + GBNF stands for Generalized Backus-Naur Form, which is a notation technique to specify the syntax of programming languages or other formal grammars. + """ + additional_rules = [] + + # Define the integer part rule + integer_part_rule = "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + ( + f"-min{min_digit}" if min_digit is not None else "") + + # Define the fractional part rule based on precision constraints + fractional_part_rule = "fractional-part" + fractional_rule_part = '' + if max_precision is not None or min_precision is not None: + fractional_part_rule += (f"-max{max_precision}" if max_precision is not None else "") + ( + f"-min{min_precision}" if min_precision is not None else "") + # Minimum number of digits + fractional_rule_part = '[0-9]' * (min_precision if min_precision is not None else 1) + # Optional additional digits + fractional_rule_part += ''.join([' [0-9]?'] * ( + (max_precision - (min_precision if min_precision is not None else 1)) if max_precision is not None else 0)) + additional_rules.append(f'{fractional_part_rule} ::= {fractional_rule_part}') + + # Define the float rule + float_rule = f"float-{max_digit if max_digit is not None else 'X'}-{min_digit if min_digit is not None else 'X'}-{max_precision if max_precision is not None else 'X'}-{min_precision if min_precision is not None else 'X'}" + additional_rules.append(f'{float_rule} ::= {integer_part_rule} "." {fractional_part_rule}') + + # Generating the integer part rule definition, if necessary + if max_digit is not None or min_digit is not None: + integer_rule_part = '[0-9]' + if min_digit is not None and min_digit > 1: + integer_rule_part += ' [0-9]' * (min_digit - 1) + if max_digit is not None: + integer_rule_part += ''.join([' [0-9]?'] * (max_digit - (min_digit if min_digit is not None else 1))) + additional_rules.append(f'{integer_part_rule} ::= {integer_rule_part.strip()}') + + return float_rule, additional_rules + + +def generate_gbnf_rule_for_type(model_name, field_name, + field_type, is_optional, processed_models, created_rules, + field_info=None) -> \ + Tuple[str, list]: + """ + Generate GBNF rule for a given field type. + + :param model_name: Name of the model. + + :param field_name: Name of the field. + :param field_type: Type of the field. 
+ :param is_optional: Whether the field is optional. + :param processed_models: List of processed models. + :param created_rules: List of created rules. + :param field_info: Additional information about the field (optional). + + :return: Tuple containing the GBNF type and a list of additional rules. + :rtype: Tuple[str, list] + """ + rules = [] + + field_name = format_model_and_field_name(field_name) + gbnf_type = map_pydantic_type_to_gbnf(field_type) + + if isclass(field_type) and issubclass(field_type, BaseModel): + nested_model_name = format_model_and_field_name(field_type.__name__) + nested_model_rules = generate_gbnf_grammar(field_type, processed_models, created_rules) + rules.extend(nested_model_rules) + gbnf_type, rules = nested_model_name, rules + elif isclass(field_type) and issubclass(field_type, Enum): + enum_values = [f'\"\\\"{e.value}\\\"\"' for e in field_type] # Adding escaped quotes + enum_rule = f"{model_name}-{field_name} ::= {' | '.join(enum_values)}" + rules.append(enum_rule) + gbnf_type, rules = model_name + "-" + field_name, rules + elif get_origin(field_type) == list or field_type == list: # Array + element_type = get_args(field_type)[0] + element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, + f"{field_name}-element", + element_type, is_optional, processed_models, + created_rules) + rules.extend(additional_rules) + array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ + rules.append(array_rule) + gbnf_type, rules = model_name + "-" + field_name, rules + + elif get_origin(field_type) == set or field_type == set: # Array + element_type = get_args(field_type)[0] + element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, + f"{field_name}-element", + element_type, is_optional, processed_models, + created_rules) + rules.extend(additional_rules) + array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ + rules.append(array_rule) + gbnf_type, rules = model_name + "-" + field_name, rules + + elif gbnf_type.startswith("custom-class-"): + nested_model_rules, field_types = get_members_structure(field_type, gbnf_type) + rules.append(nested_model_rules) + elif gbnf_type.startswith("custom-dict-"): + key_type, value_type = get_args(field_type) + + additional_key_type, additional_key_rules = generate_gbnf_rule_for_type(model_name, + f"{field_name}-key-type", + key_type, is_optional, processed_models, + created_rules) + additional_value_type, additional_value_rules = generate_gbnf_rule_for_type(model_name, + f"{field_name}-value-type", + value_type, is_optional, + processed_models, created_rules) + gbnf_type = fr'{gbnf_type} ::= "{{" ( {additional_key_type} ":" {additional_value_type} ("," {additional_key_type} ":" {additional_value_type})* )? 
"}}" ' + + rules.extend(additional_key_rules) + rules.extend(additional_value_rules) + elif gbnf_type.startswith("union-"): + union_types = get_args(field_type) + union_rules = [] + + for union_type in union_types: + if isinstance(union_type, _GenericAlias): + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, + field_name, union_type, + False, + processed_models, created_rules) + union_rules.append(union_gbnf_type) + rules.extend(union_rules_list) + + + elif not issubclass(union_type, NoneType): + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, + field_name, union_type, + False, + processed_models, created_rules) + union_rules.append(union_gbnf_type) + rules.extend(union_rules_list) + + # Defining the union grammar rule separately + if len(union_rules) == 1: + union_grammar_rule = f"{model_name}-{field_name}-optional ::= {' | '.join(union_rules)} | null" + else: + union_grammar_rule = f"{model_name}-{field_name}-union ::= {' | '.join(union_rules)}" + rules.append(union_grammar_rule) + if len(union_rules) == 1: + gbnf_type = f"{model_name}-{field_name}-optional" + else: + gbnf_type = f"{model_name}-{field_name}-union" + elif isclass(field_type) and issubclass(field_type, str): + if field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None: + + triple_quoted_string = field_info.json_schema_extra.get('triple_quoted_string', False) + markdown_string = field_info.json_schema_extra.get('markdown_string', False) + + gbnf_type = PydanticDataType.TRIPLE_QUOTED_STRING.value if triple_quoted_string else PydanticDataType.STRING.value + gbnf_type = PydanticDataType.MARKDOWN_STRING.value if markdown_string else gbnf_type + + elif field_info and hasattr(field_info, 'pattern'): + # Convert regex pattern to grammar rule + regex_pattern = field_info.regex.pattern + gbnf_type = f"pattern-{field_name} ::= {regex_to_gbnf(regex_pattern)}" + else: + gbnf_type = PydanticDataType.STRING.value + + elif isclass(field_type) and issubclass(field_type, float) and field_info and hasattr(field_info, + 'json_schema_extra') and field_info.json_schema_extra is not None: + # Retrieve precision attributes for floats + max_precision = field_info.json_schema_extra.get('max_precision') if field_info and hasattr(field_info, + 'json_schema_extra') else None + min_precision = field_info.json_schema_extra.get('min_precision') if field_info and hasattr(field_info, + 'json_schema_extra') else None + max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, + 'json_schema_extra') else None + min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, + 'json_schema_extra') else None + + # Generate GBNF rule for float with given attributes + gbnf_type, rules = generate_gbnf_float_rules(max_digit=max_digits, min_digit=min_digits, + max_precision=max_precision, + min_precision=min_precision) + + elif isclass(field_type) and issubclass(field_type, int) and field_info and hasattr(field_info, + 'json_schema_extra') and field_info.json_schema_extra is not None: + # Retrieve digit attributes for integers + max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, + 'json_schema_extra') else None + min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, + 'json_schema_extra') else None + + # Generate GBNF rule for integer with given attributes + gbnf_type, rules = 
generate_gbnf_integer_rules(max_digit=max_digits, min_digit=min_digits) + else: + gbnf_type, rules = gbnf_type, [] + + if gbnf_type not in created_rules: + return gbnf_type, rules + else: + if gbnf_type in created_rules: + return gbnf_type, rules + + +def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created_rules: dict) -> (list, bool, bool): + """ + + Generate GBnF Grammar + + Generates a GBnF grammar for a given model. + + :param model: A Pydantic model class to generate the grammar for. Must be a subclass of BaseModel. + :param processed_models: A set of already processed models to prevent infinite recursion. + :param created_rules: A dict containing already created rules to prevent duplicates. + :return: A list of GBnF grammar rules in string format. And two booleans indicating if an extra markdown or triple quoted string is in the grammar. + Example Usage: + ``` + model = MyModel + processed_models = set() + created_rules = dict() + + gbnf_grammar = generate_gbnf_grammar(model, processed_models, created_rules) + ``` + """ + if model in processed_models: + return [] + + processed_models.add(model) + model_name = format_model_and_field_name(model.__name__) + + if not issubclass(model, BaseModel): + # For non-Pydantic classes, generate model_fields from __annotations__ or __init__ + if hasattr(model, '__annotations__') and model.__annotations__: + model_fields = {name: (typ, ...) for name, typ in model.__annotations__.items()} + else: + init_signature = inspect.signature(model.__init__) + parameters = init_signature.parameters + model_fields = {name: (param.annotation, param.default) for name, param in parameters.items() + if name != 'self'} + else: + # For Pydantic models, use model_fields and check for ellipsis (required fields) + model_fields = model.__annotations__ + + model_rule_parts = [] + nested_rules = [] + has_markdown_code_block = False + has_triple_quoted_string = False + look_for_markdown_code_block = False + look_for_triple_quoted_string = False + for field_name, field_info in model_fields.items(): + if not issubclass(model, BaseModel): + field_type, default_value = field_info + # Check if the field is optional (not required) + is_optional = (default_value is not inspect.Parameter.empty) and (default_value is not Ellipsis) + else: + field_type = field_info + field_info = model.model_fields[field_name] + is_optional = field_info.is_required is False and get_origin(field_type) is Optional + rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, + format_model_and_field_name(field_name), + field_type, is_optional, + processed_models, created_rules, field_info) + look_for_markdown_code_block = True if rule_name == "markdown_string" else False + look_for_triple_quoted_string = True if rule_name == "triple_quoted_string" else False + if not look_for_markdown_code_block and not look_for_triple_quoted_string: + if rule_name not in created_rules: + created_rules[rule_name] = additional_rules + model_rule_parts.append(f' ws \"\\\"{field_name}\\\"\" ": " {rule_name}') # Adding escaped quotes + nested_rules.extend(additional_rules) + else: + has_triple_quoted_string = look_for_markdown_code_block + has_markdown_code_block = look_for_triple_quoted_string + + fields_joined = r' "," "\n" '.join(model_rule_parts) + model_rule = fr'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"' + + if look_for_markdown_code_block or look_for_triple_quoted_string: + model_rule += ' ws "}"' + + if has_triple_quoted_string: + model_rule += '"\\n" 
triple-quoted-string' + if has_markdown_code_block: + model_rule += '"\\n" markdown-code-block' + all_rules = [model_rule] + nested_rules + + return all_rules, has_markdown_code_block, has_triple_quoted_string + + +def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], outer_object_name: str = None, + outer_object_content: str = None, list_of_outputs: bool = False) -> str: + """ + Generate GBNF Grammar from Pydantic Models. + + This method takes a list of Pydantic models and uses them to generate a GBNF grammar string. The generated grammar string can be used for parsing and validating data using the generated + * grammar. + + Parameters: + models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + list_of_outputs (str, optional): Allows a list of output objects + Returns: + str: The generated GBNF grammar string. + + Examples: + models = [UserModel, PostModel] + grammar = generate_gbnf_grammar_from_pydantic(models) + print(grammar) + # Output: + # root ::= UserModel | PostModel + # ... + """ + processed_models = set() + all_rules = [] + created_rules = {} + if outer_object_name is None: + + for model in models: + model_rules, _, _ = generate_gbnf_grammar(model, + processed_models, created_rules) + all_rules.extend(model_rules) + + if list_of_outputs: + root_rule = r'root ::= ws "[" grammar-models ("," grammar-models)* "]"' + "\n" + else: + root_rule = r'root ::= ws grammar-models' + "\n" + root_rule += "grammar-models ::= " + " | ".join( + [format_model_and_field_name(model.__name__) for model in models]) + all_rules.insert(0, root_rule) + return "\n".join(all_rules) + elif outer_object_name is not None: + if list_of_outputs: + root_rule = fr'root ::= ws "[" {format_model_and_field_name(outer_object_name)} ("," {format_model_and_field_name(outer_object_name)})* "]"' + "\n" + else: + root_rule = f"root ::= {format_model_and_field_name(outer_object_name)}\n" + + model_rule = fr'{format_model_and_field_name(outer_object_name)} ::= ws "{{" ws "\"{outer_object_name}\"" ": " grammar-models' + + fields_joined = " | ".join( + [fr'{format_model_and_field_name(model.__name__)}-grammar-model' for model in models]) + + grammar_model_rules = f'\ngrammar-models ::= {fields_joined}' + mod_rules = [] + for model in models: + mod_rule = fr'{format_model_and_field_name(model.__name__)}-grammar-model ::= ws' + mod_rule += fr'"\"{format_model_and_field_name(model.__name__)}\"" "," ws "\"{outer_object_content}\"" ws ":" ws {format_model_and_field_name(model.__name__)}' + '\n' + mod_rules.append(mod_rule) + grammar_model_rules += "\n" + "\n".join(mod_rules) + look_for_markdown_code_block = False + look_for_triple_quoted_string = False + for model in models: + model_rules, markdown_block, triple_quoted_string = generate_gbnf_grammar(model, + processed_models, created_rules) + all_rules.extend(model_rules) + if markdown_block: + look_for_markdown_code_block = True + + if triple_quoted_string: + look_for_triple_quoted_string = True + + if not look_for_markdown_code_block and not look_for_triple_quoted_string: + model_rule += ' ws "}"' + all_rules.insert(0, root_rule + model_rule + grammar_model_rules) + return "\n".join(all_rules) + + +def get_primitive_grammar(grammar): + """ + 
Returns the needed GBNF primitive grammar for a given GBNF grammar string. + + Args: + grammar (str): The string containing the GBNF grammar. + + Returns: + str: GBNF primitive grammar string. + """ + type_list = [] + if "string-list" in grammar: + type_list.append(str) + if "boolean-list" in grammar: + type_list.append(bool) + if "integer-list" in grammar: + type_list.append(int) + if "float-list" in grammar: + type_list.append(float) + additional_grammar = [generate_list_rule(t) for t in type_list] + primitive_grammar = r""" +boolean ::= "true" | "false" +null ::= "null" +string ::= "\"" ( + [^"\\] | + "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) + )* "\"" ws +ws ::= ([ \t\n] ws)? +float ::= ("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws + +integer ::= [0-9]+""" + + any_block = "" + if "custom-class-any" in grammar: + any_block = ''' +value ::= object | array | string | number | boolean | null + +object ::= + "{" ws ( + string ":" ws value + ("," ws string ":" ws value)* + )? "}" ws + +array ::= + "[" ws ( + value + ("," ws value)* + )? "]" ws + +number ::= integer | float''' + + markdown_code_block_grammar = "" + if "markdown-code-block" in grammar: + markdown_code_block_grammar = r''' +markdown-code-block ::= opening-triple-ticks markdown-code-block-content closing-triple-ticks +markdown-code-block-content ::= ( [^`] | "`" [^`] | "`" "`" [^`] )* +opening-triple-ticks ::= "```" "python" "\n" | "```" "c" "\n" | "```" "cpp" "\n" | "```" "txt" "\n" | "```" "text" "\n" | "```" "json" "\n" | "```" "javascript" "\n" | "```" "css" "\n" | "```" "html" "\n" | "```" "markdown" "\n" +closing-triple-ticks ::= "```" "\n"''' + + if "triple-quoted-string" in grammar: + markdown_code_block_grammar = r""" +triple-quoted-string ::= triple-quotes triple-quoted-string-content triple-quotes +triple-quoted-string-content ::= ( [^'] | "'" [^'] | "'" "'" [^'] )* +triple-quotes ::= "'''" """ + return "\n" + '\n'.join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar + + +def generate_field_markdown(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1) -> str: + indent = ' ' * depth + field_markdown = f"{indent}- **{field_name}** (`{field_type.__name__}`): " + + # Extracting field description from Pydantic Field using __model_fields__ + field_info = model.model_fields.get(field_name) + field_description = field_info.description if field_info and field_info.description else "No description available." + + field_markdown += field_description + '\n' + + # Handling nested BaseModel fields + if isclass(field_type) and issubclass(field_type, BaseModel): + field_markdown += f"{indent} - Details:\n" + for name, type_ in field_type.__annotations__.items(): + field_markdown += generate_field_markdown(name, type_, field_type, depth + 2) + + return field_markdown + + +def generate_markdown_report(pydantic_models: List[Type[BaseModel]]) -> str: + markdown = "" + for model in pydantic_models: + markdown += f"### {format_model_and_field_name(model.__name__)}\n" + + # Check if the model's docstring is different from BaseModel's docstring + class_doc = getdoc(model) + base_class_doc = getdoc(BaseModel) + class_description = class_doc if class_doc and class_doc != base_class_doc else "No specific description available." 
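+        # a docstring inherited unchanged from BaseModel is treated as "no description"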
+ + markdown += f"{class_description}\n\n" + markdown += "#### Fields\n" + + if isclass(model) and issubclass(model, BaseModel): + for name, field_type in model.__annotations__.items(): + markdown += generate_field_markdown(format_model_and_field_name(name), field_type, model) + markdown += "\n" + + return markdown + + +def format_json_example(example: dict, depth: int) -> str: + """ + Format a JSON example into a readable string with indentation. + + Args: + example (dict): JSON example to be formatted. + depth (int): Indentation depth. + + Returns: + str: Formatted JSON example string. + """ + indent = ' ' * depth + formatted_example = '{\n' + for key, value in example.items(): + value_text = f"'{value}'" if isinstance(value, str) else value + formatted_example += f"{indent}{key}: {value_text},\n" + formatted_example = formatted_example.rstrip(',\n') + '\n' + indent + '}' + return formatted_example + + +def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_prefix="Model", + fields_prefix="Fields", documentation_with_field_description=True) -> str: + """ + Generate text documentation for a list of Pydantic models. + + Args: + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation. + """ + documentation = "" + pyd_models = [(model, True) for model in pydantic_models] + for model, add_prefix in pyd_models: + if add_prefix: + documentation += f"{model_prefix}: {format_model_and_field_name(model.__name__)}\n" + else: + documentation += f"Model: {format_model_and_field_name(model.__name__)}\n" + + # Handling multi-line model description with proper indentation + + class_doc = getdoc(model) + base_class_doc = getdoc(BaseModel) + class_description = class_doc if class_doc and class_doc != base_class_doc else "" + if class_description != "": + documentation += " Description: " + documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + + if add_prefix: + # Indenting the fields section + documentation += f" {fields_prefix}:\n" + else: + documentation += f" Fields:\n" + if isclass(model) and issubclass(model, BaseModel): + for name, field_type in model.__annotations__.items(): + # if name == "markdown_code_block": + # continue + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + if get_origin(field_type) == Union: + element_types = get_args(field_type) + for element_type in element_types: + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + documentation += generate_field_text(name, field_type, model, + documentation_with_field_description=documentation_with_field_description) + documentation += "\n" + + if hasattr(model, 'Config') and hasattr(model.Config, + 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" + json_example = json.dumps(model.Config.json_schema_extra['example']) + documentation += format_multiline_description(json_example, 2) + "\n" + + return documentation + + +def generate_field_text(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, 
+ documentation_with_field_description=True) -> str: + """ + Generate text documentation for a Pydantic model field. + + Args: + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation for the field. + """ + indent = ' ' * depth + + field_info = model.model_fields.get(field_name) + field_description = field_info.description if field_info and field_info.description else "" + + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)} of {format_model_and_field_name(element_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + elif get_origin(field_type) == Union: + element_types = get_args(field_type) + types = [] + for element_type in element_types: + types.append(format_model_and_field_name(element_type.__name__)) + field_text = f"{indent}{field_name} ({' or '.join(types)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + else: + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + + if not documentation_with_field_description: + return field_text + + if field_description != "": + field_text += f"{indent} Description: " + field_description + "\n" + + # Check for and include field-specific examples if available + if hasattr(model, 'Config') and hasattr(model.Config, + 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra['example'].get(field_name) + if field_example is not None: + example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example + field_text += f"{indent} Example: {example_text}\n" + + if isclass(field_type) and issubclass(field_type, BaseModel): + field_text += f"{indent} Details:\n" + for name, type_ in field_type.__annotations__.items(): + field_text += generate_field_text(name, type_, field_type, depth + 2) + + return field_text + + +def format_multiline_description(description: str, indent_level: int) -> str: + """ + Format a multiline description with proper indentation. + + Args: + description (str): Multiline description. + indent_level (int): Indentation level. + + Returns: + str: Formatted multiline description. + """ + indent = ' ' * indent_level + return indent + description.replace('\n', '\n' + indent) + + +def save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path="./grammar.gbnf", + documentation_file_path="./grammar_documentation.md"): + """ + Save GBNF grammar and documentation to specified files. + + Args: + grammar (str): GBNF grammar string. + documentation (str): Documentation string. + grammar_file_path (str): File path to save the GBNF grammar. + documentation_file_path (str): File path to save the documentation. 
+ + Returns: + None + """ + try: + with open(grammar_file_path, 'w') as file: + file.write(grammar + get_primitive_grammar(grammar)) + print(f"Grammar successfully saved to {grammar_file_path}") + except IOError as e: + print(f"An error occurred while saving the grammar file: {e}") + + try: + with open(documentation_file_path, 'w') as file: + file.write(documentation) + print(f"Documentation successfully saved to {documentation_file_path}") + except IOError as e: + print(f"An error occurred while saving the documentation file: {e}") + + +def remove_empty_lines(string): + """ + Remove empty lines from a string. + + Args: + string (str): Input string. + + Returns: + str: String with empty lines removed. + """ + lines = string.splitlines() + non_empty_lines = [line for line in lines if line.strip() != ""] + string_no_empty_lines = "\n".join(non_empty_lines) + return string_no_empty_lines + + +def generate_and_save_gbnf_grammar_and_documentation(pydantic_model_list, + grammar_file_path="./generated_grammar.gbnf", + documentation_file_path="./generated_grammar_documentation.md", + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation, and save them to specified files. + + Args: + pydantic_model_list: List of Pydantic model classes. + grammar_file_path (str): File path to save the generated GBNF grammar. + documentation_file_path (str): File path to save the generated documentation. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + None + """ + documentation = generate_text_documentation(pydantic_model_list, model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar) + save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path, documentation_file_path) + + +def generate_gbnf_grammar_and_documentation(pydantic_model_list, outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation for a list of Pydantic models. + + Args: + pydantic_model_list: List of Pydantic model classes. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. 
+ fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + tuple: GBNF grammar string, documentation string. + """ + documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) + return grammar, documentation + + +def generate_gbnf_grammar_and_documentation_from_dictionaries(dictionaries: List[dict], + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation from a list of dictionaries. + + Args: + dictionaries (List[dict]): List of dictionaries representing Pydantic models. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + tuple: GBNF grammar string, documentation string. + """ + pydantic_model_list = create_dynamic_models_from_dictionaries(dictionaries) + documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) + return grammar, documentation + + +def create_dynamic_model_from_function(func: Callable): + """ + Creates a dynamic Pydantic model from a given function's type hints and adds the function as a 'run' method. + + Args: + func (Callable): A function with type hints from which to create the model. + + Returns: + A dynamic Pydantic model class with the provided function as a 'run' method. + """ + # Extracting type hints from the provided function + type_hints = get_type_hints(func) + type_hints.pop('return', None) + + # Handling default values and annotations + dynamic_fields = {} + defaults = getattr(func, '__defaults__', ()) or () + defaults_index = len(type_hints) - len(defaults) + + for index, (name, typ) in enumerate(type_hints.items()): + if index >= defaults_index: + default_value = defaults[index - defaults_index] + dynamic_fields[name] = (typ, default_value) + else: + dynamic_fields[name] = (typ, ...) 
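+    # e.g. a hypothetical `def calc(a: int, b: str = "x") -> int` yields
+    # dynamic_fields == {"a": (int, ...), "b": (str, "x")}; Ellipsis marks a required field for create_model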
+ + # Creating the dynamic model + dynamicModel = create_model(f'{func.__name__}', **dynamic_fields) + + dynamicModel.__doc__ = getdoc(func) + + # Wrapping the original function to handle instance 'self' + def run_method_wrapper(self): + func_args = {name: getattr(self, name) for name in type_hints} + return func(**func_args) + + # Adding the wrapped function as a 'run' method + setattr(dynamicModel, 'run', run_method_wrapper) + + return dynamicModel + + +def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): + """ + Add a 'run' method to a dynamic Pydantic model, using the provided function. + + Args: + - model (Type[BaseModel]): Dynamic Pydantic model class. + - func (Callable): Function to be added as a 'run' method to the model. + + Returns: + - Type[BaseModel]: Pydantic model class with the added 'run' method. + """ + + def run_method_wrapper(self): + func_args = {name: getattr(self, name) for name in model.model_fields} + return func(**func_args) + + # Adding the wrapped function as a 'run' method + setattr(model, 'run', run_method_wrapper) + + return model + + +def create_dynamic_models_from_dictionaries(dictionaries: List[dict]): + """ + Create a list of dynamic Pydantic model classes from a list of dictionaries. + + Args: + - dictionaries (List[dict]): List of dictionaries representing model structures. + + Returns: + - List[Type[BaseModel]]: List of generated dynamic Pydantic model classes. + """ + dynamic_models = [] + for func in dictionaries: + model_name = format_model_and_field_name(func.get("name", "")) + dyn_model = convert_dictionary_to_to_pydantic_model(func, model_name) + dynamic_models.append(dyn_model) + return dynamic_models + + +def map_grammar_names_to_pydantic_model_class(pydantic_model_list): + output = {} + for model in pydantic_model_list: + output[format_model_and_field_name(model.__name__)] = model + + return output + + +from enum import Enum + + +def json_schema_to_python_types(schema): + type_map = { + 'any': Any, + 'string': str, + 'number': float, + 'integer': int, + 'boolean': bool, + 'array': list, + } + return type_map[schema] + + +def list_to_enum(enum_name, values): + return Enum(enum_name, {value: value for value in values}) + + +def convert_dictionary_to_to_pydantic_model(dictionary: dict, model_name: str = 'CustomModel') -> Type[BaseModel]: + """ + Convert a dictionary to a Pydantic model class. + + Args: + - dictionary (dict): Dictionary representing the model structure. + - model_name (str): Name of the generated Pydantic model. + + Returns: + - Type[BaseModel]: Generated Pydantic model class. + """ + fields = {} + + if "properties" in dictionary: + for field_name, field_data in dictionary.get("properties", {}).items(): + if field_data == 'object': + submodel = convert_dictionary_to_to_pydantic_model(dictionary, f'{model_name}_{field_name}') + fields[field_name] = (submodel, ...) + else: + field_type = field_data.get('type', 'str') + + if field_data.get("enum", []): + fields[field_name] = (list_to_enum(field_name, field_data.get("enum", [])), ...) + if field_type == "array": + items = field_data.get("items", {}) + if items != {}: + array = {"properties": items} + array_type = convert_dictionary_to_to_pydantic_model(array, f'{model_name}_{field_name}_items') + fields[field_name] = (List[array_type], ...) + else: + fields[field_name] = (list, ...) + elif field_type == 'object': + submodel = convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}_{field_name}') + fields[field_name] = (submodel, ...) 
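+                # remaining scalar JSON schema types ("string", "number", "integer", "boolean")
+                # are mapped to plain Python types via json_schema_to_python_types below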
+ else: + field_type = json_schema_to_python_types(field_type) + fields[field_name] = (field_type, ...) + if "function" in dictionary: + + for field_name, field_data in dictionary.get("function", {}).items(): + if field_name == "name": + model_name = field_data + elif field_name == "description": + fields["__doc__"] = field_data + elif field_name == "parameters": + return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + if "parameters" in dictionary: + field_data = {"function": dictionary} + return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + + custom_model = create_model(model_name, **fields) + return custom_model + + + From fa5c1fb44a2724292da545d6b7cf2a1ac0e0b989 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 12 Jan 2024 20:38:34 +0100 Subject: [PATCH 291/811] backend_sched : fix assignments ggml-ci --- ggml-backend.c | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/ggml-backend.c b/ggml-backend.c index 4c2d8b0b2..505dbba47 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1087,6 +1087,24 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g } } } + + // pass 2.4 expand rest down + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + cur_allocr = node_allocr; + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.4"); + } + } + } #ifdef DEBUG_PASS2 fprintf(stderr, "PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); #endif @@ -1146,6 +1164,8 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g ggml_tallocr_t node_allocr = node_allocr(node); + GGML_ASSERT(node_allocr != NULL); // all nodes should be assigned by now + if (node_allocr != cur_allocr) { sched->splits[cur_split].i_end = i; cur_split++; From f238461236f4e0e18cac1a554af23c7deadc9b01 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 14:02:30 +0200 Subject: [PATCH 292/811] ggml : fix 32-bit ARM compat for IQ2_XS (whisper/1758) * ggml : fix 32-bit ARM compat * ggml : fix fix * ggml : fix fix fix --- ggml-quants.c | 39 +++++++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index a24b4b244..601d155d7 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -272,10 +272,13 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 // vaddvq_s16 // vpaddq_s16 +// vpaddq_s32 // vaddvq_s32 // vaddvq_f32 // vmaxvq_f32 // vcvtnq_s32_f32 +// vzip1_u8 +// vzip2_u8 inline static int32_t vaddvq_s16(int16x8_t v) { return @@ -291,6 +294,12 @@ inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { return vcombine_s16(a0, b0); } +inline static int32x4_t vpaddq_s32(int32x4_t a, int32x4_t b) { + int32x2_t a0 = vpadd_s32(vget_low_s32(a), vget_high_s32(a)); + int32x2_t b0 = vpadd_s32(vget_low_s32(b), vget_high_s32(b)); + return vcombine_s32(a0, b0); +} + inline static int32_t vaddvq_s32(int32x4_t v) { return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); } @@ -316,6 +325,28 @@ inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { return res; } +inline static uint8x8_t vzip1_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[0]; res[1] = b[0]; + res[2] = a[1]; res[3] = b[1]; + res[4] = a[2]; res[5] = b[2]; + 
res[6] = a[3]; res[7] = b[3]; + + return res; +} + +inline static uint8x8_t vzip2_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[4]; res[1] = b[4]; + res[2] = a[5]; res[3] = b[5]; + res[4] = a[6]; res[5] = b[6]; + res[6] = a[7]; res[7] = b[7]; + + return res; +} + // vld1q_s16_x2 // vld1q_u8_x2 // vld1q_u8_x4 @@ -7554,9 +7585,9 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; - int8x16x4_t q2u; - int8x16x4_t q2s; - int8x16x4_t q8b; + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; int32x4x4_t scales32; @@ -7578,7 +7609,7 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest scales32.val[3] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales2))); int32x4_t sumi = vdupq_n_s32(0); for (int ib64 = 0; ib64 < QK_K/64; ++ib64) { - q8b = vld1q_s8_x4(q8); q8 += 64; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[0] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[1] & 511)))); q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[2] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[3] & 511)))); q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[4] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[5] & 511)))); From de473f5f8e19ba5e659cdf5af65fb9251dce16c5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 22:02:43 +0200 Subject: [PATCH 293/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 3e2c579d5..edcdb530a 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -979cc23b345006504cfc1f67c0fdf627805e3319 +400c07f00508e6f60fb25405444b5669c365b0a9 From 15ebe59210e7fd9817ff67f51fa1a5ee2d004294 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 13:44:37 +0200 Subject: [PATCH 294/811] convert : update phi-2 to latest HF repo (#4903) * convert : update phi-2 to latest HF repo ggml-ci * py : try to fix flake stuff --- convert-hf-to-gguf.py | 39 +++++++++++++++++++++---------- gguf-py/gguf/constants.py | 3 +++ gguf-py/gguf/tensor_mapping.py | 2 ++ llama.cpp | 42 ++++++++++++++++++++++++++-------- 4 files changed, 65 insertions(+), 21 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index a1c79fd47..b133f3b49 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -23,6 +23,15 @@ if 'NO_LOCAL_GGUF' not in os.environ: import gguf +# check for any of the given keys in the dictionary and return the value of the first key found +def get_key_opts(d, keys): + for k in keys: + if k in d: + return d[k] + print(f"Could not find any of {keys}") + sys.exit() + + ###### MODEL DEFINITIONS ###### class SentencePieceTokenTypes(IntEnum): @@ -257,10 +266,11 @@ class Model: toktypes.append(gguf.TokenType.USER_DEFINED) elif reverse_vocab[i] in added_vocab: tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) + if hasattr(tokenizer, "added_tokens_decoder"): + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) else: tokens.append(reverse_vocab[i]) toktypes.append(gguf.TokenType.NORMAL) @@ -1068,17 +1078,22 @@ class GPT2Model(Model): class Phi2Model(Model): def 
set_gguf_parameters(self): - block_count = self.hparams["n_layer"] + block_count = get_key_opts(self.hparams, ["num_hidden_layers", "n_layer"]) + + rot_pct = get_key_opts(self.hparams, ["partial_rotary_factor"]) + n_embd = get_key_opts(self.hparams, ["hidden_size", "n_embd"]) + n_head = get_key_opts(self.hparams, ["num_attention_heads", "n_head"]) self.gguf_writer.add_name("Phi2") - self.gguf_writer.add_context_length(self.hparams["n_positions"]) - self.gguf_writer.add_embedding_length(self.hparams["n_embd"]) - self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"]) + self.gguf_writer.add_context_length(get_key_opts(self.hparams, ["n_positions", "max_position_embeddings"])) + + self.gguf_writer.add_embedding_length(n_embd) + self.gguf_writer.add_feed_forward_length(4 * n_embd) self.gguf_writer.add_block_count(block_count) - self.gguf_writer.add_head_count(self.hparams["n_head"]) - self.gguf_writer.add_head_count_kv(self.hparams["n_head"]) - self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"]) - self.gguf_writer.add_rope_dimension_count(self.hparams["rotary_dim"]) + self.gguf_writer.add_head_count(n_head) + self.gguf_writer.add_head_count_kv(n_head) + self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"])) + self.gguf_writer.add_rope_dimension_count(int(rot_pct * n_embd) // n_head) self.gguf_writer.add_file_type(self.ftype) self.gguf_writer.add_add_bos_token(False) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index f0a1c51f8..972b4e9a7 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -389,6 +389,9 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.OUTPUT, MODEL_TENSOR.ATTN_NORM, MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, MODEL_TENSOR.ATTN_OUT, MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 24a089037..e5b146106 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -191,6 +191,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.w1", # qwen "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 + "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo ), @@ -232,6 +233,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.dense_4h_to_h", # persimmon "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 + "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo ), diff --git a/llama.cpp b/llama.cpp index fe1d8947c..1d2eb569f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -574,6 +574,9 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_OUTPUT, "output" }, { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, @@ -3676,8 +3679,19 @@ static bool llm_load_tensors( layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); - layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, 
"bias", i), {n_embd + 2*n_embd_gqa}); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, false); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, false); + + if (layer.wqkv == nullptr) { + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + } layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); @@ -5637,15 +5651,25 @@ struct llm_build_context { // self-attention { - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); - cb(cur, "wqkv", il); + struct ggml_tensor * Qcur = nullptr; + struct ggml_tensor * Kcur = nullptr; + struct ggml_tensor * Vcur = nullptr; - cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - cb(cur, "bqkv", il); + if (model.layers[il].wqkv) { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); + cb(cur, "wqkv", il); - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + } else { + Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, attn_norm_output), model.layers[il].bq); + Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, attn_norm_output), model.layers[il].bk); + Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, attn_norm_output), model.layers[il].bv); + } cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); From ee8243adaa9a9f51ff449213383874e49efe368f Mon Sep 17 00:00:00 2001 From: makomk Date: Sat, 13 Jan 2024 14:16:11 +0000 Subject: [PATCH 295/811] server : fix crash with multimodal models without BOS token (#4904) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c1ab8f9dc..7b33aea1f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1835,7 +1835,7 @@ struct llama_server_context slot.cache_tokens = prompt_tokens; - if (slot.n_past == slot.num_prompt_tokens) + if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) { // we have to evaluate at least 1 token to generate logits. 
LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id); From 356327feb3f66980ab687040495d722696d98970 Mon Sep 17 00:00:00 2001 From: Ziad Ben Hadj-Alouane Date: Sat, 13 Jan 2024 09:20:46 -0500 Subject: [PATCH 296/811] server : fix deadlock that occurs in multi-prompt scenarios (#4905) * * fix deadlock * * dont ruint all whitespace --- examples/server/server.cpp | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7b33aea1f..79eacf828 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1350,14 +1350,17 @@ struct llama_server_context res.result_json["model"] = slot.oaicompat_model; } + queue_results.push_back(res); + condition_results.notify_all(); + + // done with results, unlock + lock.unlock(); + // parent multitask, if any, needs to be updated if (slot.multitask_id != -1) { update_multi_task(slot.multitask_id, slot.task_id, res); } - - queue_results.push_back(res); - condition_results.notify_all(); } void send_embedding(llama_client_slot &slot) @@ -1603,6 +1606,7 @@ struct llama_server_context } // remove finished multitasks from the queue of multitasks, and add the corresponding result to the result queue + std::vector agg_results; auto queue_iterator = queue_multitasks.begin(); while (queue_iterator != queue_multitasks.end()) { @@ -1623,8 +1627,9 @@ struct llama_server_context } aggregate_result.result_json = json{ "results", result_jsons }; - std::lock_guard lock(mutex_results); - queue_results.push_back(aggregate_result); + + agg_results.push_back(aggregate_result); + condition_results.notify_all(); queue_iterator = queue_multitasks.erase(queue_iterator); @@ -1634,6 +1639,13 @@ struct llama_server_context ++queue_iterator; } } + + // done with tasks, unlock + lock.unlock(); + + // copy aggregate results of complete multi-tasks to the results queue + std::lock_guard lock_results(mutex_results); + queue_results.insert(queue_results.end(), agg_results.begin(), agg_results.end()); } bool update_slots() { From 7dc78764e2ff86512e6e31cb0fcb8087df4b4708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 13 Jan 2024 15:52:53 +0100 Subject: [PATCH 297/811] compare-llama-bench: tweak output format (#4910) --- scripts/compare-llama-bench.py | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py index bc1714487..70737f976 100755 --- a/scripts/compare-llama-bench.py +++ b/scripts/compare-llama-bench.py @@ -10,15 +10,15 @@ import sqlite3 try: import git from tabulate import tabulate -except ImportError: +except ImportError as e: print("ERROR: the following Python libraries are required: GitPython, tabulate.") - sys.exit(1) + raise e # Properties by which to differentiate results per commit: KEY_PROPERTIES = [ - "cuda", "opencl", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", - "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" + "cpu_info", "gpu_info", "n_gpu_layers", "main_gpu", "cuda", "opencl", "metal", "gpu_blas", + "blas", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", + "type_k", "type_v", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" ] # Properties that are boolean and are converted 
to Yes/No for the table: @@ -37,6 +37,7 @@ PRETTY_NAMES = { DEFAULT_SHOW = ["model_type"] # Always show these properties by default. DEFAULT_HIDE = ["model_filename"] # Always hide these properties by default. GPU_NAME_STRIP = ["NVIDIA GeForce ", "Tesla ", "AMD Radeon "] # Strip prefixes for smaller tables. +MODEL_SUFFIX_REPLACE = {" - Small": "_S", " - Medium": "_M", " - Large": "_L"} DESCRIPTION = """Creates tables from llama-bench data written to an SQLite database. Example usage (Linux): @@ -308,8 +309,13 @@ else: if gpu_blas and "gpu_info" not in properties_different: show.append("gpu_info") - show += DEFAULT_SHOW show += properties_different + + index_default = 0 + for prop in ["cpu_info", "gpu_info", "n_gpu_layers", "main_gpu"]: + if prop in show: + index_default += 1 + show = show[:index_default] + DEFAULT_SHOW + show[index_default:] for prop in DEFAULT_HIDE: try: show.remove(prop) @@ -334,6 +340,12 @@ for bool_property in BOOL_PROPERTIES: for row_table in table: row_table[ip] = "Yes" if int(row_table[ip]) == 1 else "No" +if "model_type" in show: + ip = show.index("model_type") + for (old, new) in MODEL_SUFFIX_REPLACE.items(): + for row_table in table: + row_table[ip] = row_table[ip].replace(old, new) + if "model_size" in show: ip = show.index("model_size") for row_table in table: @@ -341,10 +353,16 @@ if "model_size" in show: if "gpu_info" in show: ip = show.index("gpu_info") - for gns in GPU_NAME_STRIP: - for row_table in table: + for row_table in table: + for gns in GPU_NAME_STRIP: row_table[ip] = row_table[ip].replace(gns, "") + gpu_names = row_table[ip].split("/") + num_gpus = len(gpu_names) + all_names_the_same = len(set(gpu_names)) == 1 + if len(gpu_names) >= 2 and all_names_the_same: + row_table[ip] = f"{num_gpus}x {gpu_names[0]}" + headers = [PRETTY_NAMES[p] for p in show] headers += ["Test", f"t/s {name_baseline}", f"t/s {name_compare}", "Speedup"] From b38b5e93ae31019e87f692b69d27124eae6aac02 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:03:45 +0200 Subject: [PATCH 298/811] metal : refactor kernel loading code (#4794) * metal : detect more GPU families * metal : refactor kernel loading * metal : set kernel family requirements * metal : fix kernel init + fix compile options * metal : take into account simdgroup reduction support * metal : print only skipped kernels * metal : fix check for simdgroup reduction support * metal : check for Metal 3 * metal : free allocations * metal : normalize encoder:setComputePipelineStatus calls ggml-ci * metal : fix Metal3 family check ggml-ci * metal : check for simdgroup matrix mul. 
feature ggml-ci --- ggml-metal.m | 1048 +++++++++++++++++++++++++------------------------- 1 file changed, 530 insertions(+), 518 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index c03624073..6c28a7ee3 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -26,6 +26,8 @@ #define GGML_MAX_CONCUR (2*GGML_DEFAULT_GRAPH_SIZE) +#define GGML_METAL_MAX_KERNELS 256 + struct ggml_metal_buffer { const char * name; @@ -35,6 +37,134 @@ struct ggml_metal_buffer { id metal; }; +struct ggml_metal_kernel { + id function; + id pipeline; +}; + +enum ggml_metal_kernel_type { + GGML_METAL_KERNEL_TYPE_ADD, + GGML_METAL_KERNEL_TYPE_ADD_ROW, + GGML_METAL_KERNEL_TYPE_MUL, + GGML_METAL_KERNEL_TYPE_MUL_ROW, + GGML_METAL_KERNEL_TYPE_DIV, + GGML_METAL_KERNEL_TYPE_DIV_ROW, + GGML_METAL_KERNEL_TYPE_SCALE, + GGML_METAL_KERNEL_TYPE_SCALE_4, + GGML_METAL_KERNEL_TYPE_TANH, + GGML_METAL_KERNEL_TYPE_RELU, + GGML_METAL_KERNEL_TYPE_GELU, + GGML_METAL_KERNEL_TYPE_GELU_QUICK, + GGML_METAL_KERNEL_TYPE_SILU, + GGML_METAL_KERNEL_TYPE_SOFT_MAX, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, + GGML_METAL_KERNEL_TYPE_RMS_NORM, + GGML_METAL_KERNEL_TYPE_GROUP_NORM, + GGML_METAL_KERNEL_TYPE_NORM, + GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, + 
GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_ROPE_F32, + GGML_METAL_KERNEL_TYPE_ROPE_F16, + GGML_METAL_KERNEL_TYPE_ALIBI_F32, + GGML_METAL_KERNEL_TYPE_IM2COL_F16, + GGML_METAL_KERNEL_TYPE_UPSCALE_F32, + GGML_METAL_KERNEL_TYPE_PAD_F32, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, + GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_F16, + GGML_METAL_KERNEL_TYPE_CPY_F32_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, + //GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, + //GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, + GGML_METAL_KERNEL_TYPE_CPY_F16_F16, + GGML_METAL_KERNEL_TYPE_CPY_F16_F32, + GGML_METAL_KERNEL_TYPE_CONCAT, + GGML_METAL_KERNEL_TYPE_SQR, + GGML_METAL_KERNEL_TYPE_SUM_ROWS, + + GGML_METAL_KERNEL_TYPE_COUNT +}; + struct ggml_metal_context { int n_cb; @@ -50,134 +180,13 @@ struct ggml_metal_context { int n_buffers; struct ggml_metal_buffer buffers[GGML_METAL_MAX_BUFFERS]; + struct ggml_metal_kernel kernels[GGML_METAL_MAX_KERNELS]; + int concur_list[GGML_MAX_CONCUR]; int concur_list_len; - // custom kernels -#define GGML_METAL_DECL_KERNEL(name) \ - id function_##name; \ - id pipeline_##name - - GGML_METAL_DECL_KERNEL(add); - GGML_METAL_DECL_KERNEL(add_row); // TODO: avoid this extra kernel, instead extend the "add" kernel to support broadcast - GGML_METAL_DECL_KERNEL(mul); - GGML_METAL_DECL_KERNEL(mul_row); // TODO: avoid this extra kernel, instead extend the "mul" kernel to support broadcast - GGML_METAL_DECL_KERNEL(div); - GGML_METAL_DECL_KERNEL(div_row); - GGML_METAL_DECL_KERNEL(scale); - GGML_METAL_DECL_KERNEL(scale_4); - GGML_METAL_DECL_KERNEL(tanh); - GGML_METAL_DECL_KERNEL(relu); - GGML_METAL_DECL_KERNEL(gelu); - GGML_METAL_DECL_KERNEL(gelu_quick); - GGML_METAL_DECL_KERNEL(silu); - GGML_METAL_DECL_KERNEL(soft_max); - GGML_METAL_DECL_KERNEL(soft_max_4); - GGML_METAL_DECL_KERNEL(diag_mask_inf); - GGML_METAL_DECL_KERNEL(diag_mask_inf_8); - GGML_METAL_DECL_KERNEL(get_rows_f32); - GGML_METAL_DECL_KERNEL(get_rows_f16); - GGML_METAL_DECL_KERNEL(get_rows_q4_0); - GGML_METAL_DECL_KERNEL(get_rows_q4_1); - GGML_METAL_DECL_KERNEL(get_rows_q5_0); - GGML_METAL_DECL_KERNEL(get_rows_q5_1); - GGML_METAL_DECL_KERNEL(get_rows_q8_0); - GGML_METAL_DECL_KERNEL(get_rows_q2_K); - GGML_METAL_DECL_KERNEL(get_rows_q3_K); - GGML_METAL_DECL_KERNEL(get_rows_q4_K); - GGML_METAL_DECL_KERNEL(get_rows_q5_K); - GGML_METAL_DECL_KERNEL(get_rows_q6_K); - GGML_METAL_DECL_KERNEL(get_rows_i32); - 
GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); - GGML_METAL_DECL_KERNEL(get_rows_iq2_xs); - GGML_METAL_DECL_KERNEL(rms_norm); - GGML_METAL_DECL_KERNEL(group_norm); - GGML_METAL_DECL_KERNEL(norm); - GGML_METAL_DECL_KERNEL(mul_mv_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f16); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_DECL_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); - GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(rope_f32); - GGML_METAL_DECL_KERNEL(rope_f16); - GGML_METAL_DECL_KERNEL(alibi_f32); - GGML_METAL_DECL_KERNEL(im2col_f16); - GGML_METAL_DECL_KERNEL(upscale_f32); - GGML_METAL_DECL_KERNEL(pad_f32); - GGML_METAL_DECL_KERNEL(argsort_f32_i32_asc); - GGML_METAL_DECL_KERNEL(argsort_f32_i32_desc); - GGML_METAL_DECL_KERNEL(leaky_relu_f32); - GGML_METAL_DECL_KERNEL(cpy_f32_f16); - GGML_METAL_DECL_KERNEL(cpy_f32_f32); - GGML_METAL_DECL_KERNEL(cpy_f32_q8_0); - GGML_METAL_DECL_KERNEL(cpy_f32_q4_0); - GGML_METAL_DECL_KERNEL(cpy_f32_q4_1); - 
//GGML_METAL_DECL_KERNEL(cpy_f32_q5_0); - //GGML_METAL_DECL_KERNEL(cpy_f32_q5_1); - GGML_METAL_DECL_KERNEL(cpy_f16_f16); - GGML_METAL_DECL_KERNEL(cpy_f16_f32); - GGML_METAL_DECL_KERNEL(concat); - GGML_METAL_DECL_KERNEL(sqr); - GGML_METAL_DECL_KERNEL(sum_rows); - -#undef GGML_METAL_DECL_KERNEL + bool support_simdgroup_reduction; + bool support_simdgroup_mm; }; // MSL code @@ -298,19 +307,22 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { return NULL; } - MTLCompileOptions* options = nil; + // dictionary of preprocessor macros + NSMutableDictionary * prep = [NSMutableDictionary dictionary]; + #ifdef GGML_QKK_64 - options = [MTLCompileOptions new]; - options.preprocessorMacros = @{ @"QK_K" : @(64) }; + prep[@"QK_K"] = @(64); #endif - // try to disable fast-math - // NOTE: this seems to have no effect whatsoever - // instead, in order to disable fast-math, we have to build default.metallib from the command line - // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air - // and go through the "pre-compiled library found" path above + + MTLCompileOptions* options = [MTLCompileOptions new]; + options.preprocessorMacros = prep; + //[options setFastMathEnabled:false]; ctx->library = [ctx->device newLibraryWithSource:src options:options error:&error]; + + [options release]; + [prep release]; } if (error) { @@ -323,16 +335,41 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { // print MTL GPU family: GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); + const NSInteger MTLGPUFamilyMetal3 = 5001; + // determine max supported GPU family // https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf - for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { - if ([ctx->device supportsFamily:i]) { - GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); - break; + { + for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyCommon1 + 5; i >= MTLGPUFamilyCommon1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyCommon%d (%d)\n", __func__, i - (int) MTLGPUFamilyCommon1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyMetal3 + 5; i >= MTLGPUFamilyMetal3; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyMetal%d (%d)\n", __func__, i - (int) MTLGPUFamilyMetal3 + 3, i); + break; + } } } + ctx->support_simdgroup_reduction = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + ctx->support_simdgroup_reduction |= [ctx->device supportsFamily:MTLGPUFamilyMetal3]; + + ctx->support_simdgroup_mm = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + + GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); + GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? 
"true" : "false"); GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); if (ctx->device.maxTransferRate != 0) { @@ -346,141 +383,152 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { { NSError * error = nil; + for (int i = 0; i < GGML_METAL_MAX_KERNELS; ++i) { + ctx->kernels[i].function = nil; + ctx->kernels[i].pipeline = nil; + } + /* - GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) ctx->pipeline_##name, \ - (int) ctx->pipeline_##name.maxTotalThreadsPerThreadgroup, \ - (int) ctx->pipeline_##name.threadExecutionWidth); \ + GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ + (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ + (int) kernel->pipeline.threadExecutionWidth); \ */ -#define GGML_METAL_ADD_KERNEL(name) \ - ctx->function_##name = [ctx->library newFunctionWithName:@"kernel_"#name]; \ - ctx->pipeline_##name = [ctx->device newComputePipelineStateWithFunction:ctx->function_##name error:&error]; \ - if (error) { \ - GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ - return NULL; \ +#define GGML_METAL_ADD_KERNEL(e, name, supported) \ + if (supported) { \ + struct ggml_metal_kernel * kernel = &ctx->kernels[e]; \ + kernel->function = [ctx->library newFunctionWithName:@"kernel_"#name]; \ + kernel->pipeline = [ctx->device newComputePipelineStateWithFunction:kernel->function error:&error]; \ + GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ + (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ + (int) kernel->pipeline.threadExecutionWidth); \ + if (error) { \ + GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ + return NULL; \ + } \ + } else { \ + GGML_METAL_LOG_WARN("%s: skipping %-32s (not supported)\n", __func__, "kernel_"#name); \ } - GGML_METAL_ADD_KERNEL(add); - GGML_METAL_ADD_KERNEL(add_row); - GGML_METAL_ADD_KERNEL(mul); - GGML_METAL_ADD_KERNEL(mul_row); - GGML_METAL_ADD_KERNEL(div); - GGML_METAL_ADD_KERNEL(div_row); - GGML_METAL_ADD_KERNEL(scale); - GGML_METAL_ADD_KERNEL(scale_4); - GGML_METAL_ADD_KERNEL(tanh); - GGML_METAL_ADD_KERNEL(relu); - GGML_METAL_ADD_KERNEL(gelu); - GGML_METAL_ADD_KERNEL(gelu_quick); - GGML_METAL_ADD_KERNEL(silu); - GGML_METAL_ADD_KERNEL(soft_max); - GGML_METAL_ADD_KERNEL(soft_max_4); - GGML_METAL_ADD_KERNEL(diag_mask_inf); - GGML_METAL_ADD_KERNEL(diag_mask_inf_8); - GGML_METAL_ADD_KERNEL(get_rows_f32); - GGML_METAL_ADD_KERNEL(get_rows_f16); - GGML_METAL_ADD_KERNEL(get_rows_q4_0); - GGML_METAL_ADD_KERNEL(get_rows_q4_1); - GGML_METAL_ADD_KERNEL(get_rows_q5_0); - GGML_METAL_ADD_KERNEL(get_rows_q5_1); - GGML_METAL_ADD_KERNEL(get_rows_q8_0); - GGML_METAL_ADD_KERNEL(get_rows_q2_K); - GGML_METAL_ADD_KERNEL(get_rows_q3_K); - GGML_METAL_ADD_KERNEL(get_rows_q4_K); - GGML_METAL_ADD_KERNEL(get_rows_q5_K); - GGML_METAL_ADD_KERNEL(get_rows_q6_K); - GGML_METAL_ADD_KERNEL(get_rows_i32); - GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); - GGML_METAL_ADD_KERNEL(get_rows_iq2_xs); - GGML_METAL_ADD_KERNEL(rms_norm); - GGML_METAL_ADD_KERNEL(group_norm); - GGML_METAL_ADD_KERNEL(norm); - GGML_METAL_ADD_KERNEL(mul_mv_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f16); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f32); - 
GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_ADD_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); - GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xs_f32); - if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { - GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xs_f32); - } - GGML_METAL_ADD_KERNEL(rope_f32); - GGML_METAL_ADD_KERNEL(rope_f16); - GGML_METAL_ADD_KERNEL(alibi_f32); - GGML_METAL_ADD_KERNEL(im2col_f16); - GGML_METAL_ADD_KERNEL(upscale_f32); - GGML_METAL_ADD_KERNEL(pad_f32); - GGML_METAL_ADD_KERNEL(argsort_f32_i32_asc); - GGML_METAL_ADD_KERNEL(argsort_f32_i32_desc); - GGML_METAL_ADD_KERNEL(leaky_relu_f32); - GGML_METAL_ADD_KERNEL(cpy_f32_f16); - GGML_METAL_ADD_KERNEL(cpy_f32_f32); - GGML_METAL_ADD_KERNEL(cpy_f32_q8_0); - GGML_METAL_ADD_KERNEL(cpy_f32_q4_0); - GGML_METAL_ADD_KERNEL(cpy_f32_q4_1); - //GGML_METAL_ADD_KERNEL(cpy_f32_q5_0); - //GGML_METAL_ADD_KERNEL(cpy_f32_q5_1); - GGML_METAL_ADD_KERNEL(cpy_f16_f16); - GGML_METAL_ADD_KERNEL(cpy_f16_f32); - GGML_METAL_ADD_KERNEL(concat); - GGML_METAL_ADD_KERNEL(sqr); - GGML_METAL_ADD_KERNEL(sum_rows); + // simd_sum and simd_max requires MTLGPUFamilyApple7 -#undef GGML_METAL_ADD_KERNEL + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD, add, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD_ROW, add_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL, mul, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_ROW, mul_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV, div, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV_ROW, div_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE, scale, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE_4, scale_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TANH, tanh, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RELU, relu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU, gelu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK, gelu_quick, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU, silu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX, soft_max, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, soft_max_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, diag_mask_inf, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, diag_mask_inf_8, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, get_rows_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, get_rows_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, get_rows_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, get_rows_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, get_rows_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, get_rows_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, get_rows_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, get_rows_q2_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, get_rows_q3_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, get_rows_q4_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, get_rows_q5_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, get_rows_q6_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_NORM, norm, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, mul_mv_f32_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, mul_mv_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, mul_mv_f16_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, mul_mv_f16_f32_1row, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, mul_mv_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, mul_mv_q4_0_f32, ctx->support_simdgroup_reduction); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, mul_mv_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, mul_mv_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, mul_mv_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, mul_mv_id_f16_f32_1row, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, mul_mv_id_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, mul_mv_id_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, mul_mv_id_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, mul_mv_id_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, mul_mv_id_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, mul_mv_id_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, mul_mv_id_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, mul_mv_id_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, mul_mv_id_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, mul_mv_id_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, mul_mv_id_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, 
mul_mm_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, mul_mm_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, mul_mm_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, mul_mm_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, mul_mm_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, mul_mm_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, mul_mm_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, mul_mm_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, mul_mm_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, mul_mm_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, mul_mm_id_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, mul_mm_id_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, mul_mm_id_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, mul_mm_id_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, mul_mm_id_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, mul_mm_id_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, mul_mm_id_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, mul_mm_id_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, mul_mm_id_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, cpy_f32_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, cpy_f32_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, cpy_f32_q4_1, true); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, cpy_f32_q5_0, true); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, cpy_f32_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F16, cpy_f16_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F32, cpy_f16_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONCAT, concat, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQR, sqr, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); } return ctx; @@ -488,137 +536,21 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_LOG_INFO("%s: deallocating\n", __func__); -#define GGML_METAL_DEL_KERNEL(name) \ - [ctx->function_##name release]; \ - [ctx->pipeline_##name release]; - - GGML_METAL_DEL_KERNEL(add); - GGML_METAL_DEL_KERNEL(add_row); - GGML_METAL_DEL_KERNEL(mul); - GGML_METAL_DEL_KERNEL(mul_row); - GGML_METAL_DEL_KERNEL(div); - GGML_METAL_DEL_KERNEL(div_row); - GGML_METAL_DEL_KERNEL(scale); - GGML_METAL_DEL_KERNEL(scale_4); - GGML_METAL_DEL_KERNEL(tanh); - GGML_METAL_DEL_KERNEL(relu); - GGML_METAL_DEL_KERNEL(gelu); - GGML_METAL_DEL_KERNEL(gelu_quick); - GGML_METAL_DEL_KERNEL(silu); - GGML_METAL_DEL_KERNEL(soft_max); - GGML_METAL_DEL_KERNEL(soft_max_4); - GGML_METAL_DEL_KERNEL(diag_mask_inf); - GGML_METAL_DEL_KERNEL(diag_mask_inf_8); - GGML_METAL_DEL_KERNEL(get_rows_f32); - GGML_METAL_DEL_KERNEL(get_rows_f16); - GGML_METAL_DEL_KERNEL(get_rows_q4_0); - GGML_METAL_DEL_KERNEL(get_rows_q4_1); - GGML_METAL_DEL_KERNEL(get_rows_q5_0); - GGML_METAL_DEL_KERNEL(get_rows_q5_1); - GGML_METAL_DEL_KERNEL(get_rows_q8_0); - GGML_METAL_DEL_KERNEL(get_rows_q2_K); - GGML_METAL_DEL_KERNEL(get_rows_q3_K); - GGML_METAL_DEL_KERNEL(get_rows_q4_K); - GGML_METAL_DEL_KERNEL(get_rows_q5_K); - GGML_METAL_DEL_KERNEL(get_rows_q6_K); - GGML_METAL_DEL_KERNEL(get_rows_i32); - GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); - GGML_METAL_DEL_KERNEL(get_rows_iq2_xs); - GGML_METAL_DEL_KERNEL(rms_norm); - GGML_METAL_DEL_KERNEL(group_norm); - GGML_METAL_DEL_KERNEL(norm); - GGML_METAL_DEL_KERNEL(mul_mv_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f16); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_DEL_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); - 
GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xs_f32); - if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { - GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xs_f32); - } - GGML_METAL_DEL_KERNEL(rope_f32); - GGML_METAL_DEL_KERNEL(rope_f16); - GGML_METAL_DEL_KERNEL(alibi_f32); - GGML_METAL_DEL_KERNEL(im2col_f16); - GGML_METAL_DEL_KERNEL(upscale_f32); - GGML_METAL_DEL_KERNEL(pad_f32); - GGML_METAL_DEL_KERNEL(argsort_f32_i32_asc); - GGML_METAL_DEL_KERNEL(argsort_f32_i32_desc); - GGML_METAL_DEL_KERNEL(leaky_relu_f32); - GGML_METAL_DEL_KERNEL(cpy_f32_f16); - GGML_METAL_DEL_KERNEL(cpy_f32_f32); - GGML_METAL_DEL_KERNEL(cpy_f32_q8_0); - GGML_METAL_DEL_KERNEL(cpy_f32_q4_0); - GGML_METAL_DEL_KERNEL(cpy_f32_q4_1); - //GGML_METAL_DEL_KERNEL(cpy_f32_q5_0); - //GGML_METAL_DEL_KERNEL(cpy_f32_q5_1); - GGML_METAL_DEL_KERNEL(cpy_f16_f16); - GGML_METAL_DEL_KERNEL(cpy_f16_f32); - GGML_METAL_DEL_KERNEL(concat); - GGML_METAL_DEL_KERNEL(sqr); - GGML_METAL_DEL_KERNEL(sum_rows); - -#undef GGML_METAL_DEL_KERNEL for (int i = 0; i < ctx->n_buffers; ++i) { [ctx->buffers[i].metal release]; } + for (int i = 0; i < GGML_METAL_MAX_KERNELS; ++i) { + if (ctx->kernels[i].pipeline) { + [ctx->kernels[i].pipeline release]; + } + + if (ctx->kernels[i].function) { + [ctx->kernels[i].function release]; + } + } + [ctx->library release]; [ctx->queue release]; [ctx->device release]; @@ -930,7 +862,7 @@ void ggml_metal_graph_find_concurrency( } } -static bool ggml_metal_supports_op(const struct ggml_tensor * op) { +static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { @@ -956,9 +888,11 @@ static bool ggml_metal_supports_op(const struct ggml_tensor 
* op) { case GGML_OP_SCALE: case GGML_OP_SQR: case GGML_OP_SUM_ROWS: + return true; case GGML_OP_SOFT_MAX: case GGML_OP_RMS_NORM: case GGML_OP_GROUP_NORM: + return ctx->support_simdgroup_reduction; case GGML_OP_NORM: case GGML_OP_ALIBI: case GGML_OP_ROPE: @@ -967,9 +901,10 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_PAD: case GGML_OP_ARGSORT: case GGML_OP_LEAKY_RELU: + return true; case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: - return true; + return ctx->support_simdgroup_reduction; case GGML_OP_CPY: case GGML_OP_DUP: case GGML_OP_CONT: @@ -1007,6 +942,7 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { return false; } } + bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @@ -1077,7 +1013,7 @@ bool ggml_metal_graph_compute( } break; } - if (!ggml_metal_supports_op(dst)) { + if (!ggml_metal_supports_op(ctx, dst)) { GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); GGML_ASSERT(!"unsupported op"); } @@ -1143,7 +1079,9 @@ bool ggml_metal_graph_compute( { const int64_t nb = ne00; - [encoder setComputePipelineState:ctx->pipeline_concat]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1197,18 +1135,18 @@ bool ggml_metal_graph_compute( nb = ne00 / 4; switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->pipeline_add_row; break; - case GGML_OP_MUL: pipeline = ctx->pipeline_mul_row; break; - case GGML_OP_DIV: pipeline = ctx->pipeline_div_row; break; + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; default: GGML_ASSERT(false); } bcast_row = true; } else { switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->pipeline_add; break; - case GGML_OP_MUL: pipeline = ctx->pipeline_mul; break; - case GGML_OP_DIV: pipeline = ctx->pipeline_div; break; + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; default: GGML_ASSERT(false); } } @@ -1275,9 +1213,9 @@ bool ggml_metal_graph_compute( // not sure how to avoid this // TODO: make a simpler cpy_bytes kernel - const int nth = MIN((int) ctx->pipeline_cpy_f32_f32.maxTotalThreadsPerThreadgroup, ne00); + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; - [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -1297,10 +1235,14 @@ bool ggml_metal_graph_compute( [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } - [encoder 
setComputePipelineState:ctx->pipeline_add]; + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1330,7 +1272,7 @@ bool ggml_metal_graph_compute( [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - const int nth = MIN((int) ctx->pipeline_add.maxTotalThreadsPerThreadgroup, ne00); + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1342,13 +1284,16 @@ bool ggml_metal_graph_compute( int64_t n = ggml_nelements(dst); + id pipeline = nil; + if (n % 4 == 0) { n /= 4; - [encoder setComputePipelineState:ctx->pipeline_scale_4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; } else { - [encoder setComputePipelineState:ctx->pipeline_scale]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; @@ -1359,7 +1304,9 @@ bool ggml_metal_graph_compute( switch (ggml_get_unary_op(gf->nodes[i])) { case GGML_UNARY_OP_TANH: { - [encoder setComputePipelineState:ctx->pipeline_tanh]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1369,7 +1316,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_RELU: { - [encoder setComputePipelineState:ctx->pipeline_relu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1379,7 +1328,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_GELU: { - [encoder setComputePipelineState:ctx->pipeline_gelu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1390,7 +1341,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_GELU_QUICK: { - [encoder setComputePipelineState:ctx->pipeline_gelu_quick]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1401,7 +1354,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_SILU: { - [encoder setComputePipelineState:ctx->pipeline_silu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1420,18 +1375,23 @@ bool ggml_metal_graph_compute( { GGML_ASSERT(ggml_is_contiguous(src0)); - [encoder setComputePipelineState:ctx->pipeline_sqr]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; + + [encoder 
setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; const int64_t n = ggml_nelements(dst); + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; case GGML_OP_SUM_ROWS: { GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); - [encoder setComputePipelineState:ctx->pipeline_sum_rows]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -1465,20 +1425,23 @@ bool ggml_metal_graph_compute( { int nth = 32; // SIMD width + id pipeline = nil; + if (ne00%4 == 0) { while (nth < ne00/4 && nth < 256) { nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_soft_max_4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; } else { while (nth < ne00 && nth < 1024) { nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_soft_max]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; } const float scale = ((float *) dst->op_params)[0]; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; if (id_src1) { [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; @@ -1498,11 +1461,15 @@ bool ggml_metal_graph_compute( { const int n_past = ((int32_t *)(dst->op_params))[0]; + id pipeline = nil; + if (ne00%8 == 0) { - [encoder setComputePipelineState:ctx->pipeline_diag_mask_inf_8]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; } else { - [encoder setComputePipelineState:ctx->pipeline_diag_mask_inf]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -1562,23 +1529,28 @@ bool ggml_metal_graph_compute( ne00 % 32 == 0 && ne00 >= 64 && (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_f32_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_f16_f32]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_0_f32]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_1_f32]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_0_f32]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_1_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q8_0_f32]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q2_K_f32]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q3_K_f32]; break; - case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_K_f32]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; - case 
GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xs_f32]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1602,12 +1574,14 @@ bool ggml_metal_graph_compute( int nrows = 1; //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + id pipeline = nil; + // use custom matrix x vector kernel switch (src0t) { case GGML_TYPE_F32: { GGML_ASSERT(src1t == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f32_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; nrows = 4; } break; case GGML_TYPE_F16: @@ -1616,16 +1590,16 @@ bool ggml_metal_graph_compute( nth1 = 1; if (src1t == GGML_TYPE_F32) { if (ne11 * ne12 < 4) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_1row]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_l4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; nrows = ne11; } else { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; nrows = 4; } } else { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f16]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; nrows = 4; } } break; @@ -1633,73 +1607,73 @@ bool ggml_metal_graph_compute( { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; } break; case GGML_TYPE_Q4_1: { nth0 = 8; nth1 = 8; - [encoder 
setComputePipelineState:ctx->pipeline_mul_mv_q4_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; } break; case GGML_TYPE_Q5_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; } break; case GGML_TYPE_Q5_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; } break; case GGML_TYPE_Q8_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q8_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; } break; case GGML_TYPE_Q2_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q2_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; } break; case GGML_TYPE_Q3_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q3_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; } break; case GGML_TYPE_Q4_K: { nth0 = 4; //1; nth1 = 8; //32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; } break; case GGML_TYPE_Q5_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; } break; case GGML_TYPE_Q6_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q6_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; } break; case GGML_TYPE_IQ2_XXS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; } break; case GGML_TYPE_IQ2_XS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; } break; default: { @@ -1712,6 +1686,7 @@ bool ggml_metal_graph_compute( GGML_ASSERT(ne00 >= nth0*nth1); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1818,23 +1793,28 @@ bool ggml_metal_graph_compute( if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && ne20 % 32 == 0 && ne20 >= 64 && ne11 > ne11_mm_min) { + + id pipeline = nil; + switch (src2->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_0_f32]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_1_f32]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_0_f32]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_1_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q8_0_f32]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q2_K_f32]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q3_K_f32]; break; - case GGML_TYPE_Q4_K: [encoder 
setComputePipelineState:ctx->pipeline_mul_mm_id_q4_K_f32]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; - case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xs_f32]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1874,91 +1854,93 @@ bool ggml_metal_graph_compute( int nrows = 1; //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + id pipeline = nil; + // use custom matrix x vector kernel switch (src2t) { case GGML_TYPE_F32: { GGML_ASSERT(src1t == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f32_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; } break; case GGML_TYPE_F16: { GGML_ASSERT(src1t == GGML_TYPE_F32); nth0 = 32; nth1 = 1; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f16_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; } break; case GGML_TYPE_Q4_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; } break; case GGML_TYPE_Q4_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; } break; case GGML_TYPE_Q5_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_0_f32]; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; } break; case GGML_TYPE_Q5_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; } break; case GGML_TYPE_Q8_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q8_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; } break; case GGML_TYPE_Q2_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q2_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; } break; case GGML_TYPE_Q3_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q3_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; } break; case GGML_TYPE_Q4_K: { nth0 = 4; //1; nth1 = 8; //32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; } break; case GGML_TYPE_Q5_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; } break; case GGML_TYPE_Q6_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q6_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; } break; case GGML_TYPE_IQ2_XXS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; } break; case GGML_TYPE_IQ2_XS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; } break; default: { @@ -1973,6 +1955,7 @@ bool ggml_metal_graph_compute( const int64_t _ne1 = 1; // kernels needs a reference in constant memory + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2040,25 +2023,28 @@ bool ggml_metal_graph_compute( } break; case GGML_OP_GET_ROWS: { + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_get_rows_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_get_rows_f16]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_0]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_1]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_0]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_1]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q8_0]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q2_K]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q3_K]; break; - case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_K]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; - case GGML_TYPE_I32: [encoder 
setComputePipelineState:ctx->pipeline_get_rows_i32]; break; - case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xs]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2086,7 +2072,9 @@ bool ggml_metal_graph_compute( nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_rms_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2115,7 +2103,9 @@ bool ggml_metal_graph_compute( // nth *= 2; //} - [encoder setComputePipelineState:ctx->pipeline_group_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2137,7 +2127,9 @@ bool ggml_metal_graph_compute( const int nth = MIN(256, ne00); - [encoder setComputePipelineState:ctx->pipeline_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2164,7 +2156,9 @@ bool ggml_metal_graph_compute( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - [encoder 
setComputePipelineState:ctx->pipeline_alibi_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2209,12 +2203,15 @@ bool ggml_metal_graph_compute( memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_rope_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_rope_f16]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2277,12 +2274,15 @@ bool ggml_metal_graph_compute( const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; const int32_t ofs1 = src1->nb[is_2D ? 2 : 1] / 4; + id pipeline = nil; + switch (src0->type) { case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_im2col_f16]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; @@ -2305,7 +2305,9 @@ bool ggml_metal_graph_compute( const int sf = dst->op_params[0]; - [encoder setComputePipelineState:ctx->pipeline_upscale_f32]; + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -2326,7 +2328,7 @@ bool ggml_metal_graph_compute( [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - const int nth = MIN((int) ctx->pipeline_upscale_f32.maxTotalThreadsPerThreadgroup, ne0); + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -2334,7 +2336,9 @@ bool ggml_metal_graph_compute( { GGML_ASSERT(src0->type == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_pad_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -2367,12 +2371,15 @@ bool ggml_metal_graph_compute( enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + id pipeline = nil; + switch (order) { - case GGML_SORT_ASC: [encoder setComputePipelineState:ctx->pipeline_argsort_f32_i32_asc]; break; - case GGML_SORT_DESC: [encoder 
setComputePipelineState:ctx->pipeline_argsort_f32_i32_desc]; break; + case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2386,7 +2393,9 @@ bool ggml_metal_graph_compute( float slope; memcpy(&slope, dst->op_params, sizeof(float)); - [encoder setComputePipelineState:ctx->pipeline_leaky_relu_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; @@ -2403,33 +2412,36 @@ bool ggml_metal_graph_compute( int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); + id pipeline = nil; + switch (src0t) { case GGML_TYPE_F32: { GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); switch (dstt) { - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f16]; break; - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q8_0]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_0]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_1]; break; - //case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_0]; break; - //case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_1]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; + //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; + //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; default: GGML_ASSERT(false && "not implemented"); }; } break; case GGML_TYPE_F16: { switch (dstt) { - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f16]; break; - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f32]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; default: GGML_ASSERT(false && "not implemented"); }; } break; default: GGML_ASSERT(false && "not implemented"); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2794,9 +2806,9 @@ static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml } static bool 
ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { - return ggml_metal_supports_op(op); + struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; - UNUSED(backend); + return ggml_metal_supports_op(metal_ctx, op); } static struct ggml_backend_i ggml_backend_metal_i = { From c30b1ef39aeba497a943416d2897d69fee055b96 Mon Sep 17 00:00:00 2001 From: texmex76 <40733439+texmex76@users.noreply.github.com> Date: Sat, 13 Jan 2024 17:06:20 +0100 Subject: [PATCH 299/811] gguf : fix potential infinite for-loop (#4600) Co-authored-by: Bernhard Gstrein --- ggml.c | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml.c b/ggml.c index 6dbd7626c..de6ef34bd 100644 --- a/ggml.c +++ b/ggml.c @@ -19184,7 +19184,7 @@ void gguf_free(struct gguf_context * ctx) { if (ctx->kv) { // free string memory - not great.. - for (uint32_t i = 0; i < ctx->header.n_kv; ++i) { + for (uint64_t i = 0; i < ctx->header.n_kv; ++i) { struct gguf_kv * kv = &ctx->kv[i]; if (kv->key.data) { @@ -19200,7 +19200,7 @@ void gguf_free(struct gguf_context * ctx) { if (kv->type == GGUF_TYPE_ARRAY) { if (kv->value.arr.data) { if (kv->value.arr.type == GGUF_TYPE_STRING) { - for (uint32_t j = 0; j < kv->value.arr.n; ++j) { + for (uint64_t j = 0; j < kv->value.arr.n; ++j) { struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[j]; if (str->data) { free(str->data); @@ -19216,7 +19216,7 @@ void gguf_free(struct gguf_context * ctx) { } if (ctx->infos) { - for (uint32_t i = 0; i < ctx->header.n_tensors; ++i) { + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { struct gguf_tensor_info * info = &ctx->infos[i]; if (info->name.data) { From 722d33f34ec74c6f7046109f936d0928ffe171bc Mon Sep 17 00:00:00 2001 From: Yann Follet <131855179+YannFollet@users.noreply.github.com> Date: Sun, 14 Jan 2024 00:09:08 +0800 Subject: [PATCH 300/811] main : add parameter --no-display-prompt (#4541) * add the parameter : --no-display-prompt , combine with --log-disable it will display only the generated tokens * remove empty line --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 6 +++++- common/common.h | 1 + examples/main/main.cpp | 7 ++++++- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 322b9f91e..c11006bcb 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -617,6 +617,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.numa = true; } else if (arg == "--verbose-prompt") { params.verbose_prompt = true; + } else if (arg == "--no-display-prompt") { + params.display_prompt = false; } else if (arg == "-r" || arg == "--reverse-prompt") { if (++i >= argc) { invalid_param = true; @@ -936,11 +938,12 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); #endif + printf(" --verbose-prompt print a verbose prompt before generation (default: %s)\n", params.verbose_prompt ? "true" : "false"); + printf(" --no-display-prompt don't print prompt at generation (default: %s)\n", !params.display_prompt ? 
"true" : "false"); printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); printf(" -gaw N, --grp-attn-w N\n"); printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); - printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); printf(" verbose print of the KV cache\n"); printf(" -nkvo, --no-kv-offload\n"); @@ -1582,6 +1585,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "min_p: %f # default: 0.0\n", sparams.min_p); fprintf(stream, "typical_p: %f # default: 1.0\n", sparams.typical_p); fprintf(stream, "verbose_prompt: %s # default: false\n", params.verbose_prompt ? "true" : "false"); + fprintf(stream, "display_prompt: %s # default: true\n", params.display_prompt ? "true" : "false"); } // diff --git a/common/common.h b/common/common.h index f29be5b5a..096468243 100644 --- a/common/common.h +++ b/common/common.h @@ -126,6 +126,7 @@ struct gpt_params { bool use_mlock = false; // use mlock to keep model in memory bool numa = false; // attempt optimizations that help on some NUMA systems bool verbose_prompt = false; // print prompt tokens before generation + bool display_prompt = true; // print prompt before generation bool infill = false; // use infill mode bool dump_kv_cache = false; // dump the KV cache contents for debugging purposes bool no_kv_offload = false; // disable KV offloading diff --git a/examples/main/main.cpp b/examples/main/main.cpp index c53b29978..58b7f807a 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -477,6 +477,7 @@ int main(int argc, char ** argv) { bool is_antiprompt = false; bool input_echo = true; + bool display = true; bool need_to_save_session = !path_session.empty() && n_matching_session_tokens < embd_inp.size(); int n_past = 0; @@ -491,6 +492,7 @@ int main(int argc, char ** argv) { // the first thing we will do is to output the prompt, so set color accordingly console::set_display(console::prompt); + display = params.display_prompt; std::vector embd; std::vector embd_guidance; @@ -707,7 +709,7 @@ int main(int argc, char ** argv) { } // display text - if (input_echo) { + if (input_echo && display) { for (auto id : embd) { const std::string token_str = llama_token_to_piece(ctx, id); printf("%s", token_str.c_str()); @@ -724,6 +726,7 @@ int main(int argc, char ** argv) { // reset color to default if there is no pending user input if (input_echo && (int) embd_inp.size() == n_consumed) { console::set_display(console::reset); + display = true; } // if not currently processing queued inputs; @@ -796,6 +799,7 @@ int main(int argc, char ** argv) { // color user input only console::set_display(console::user_input); + display = params.display_prompt; std::string line; bool another_line = true; @@ -806,6 +810,7 @@ int main(int argc, char ** argv) { // done taking input, reset color console::set_display(console::reset); + display = true; // Add tokens to embd only if the input buffer is non-empty // Entering a empty line lets the user pass control back From 6b48ed089377330cdb362970a51c1c89b6d857a8 Mon Sep 17 00:00:00 2001 From: Someone Date: Sat, 13 Jan 2024 16:29:16 +0000 Subject: [PATCH 301/811] workflows: unbreak nix-build-aarch64, and split it out (#4915) The fix should be just the `sudo apt-get update` --- .github/workflows/nix-ci-aarch64.yml | 55 ++++++++++++++++++++++++++++ .github/workflows/nix-ci.yml | 41 --------------------- 2 files changed, 55 insertions(+), 41 deletions(-) create 
mode 100644 .github/workflows/nix-ci-aarch64.yml diff --git a/.github/workflows/nix-ci-aarch64.yml b/.github/workflows/nix-ci-aarch64.yml new file mode 100644 index 000000000..be7c26d40 --- /dev/null +++ b/.github/workflows/nix-ci-aarch64.yml @@ -0,0 +1,55 @@ +name: Nix aarch64 builds + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + +jobs: + nix-build-aarch64: + if: ${{ vars.CACHIX_NAME != '' }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install QEMU + # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 + run: | + sudo apt-get update + sudo apt-get install -y qemu-user-static qemu-system-aarch64 + sudo usermod -a -G kvm $USER + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-platforms = aarch64-linux + extra-system-features = nixos-test kvm + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.aarch64-linux" + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --systems aarch64-linux + --flake + ".#checks.aarch64-linux" diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index a38c6ead4..845b93bfb 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -69,44 +69,3 @@ jobs: -- --skip-cached --no-nom --flake ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" - nix-build-aarch64: - if: ${{ vars.CACHIX_NAME != '' }} - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Install QEMU - # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 - run: | - sudo apt-get install -y qemu-user-static qemu-system-aarch64 - sudo usermod -a -G kvm $USER - - name: Install Nix - uses: DeterminateSystems/nix-installer-action@v9 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - extra-conf: | - extra-platforms = aarch64-linux - extra-system-features = nixos-test kvm - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - - uses: DeterminateSystems/magic-nix-cache-action@v2 - with: - upstream-cache: https://${{ matrix.cachixName }}.cachix.org - - name: Set-up cachix to push the results to - uses: 
cachix/cachix-action@v13 - with: - authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} - - name: Show all output paths - run: > - nix run github:nix-community/nix-eval-jobs - -- --gc-roots-dir gcroot - --flake - ".#packages.aarch64-linux" - - name: Build - run: > - nix run github:Mic92/nix-fast-build - -- --skip-cached --no-nom - --systems aarch64-linux - --flake - ".#checks.aarch64-linux" From df845cc982e7e2ea7b9900e29d55b15338faa78d Mon Sep 17 00:00:00 2001 From: David Friehs Date: Sat, 13 Jan 2024 17:29:43 +0100 Subject: [PATCH 302/811] llama : minimize size used for state save/load (#4820) * examples : save-load-state: save only required state * llama : only reserve n_vocab * n_batch at most for logits llama_decode asserts that only n_batch tokens are passed each call, and n_ctx is expected to be bigger than n_batch. * llama : always reserve n_vocab * n_batch for logits llama_context de-serialization breaks if the contexts have differing capacity for logits and llama_decode will at maximum resize to n_vocab * n_batch. * llama : only save and restore used logits for batch sizes of 512 this reduces save state in the best case by around 62 MB, which can be a lot if planning to save on each message to allow regenerating messages. * llama : use ostringstream and istringstream for save and load * llama : serialize rng into minimum amount of space required * llama : break session version due to serialization changes --- examples/save-load-state/save-load-state.cpp | 21 ++++---- llama.cpp | 53 +++++++------------- llama.h | 2 +- 3 files changed, 29 insertions(+), 47 deletions(-) diff --git a/examples/save-load-state/save-load-state.cpp b/examples/save-load-state/save-load-state.cpp index 48d801110..ef952e2bd 100644 --- a/examples/save-load-state/save-load-state.cpp +++ b/examples/save-load-state/save-load-state.cpp @@ -45,13 +45,13 @@ int main(int argc, char ** argv) { // save state (rng, logits, embedding and kv_cache) to file { std::vector state_mem(llama_get_state_size(ctx)); + const size_t written = llama_copy_state_data(ctx, state_mem.data()); - { - FILE *fp_write = fopen("dump_state.bin", "wb"); - llama_copy_state_data(ctx, state_mem.data()); // could also copy directly to memory mapped file - fwrite(state_mem.data(), 1, state_mem.size(), fp_write); - fclose(fp_write); - } + FILE *fp_write = fopen("dump_state.bin", "wb"); + fwrite(state_mem.data(), 1, written, fp_write); + fclose(fp_write); + + fprintf(stderr, "%s : serialized state into %zd out of a maximum of %zd bytes\n", __func__, written, state_mem.size()); } // save state (last tokens) @@ -100,18 +100,17 @@ int main(int argc, char ** argv) { std::vector state_mem(llama_get_state_size(ctx2)); FILE * fp_read = fopen("dump_state.bin", "rb"); + const size_t read = fread(state_mem.data(), 1, state_mem.size(), fp_read); + fclose(fp_read); - const size_t ret = fread(state_mem.data(), 1, state_mem.size(), fp_read); - if (ret != state_mem.size()) { + if (read != llama_set_state_data(ctx2, state_mem.data())) { fprintf(stderr, "\n%s : failed to read state\n", __func__); llama_free(ctx2); llama_free_model(model); return 1; } - llama_set_state_data(ctx2, state_mem.data()); - - fclose(fp_read); + fprintf(stderr, "%s : deserialized state from %zd out of a maximum of %zd bytes\n", __func__, read, state_mem.size()); } // restore state (last tokens) diff --git a/llama.cpp b/llama.cpp index 1d2eb569f..275456088 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9379,12 +9379,8 @@ struct llama_context * llama_new_context_with_model( 
ggml_type_name(type_v), (float)memory_size_v / (1024.0f * 1024.0f)); } - // resized during inference - if (params.logits_all) { - ctx->logits.reserve(cparams.n_ctx*hparams.n_vocab); - } else { - ctx->logits.reserve(hparams.n_vocab); - } + // resized during inference, reserve maximum + ctx->logits.reserve(hparams.n_vocab*cparams.n_batch); if (params.embedding){ ctx->embedding.resize(hparams.n_embd); @@ -9731,8 +9727,8 @@ size_t llama_get_state_size(const struct llama_context * ctx) { // for reference, std::mt19937(1337) serializes to 6701 bytes. const size_t s_rng_size = sizeof(size_t); const size_t s_rng = LLAMA_MAX_RNG_STATE; - const size_t s_logits_capacity = sizeof(size_t); const size_t s_logits_size = sizeof(size_t); + // assume worst case for logits although only currently set ones are serialized const size_t s_logits = ctx->logits.capacity() * sizeof(float); const size_t s_embedding_size = sizeof(size_t); const size_t s_embedding = ctx->embedding.size() * sizeof(float); @@ -9743,7 +9739,6 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_total = ( + s_rng_size + s_rng - + s_logits_capacity + s_logits_size + s_logits + s_embedding_size @@ -9812,37 +9807,27 @@ struct llama_data_file_context : llama_data_context { static void llama_copy_state_data_internal(struct llama_context * ctx, llama_data_context * data_ctx) { // copy rng { - std::stringstream rng_ss; + std::ostringstream rng_ss; rng_ss << ctx->rng; - const size_t rng_size = rng_ss.str().size(); - char rng_buf[LLAMA_MAX_RNG_STATE]; + const std::string & rng_str = rng_ss.str(); + const size_t rng_size = rng_str.size(); - memset(&rng_buf[0], 0, LLAMA_MAX_RNG_STATE); - memcpy(&rng_buf[0], rng_ss.str().data(), rng_ss.str().size()); + GGML_ASSERT(rng_size <= LLAMA_MAX_RNG_STATE); - data_ctx->write(&rng_size, sizeof(rng_size)); - data_ctx->write(&rng_buf[0], LLAMA_MAX_RNG_STATE); + data_ctx->write(&rng_size, sizeof(rng_size)); + data_ctx->write(rng_str.data(), rng_size); } // copy logits { - const size_t logits_cap = ctx->logits.capacity(); const size_t logits_size = ctx->logits.size(); - data_ctx->write(&logits_cap, sizeof(logits_cap)); data_ctx->write(&logits_size, sizeof(logits_size)); if (logits_size) { data_ctx->write(ctx->logits.data(), logits_size * sizeof(float)); } - - // If there is a gap between the size and the capacity, write padding - size_t padding_size = (logits_cap - logits_size) * sizeof(float); - if (padding_size > 0) { - std::vector padding(padding_size, 0); // Create a buffer filled with zeros - data_ctx->write(padding.data(), padding_size); - } } // copy embeddings @@ -9925,13 +9910,13 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { // set rng { size_t rng_size; - char rng_buf[LLAMA_MAX_RNG_STATE]; + memcpy(&rng_size, inp, sizeof(rng_size)); inp += sizeof(rng_size); - memcpy(&rng_size, inp, sizeof(rng_size)); inp += sizeof(rng_size); - memcpy(&rng_buf[0], inp, LLAMA_MAX_RNG_STATE); inp += LLAMA_MAX_RNG_STATE; + GGML_ASSERT(rng_size <= LLAMA_MAX_RNG_STATE); - std::stringstream rng_ss; - rng_ss.str(std::string(&rng_buf[0], rng_size)); + std::string rng_str((char *)inp, rng_size); inp += rng_size; + + std::istringstream rng_ss(rng_str); rng_ss >> ctx->rng; GGML_ASSERT(!rng_ss.fail()); @@ -9939,20 +9924,18 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { // set logits { - size_t logits_cap; size_t logits_size; - memcpy(&logits_cap, inp, sizeof(logits_cap)); inp += sizeof(logits_cap); memcpy(&logits_size, inp, sizeof(logits_size)); 
inp += sizeof(logits_size); - GGML_ASSERT(ctx->logits.capacity() == logits_cap); + GGML_ASSERT(ctx->logits.capacity() >= logits_size); if (logits_size) { ctx->logits.resize(logits_size); - memcpy(ctx->logits.data(), inp, logits_size * sizeof(float)); - } - inp += logits_cap * sizeof(float); + memcpy(ctx->logits.data(), inp, logits_size * sizeof(float)); + inp += logits_size * sizeof(float); + } } // set embeddings diff --git a/llama.h b/llama.h index 689e12d7c..01d6fafaa 100644 --- a/llama.h +++ b/llama.h @@ -43,7 +43,7 @@ #define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN -#define LLAMA_SESSION_VERSION 3 +#define LLAMA_SESSION_VERSION 4 #if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) // Defined when llama.cpp is compiled with support for offloading model layers to GPU. From 2d57de525541247132e354f561ff48775fba5d85 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:46:37 +0200 Subject: [PATCH 303/811] metal : disable log for loaded kernels (#4794) --- ggml-metal.m | 3 --- 1 file changed, 3 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 6c28a7ee3..57e444827 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -398,9 +398,6 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { struct ggml_metal_kernel * kernel = &ctx->kernels[e]; \ kernel->function = [ctx->library newFunctionWithName:@"kernel_"#name]; \ kernel->pipeline = [ctx->device newComputePipelineStateWithFunction:kernel->function error:&error]; \ - GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ - (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ - (int) kernel->pipeline.threadExecutionWidth); \ if (error) { \ GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ return NULL; \ From f172de03f11465dc6c5a0fc3a22f8ec254c6832c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:47:38 +0200 Subject: [PATCH 304/811] llama : fix detokenization of non-special added-tokens (#4916) Co-authored-by: goerch --- llama.cpp | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 275456088..2190ea7aa 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10305,6 +10305,8 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { case LLAMA_VOCAB_TYPE_SPM: { + // NOTE: we accept all unsupported token types, + // suppressing them like CONTROL tokens. 
if (llama_is_normal_token(model->vocab, token)) { std::string result = model->vocab.id_to_token[token].text; llama_unescape_whitespace(result); @@ -10313,6 +10315,13 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); + } else if (llama_is_user_defined_token(model->vocab, token)) { + std::string result = model->vocab.id_to_token[token].text; + if (length < (int) result.length()) { + return -result.length(); + } + memcpy(buf, result.c_str(), result.length()); + return result.length(); } else if (llama_is_unknown_token(model->vocab, token)) { // NOLINT if (length < 3) { return -3; @@ -10327,14 +10336,12 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } buf[0] = llama_token_to_byte(model->vocab, token); return 1; - } else { - // TODO: for now we accept all unsupported token types, - // suppressing them like CONTROL tokens. - // GGML_ASSERT(false); } break; } case LLAMA_VOCAB_TYPE_BPE: { + // NOTE: we accept all unsupported token types, + // suppressing them like CONTROL tokens. if (llama_is_normal_token(model->vocab, token)) { std::string result = model->vocab.id_to_token[token].text; result = llama_decode_text(result); @@ -10343,12 +10350,15 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); + } else if (llama_is_user_defined_token(model->vocab, token)) { + std::string result = model->vocab.id_to_token[token].text; + if (length < (int) result.length()) { + return -result.length(); + } + memcpy(buf, result.c_str(), result.length()); + return result.length(); } else if (llama_is_control_token(model->vocab, token)) { ; - } else { - // TODO: for now we accept all unsupported token types, - // suppressing them like CONTROL tokens. 
- // GGML_ASSERT(false); } break; } From 0ea069b87bd296c556824e57455433b6c0357340 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 19:31:26 +0200 Subject: [PATCH 305/811] server : fix prompt caching with system prompt (#4914) --- examples/server/server.cpp | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 79eacf828..93f999298 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1180,8 +1180,9 @@ struct llama_server_context return slot.images.size() > 0; } - void send_error(task_server& task, std::string error) + void send_error(task_server& task, const std::string &error) { + LOG_TEE("task %i - error: %s\n", task.id, error.c_str()); std::unique_lock lock(mutex_results); task_result res; res.id = task.id; @@ -1570,12 +1571,22 @@ struct llama_server_context LOG_TEE("slot unavailable\n"); // send error result send_error(task, "slot unavailable"); - return; + break; } if (task.data.contains("system_prompt")) { + if (!all_slots_are_idle) { + send_error(task, "system prompt can only be updated when all slots are idle"); + break; + } process_system_prompt_data(task.data["system_prompt"]); + + // reset cache_tokens for all slots + for (llama_client_slot &slot : slots) + { + slot.cache_tokens.clear(); + } } slot->reset(); @@ -1652,8 +1663,7 @@ struct llama_server_context // attend tasks process_tasks(); - // update the system prompt wait until all slots are idle state - if (system_need_update && all_slots_are_idle) + if (system_need_update) { LOG_TEE("updating system prompt\n"); update_system_prompt(); From 4be5ef556de830c5c4f6e45c05ef4427823fe607 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 20:45:45 +0200 Subject: [PATCH 306/811] metal : remove old API (#4919) ggml-ci --- Makefile | 9 -- examples/CMakeLists.txt | 3 - examples/metal/CMakeLists.txt | 4 - examples/metal/metal.cpp | 103 ------------- ggml-metal.h | 55 +------ ggml-metal.m | 276 +++------------------------------- llama.cpp | 4 +- 7 files changed, 27 insertions(+), 427 deletions(-) delete mode 100644 examples/metal/CMakeLists.txt delete mode 100644 examples/metal/metal.cpp diff --git a/Makefile b/Makefile index 05fe9a0f6..995b89f7a 100644 --- a/Makefile +++ b/Makefile @@ -43,10 +43,6 @@ ifeq ($(UNAME_S),Darwin) endif endif -ifneq '' '$(or $(filter clean,$(MAKECMDGOALS)),$(LLAMA_METAL))' -BUILD_TARGETS += metal -endif - default: $(BUILD_TARGETS) test: $(TEST_TARGETS) @@ -671,11 +667,6 @@ lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -ifdef LLAMA_METAL -metal: examples/metal/metal.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -endif - ifeq ($(UNAME_S),Darwin) swift: examples/batched.swift (cd examples/batched.swift; make build) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index fa127a3aa..f67d74c55 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -37,9 +37,6 @@ else() add_subdirectory(lookup) add_subdirectory(train-text-from-scratch) add_subdirectory(imatrix) - if (LLAMA_METAL) - add_subdirectory(metal) - endif() if (LLAMA_BUILD_SERVER) add_subdirectory(server) endif() diff --git a/examples/metal/CMakeLists.txt b/examples/metal/CMakeLists.txt deleted file mode 100644 index f16d49165..000000000 --- a/examples/metal/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ 
-set(TEST_TARGET metal) -add_executable(${TEST_TARGET} metal.cpp) -install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TEST_TARGET} PRIVATE ggml) diff --git a/examples/metal/metal.cpp b/examples/metal/metal.cpp deleted file mode 100644 index 16c1146f9..000000000 --- a/examples/metal/metal.cpp +++ /dev/null @@ -1,103 +0,0 @@ -// Evaluate a statically exported ggml computation graph with Metal -// -// - First, export a LLaMA graph: -// -// $ ./bin/main -m ../models/7B/ggml-model-q4_0.gguf --export -// -// - Run this tool to evaluate the exported graph: -// -// $ ./bin/metal llama.ggml -// -// The purpose of this tool is mostly for debugging and demonstration purposes. -// The main limitation of exporting computation graphs is that their sizes are static which often -// can be a problem for real-world applications. -// - -#include "ggml.h" -#include "ggml-metal.h" - -#include -#include -#include - -int main(int argc, char ** argv) { - ggml_time_init(); - - if (argc != 2) { - fprintf(stderr, "Usage: %s llama.ggml\n", argv[0]); - return -1; - } - - const char * fname_cgraph = argv[1]; - - // load the compute graph - struct ggml_context * ctx_data = NULL; - struct ggml_context * ctx_eval = NULL; - - struct ggml_cgraph * gf = ggml_graph_import(fname_cgraph, &ctx_data, &ctx_eval); - - // this allocates all Metal resources and memory buffers - auto * ctx_metal = ggml_metal_init(1); - - const size_t max_size_data = ggml_get_max_tensor_size(ctx_data); - const size_t max_size_eval = ggml_get_max_tensor_size(ctx_eval); - ggml_metal_add_buffer(ctx_metal, "data", ggml_get_mem_buffer(ctx_data), ggml_get_mem_size(ctx_data), max_size_data); - ggml_metal_add_buffer(ctx_metal, "eval", ggml_get_mem_buffer(ctx_eval), ggml_get_mem_size(ctx_eval), max_size_eval); - - // main - { - struct ggml_tensor * input = ggml_graph_get_tensor(gf, "embd"); - *(int32_t *) input->data = 1; // BOS - - ggml_metal_set_tensor(ctx_metal, input); - - // warmup - ggml_metal_graph_compute(ctx_metal, gf); - - const int n_iter = 16; - - const int64_t t0 = ggml_time_us(); - - // the actual inference happens here - for (int i = 0; i < n_iter; ++i) { - ggml_metal_graph_compute(ctx_metal, gf); - } - - const int64_t t1 = ggml_time_us(); - - printf("time: %.2f ms, %.2f ms/tok\n", (t1 - t0) / 1000.0, (t1 - t0) / 1000.0 / n_iter); - } - - // debug output - { - struct ggml_tensor * logits = gf->nodes[gf->n_nodes - 1]; - ggml_metal_get_tensor(ctx_metal, logits); - - float * ptr = (float *) ggml_get_data(logits); - - printf("logits: "); - for (int i = 0; i < 10; i++) { - printf("%8.4f ", ptr[i]); - } - printf("\n"); - int imax = 0; - double sum = 0.0; - double vmax = -1e9; - for (int i = 0; i < 32000; i++) { - sum += (double) ptr[i]; - if (ptr[i] > vmax) { - vmax = ptr[i]; - imax = i; - } - } - printf("sum: %f, imax = %d, vmax = %f\n", sum, imax, vmax); - } - - ggml_metal_free(ctx_metal); - - ggml_free(ctx_data); - ggml_free(ctx_eval); - - return 0; -} - diff --git a/ggml-metal.h b/ggml-metal.h index c4b7325da..cd5e2995f 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -36,64 +36,13 @@ struct ggml_cgraph; extern "C" { #endif -// -// internal API -// temporary exposed to user-code -// - -struct ggml_metal_context; - -void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data); - -// number of command buffers to use -struct ggml_metal_context * ggml_metal_init(int n_cb); -void ggml_metal_free(struct ggml_metal_context * ctx); - -void * ggml_metal_host_malloc(size_t n); -void ggml_metal_host_free (void * data); - -// set 
the number of command buffers to use -void ggml_metal_set_n_cb(struct ggml_metal_context * ctx, int n_cb); - -// creates a mapping between a host memory buffer and a device memory buffer -// - make sure to map all buffers used in the graph before calling ggml_metal_graph_compute -// - the mapping is used during computation to determine the arguments of the compute kernels -// - you don't need to keep the host memory buffer allocated as it is never accessed by Metal -// - max_size specifies the maximum size of a tensor and is used to create shared views such -// that it is guaranteed that the tensor will fit in at least one of the views -// -bool ggml_metal_add_buffer( - struct ggml_metal_context * ctx, - const char * name, - void * data, - size_t size, - size_t max_size); - -// set data from host memory into the device -void ggml_metal_set_tensor(struct ggml_metal_context * ctx, struct ggml_tensor * t); - -// get data from the device into host memory -void ggml_metal_get_tensor(struct ggml_metal_context * ctx, struct ggml_tensor * t); - -// try to find operations that can be run concurrently in the graph -// you should run it again if the topology of your graph changes -void ggml_metal_graph_find_concurrency(struct ggml_metal_context * ctx, struct ggml_cgraph * gf, bool check_mem); - -// if the graph has been optimized for concurrently dispatch, return length of the concur_list if optimized -int ggml_metal_if_optimized(struct ggml_metal_context * ctx); - -// output the concur_list for ggml_alloc -int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx); - -// same as ggml_graph_compute but uses Metal -// creates gf->n_threads command buffers in parallel -bool ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); - // // backend API // user-code should use only these functions // +GGML_API void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data); + GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); diff --git a/ggml-metal.m b/ggml-metal.m index 57e444827..cae52c983 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -24,8 +24,6 @@ #define UNUSED(x) (void)(x) -#define GGML_MAX_CONCUR (2*GGML_DEFAULT_GRAPH_SIZE) - #define GGML_METAL_MAX_KERNELS 256 struct ggml_metal_buffer { @@ -182,9 +180,6 @@ struct ggml_metal_context { struct ggml_metal_kernel kernels[GGML_METAL_MAX_KERNELS]; - int concur_list[GGML_MAX_CONCUR]; - int concur_list_len; - bool support_simdgroup_reduction; bool support_simdgroup_mm; }; @@ -200,7 +195,6 @@ struct ggml_metal_context { @implementation GGMLMetalClass @end - static void ggml_metal_default_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { fprintf(stderr, "%s", msg); @@ -211,11 +205,6 @@ static void ggml_metal_default_log_callback(enum ggml_log_level level, const cha ggml_log_callback ggml_metal_log_callback = ggml_metal_default_log_callback; void * ggml_metal_log_user_data = NULL; -void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { - ggml_metal_log_callback = log_callback; - ggml_metal_log_user_data = user_data; -} - GGML_ATTRIBUTE_FORMAT(2, 3) static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ if (ggml_metal_log_callback != NULL) { @@ -238,7 +227,18 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ } } -struct ggml_metal_context * ggml_metal_init(int n_cb) { +static void * ggml_metal_host_malloc(size_t n) { + 
void * data = NULL; + const int result = posix_memalign((void **) &data, sysconf(_SC_PAGESIZE), n); + if (result != 0) { + GGML_METAL_LOG_ERROR("%s: error: posix_memalign failed\n", __func__); + return NULL; + } + + return data; +} + +static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: allocating\n", __func__); id device; @@ -264,7 +264,6 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); ctx->queue = [ctx->device newCommandQueue]; ctx->n_buffers = 0; - ctx->concur_list_len = 0; ctx->d_queue = dispatch_queue_create("ggml-metal", DISPATCH_QUEUE_CONCURRENT); @@ -531,7 +530,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { return ctx; } -void ggml_metal_free(struct ggml_metal_context * ctx) { +static void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_LOG_INFO("%s: deallocating\n", __func__); for (int i = 0; i < ctx->n_buffers; ++i) { @@ -557,33 +556,6 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { free(ctx); } -void * ggml_metal_host_malloc(size_t n) { - void * data = NULL; - const int result = posix_memalign((void **) &data, sysconf(_SC_PAGESIZE), n); - if (result != 0) { - GGML_METAL_LOG_ERROR("%s: error: posix_memalign failed\n", __func__); - return NULL; - } - - return data; -} - -void ggml_metal_host_free(void * data) { - free(data); -} - -void ggml_metal_set_n_cb(struct ggml_metal_context * ctx, int n_cb) { - ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); -} - -int ggml_metal_if_optimized(struct ggml_metal_context * ctx) { - return ctx->concur_list_len; -} - -int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx) { - return ctx->concur_list; -} - // temporarily defined here for compatibility between ggml-backend and the old API struct ggml_backend_metal_buffer { @@ -656,209 +628,6 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru return nil; } -bool ggml_metal_add_buffer( - struct ggml_metal_context * ctx, - const char * name, - void * data, - size_t size, - size_t max_size) { - if (ctx->n_buffers >= GGML_METAL_MAX_BUFFERS) { - GGML_METAL_LOG_ERROR("%s: error: too many buffers\n", __func__); - return false; - } - - if (data) { - // verify that the buffer does not overlap with any of the existing buffers - for (int i = 0; i < ctx->n_buffers; ++i) { - const int64_t ioffs = (int64_t) data - (int64_t) ctx->buffers[i].data; - - if (ioffs >= 0 && ioffs < (int64_t) ctx->buffers[i].size) { - GGML_METAL_LOG_ERROR("%s: error: buffer '%s' overlaps with '%s'\n", __func__, name, ctx->buffers[i].name); - return false; - } - } - - const size_t size_page = sysconf(_SC_PAGESIZE); - - size_t size_aligned = size; - if ((size_aligned % size_page) != 0) { - size_aligned += (size_page - (size_aligned % size_page)); - } - - // the buffer fits into the max buffer size allowed by the device - if (size_aligned <= ctx->device.maxBufferLength) { - ctx->buffers[ctx->n_buffers].name = name; - ctx->buffers[ctx->n_buffers].data = data; - ctx->buffers[ctx->n_buffers].size = size; - - ctx->buffers[ctx->n_buffers].metal = [ctx->device newBufferWithBytesNoCopy:data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; - - if (ctx->buffers[ctx->n_buffers].metal == nil) { - GGML_METAL_LOG_ERROR("%s: error: failed to allocate '%-16s' buffer, size = %8.2f MiB\n", __func__, name, size_aligned / 1024.0 / 1024.0); - return false; - } - - GGML_METAL_LOG_INFO("%s: allocated '%-16s' buffer, size = %8.2f MiB", __func__, name, size_aligned / 1024.0 / 
1024.0); - - ++ctx->n_buffers; - } else { - // this overlap between the views will guarantee that the tensor with the maximum size will fully fit into - // one of the views - const size_t size_ovlp = ((max_size + size_page - 1) / size_page + 1) * size_page; // round-up 2 pages just in case - const size_t size_step = ctx->device.maxBufferLength - size_ovlp; - const size_t size_view = ctx->device.maxBufferLength; - - for (size_t i = 0; i < size; i += size_step) { - const size_t size_step_aligned = (i + size_view <= size) ? size_view : (size_aligned - i); - - ctx->buffers[ctx->n_buffers].name = name; - ctx->buffers[ctx->n_buffers].data = (void *) ((uint8_t *) data + i); - ctx->buffers[ctx->n_buffers].size = size_step_aligned; - - ctx->buffers[ctx->n_buffers].metal = [ctx->device newBufferWithBytesNoCopy:(void *) ((uint8_t *) data + i) length:size_step_aligned options:MTLResourceStorageModeShared deallocator:nil]; - - if (ctx->buffers[ctx->n_buffers].metal == nil) { - GGML_METAL_LOG_ERROR("%s: error: failed to allocate '%-16s' buffer, size = %8.2f MiB\n", __func__, name, size_step_aligned / 1024.0 / 1024.0); - return false; - } - - GGML_METAL_LOG_INFO("%s: allocated '%-16s' buffer, size = %8.2f MiB, offs = %12ld", __func__, name, size_step_aligned / 1024.0 / 1024.0, i); - if (i + size_step < size) { - GGML_METAL_LOG_INFO("\n"); - } - - ++ctx->n_buffers; - } - } - -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - ctx->device.currentAllocatedSize / 1024.0 / 1024.0, - ctx->device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (ctx->device.currentAllocatedSize > ctx->device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", ctx->device.currentAllocatedSize / 1024.0 / 1024.0); -#endif - } - - return true; -} - -void ggml_metal_set_tensor( - struct ggml_metal_context * ctx, - struct ggml_tensor * t) { - size_t offs; - id id_dst = ggml_metal_get_buffer(ctx, t, &offs); - - memcpy((void *) ((uint8_t *) id_dst.contents + offs), t->data, ggml_nbytes(t)); -} - -void ggml_metal_get_tensor( - struct ggml_metal_context * ctx, - struct ggml_tensor * t) { - size_t offs; - id id_src = ggml_metal_get_buffer(ctx, t, &offs); - - memcpy(t->data, (void *) ((uint8_t *) id_src.contents + offs), ggml_nbytes(t)); -} - -void ggml_metal_graph_find_concurrency( - struct ggml_metal_context * ctx, - struct ggml_cgraph * gf, bool check_mem) { - int search_depth = gf->n_nodes; //we only find concurrency in this range to avoid wasting too much time - int nodes_unused[GGML_MAX_CONCUR]; - - for (int i = 0; i < GGML_MAX_CONCUR; i++) { ctx->concur_list[i] = 0; } - for (int i = 0; i < gf->n_nodes; i++) { nodes_unused[i] = 1; } - ctx->concur_list_len = 0; - - int n_left = gf->n_nodes; - int n_start = 0; // all nodes before n_start at nodes_unused array have been sorted and store back to ctx->concur_list - int level_pos = 0; // at ctx->concur_list, the last layer (level) ends at level_pos - - while (n_left > 0) { - // number of nodes at a layer (that can be issued concurrently) - int concurrency = 0; - for (int i = n_start; i < ((n_start + search_depth > gf->n_nodes) ? 
gf->n_nodes : n_start + search_depth); i++) { - if (nodes_unused[i]) { - // if the requirements for gf->nodes[i] are satisfied - int exe_flag = 1; - - // scan all srcs - for (int src_ind = 0; src_ind < GGML_MAX_SRC; src_ind++) { - struct ggml_tensor * src_cur = gf->nodes[i]->src[src_ind]; - if (src_cur) { - // if is leaf nodes it's satisfied. - // TODO: ggml_is_leaf() - if (src_cur->op == GGML_OP_NONE && src_cur->grad == NULL) { - continue; - } - - // otherwise this src should be the output from previous nodes. - int is_found = 0; - - // scan 2*search_depth back because we inserted barrier. - //for (int j = ((level_pos - 2*search_depth) < 0 ? 0 : (level_pos - 2*search_depth)); j < level_pos; j++) { - for (int j = MAX(0, level_pos - 2*search_depth); j < level_pos; j++) { - if (ctx->concur_list[j] >= 0 && gf->nodes[ctx->concur_list[j]] == src_cur) { - is_found = 1; - break; - } - } - if (is_found == 0) { - exe_flag = 0; - break; - } - } - } - if (exe_flag && check_mem) { - // check if nodes[i]'s data will be overwritten by a node before nodes[i]. - // if node[5] and node[3] write to the same memory region, then we can't issue node[5] before node[3] - int64_t data_start = (int64_t) gf->nodes[i]->data; - int64_t length = (int64_t) ggml_nbytes(gf->nodes[i]); - for (int j = n_start; j < i; j++) { - if (nodes_unused[j] && gf->nodes[j]->op != GGML_OP_RESHAPE \ - && gf->nodes[j]->op != GGML_OP_VIEW \ - && gf->nodes[j]->op != GGML_OP_TRANSPOSE \ - && gf->nodes[j]->op != GGML_OP_PERMUTE) { - if (((int64_t)gf->nodes[j]->data) >= data_start + length || \ - ((int64_t)gf->nodes[j]->data) + (int64_t) ggml_nbytes(gf->nodes[j]) <= data_start) { - continue; - } - - exe_flag = 0; - } - } - } - if (exe_flag) { - ctx->concur_list[level_pos + concurrency] = i; - nodes_unused[i] = 0; - concurrency++; - ctx->concur_list_len++; - } - } - } - n_left -= concurrency; - // adding a barrier different layer - ctx->concur_list[level_pos + concurrency] = -1; - ctx->concur_list_len++; - // jump all sorted nodes at nodes_bak - while (!nodes_unused[n_start]) { - n_start++; - } - level_pos += concurrency + 1; - } - - if (ctx->concur_list_len > GGML_MAX_CONCUR) { - GGML_METAL_LOG_WARN("%s: too many elements for metal ctx->concur_list!\n", __func__); - } -} - static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: @@ -940,19 +709,15 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const } } -bool ggml_metal_graph_compute( +static bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @autoreleasepool { - // if there is ctx->concur_list, dispatch concurrently - // else fallback to serial dispatch MTLComputePassDescriptor * edesc = MTLComputePassDescriptor.computePassDescriptor; - const bool has_concur = ctx->concur_list_len && ctx->concur_list_len <= GGML_MAX_CONCUR; - - const int n_nodes = has_concur ? ctx->concur_list_len : gf->n_nodes; - edesc.dispatchType = has_concur ? MTLDispatchTypeConcurrent : MTLDispatchTypeSerial; + const int n_nodes = gf->n_nodes; + edesc.dispatchType = MTLDispatchTypeSerial; // create multiple command buffers and enqueue them // then, we encode the graph into the command buffers in parallel @@ -983,7 +748,7 @@ bool ggml_metal_graph_compute( const int node_end = MIN((cb_idx == n_cb - 1) ? n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); for (int ind = node_start; ind < node_end; ++ind) { - const int i = has_concur ? 
ctx->concur_list[ind] : ind; + const int i = ind; if (i == -1) { [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; @@ -2823,6 +2588,11 @@ static struct ggml_backend_i ggml_backend_metal_i = { /* .supports_op = */ ggml_backend_metal_supports_op, }; +void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { + ggml_metal_log_callback = log_callback; + ggml_metal_log_user_data = user_data; +} + ggml_backend_t ggml_backend_metal_init(void) { struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); @@ -2849,7 +2619,7 @@ void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; - ggml_metal_set_n_cb(ctx, n_cb); + ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); } bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { diff --git a/llama.cpp b/llama.cpp index 2190ea7aa..66494974a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1266,7 +1266,7 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g struct llama_state { llama_state() { #ifdef GGML_USE_METAL - ggml_metal_log_set_callback(log_callback, log_callback_user_data); + ggml_backend_metal_log_set_callback(log_callback, log_callback_user_data); #endif } @@ -10470,7 +10470,7 @@ void llama_log_set(ggml_log_callback log_callback, void * user_data) { g_state.log_callback = log_callback ? log_callback : llama_log_callback_default; g_state.log_callback_user_data = user_data; #ifdef GGML_USE_METAL - ggml_metal_log_set_callback(g_state.log_callback, g_state.log_callback_user_data); + ggml_backend_metal_log_set_callback(g_state.log_callback, g_state.log_callback_user_data); #endif } From c71d608ce7a1584bf5072f197919dd24f3a6163f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 13 Jan 2024 21:41:37 +0100 Subject: [PATCH 307/811] ggml: cache sin/cos for RoPE (#4908) --- ggml.c | 46 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 14 deletions(-) diff --git a/ggml.c b/ggml.c index de6ef34bd..bcfb6652c 100644 --- a/ggml.c +++ b/ggml.c @@ -11638,6 +11638,21 @@ static float ggml_rope_yarn_corr_dim(int n_dims, int n_orig_ctx, float n_rot, fl return n_dims * logf(n_orig_ctx / (n_rot * 2 * (float)M_PI)) / (2 * logf(base)); } +static void ggml_rope_cache_init( + float theta_base, float freq_scale, float corr_dims[2], int64_t ne0, float ext_factor, float mscale, + float * cache, float sin_sign, float theta_scale +) { + float theta = theta_base; + for (int64_t i0 = 0; i0 < ne0; i0 += 2) { + rope_yarn( + theta, freq_scale, corr_dims, i0, ext_factor, mscale, &cache[i0 + 0], &cache[i0 + 1] + ); + cache[i0 + 1] *= sin_sign; + + theta *= theta_scale; + } +} + void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] ) { @@ -11720,6 +11735,12 @@ static void ggml_compute_forward_rope_f32( for (int64_t i3 = 0; i3 < ne3; i3++) { for (int64_t i2 = 0; i2 < ne2; i2++) { const int64_t p = pos[i2]; + + float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith; + if (!is_glm && !is_neox) { // TODO: cache sin/cos for glm, neox + ggml_rope_cache_init(p, freq_scale, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale); + } + for (int64_t i1 = 0; i1 < ne1; i1++) { if (ir++ < ir0) continue; if (ir > ir1) break; @@ -11753,18 +11774,13 @@ static void ggml_compute_forward_rope_f32( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < 
ne0; i0 += 2) { - float cos_theta, sin_theta; - rope_yarn( - theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta - ); - sin_theta *= sin_sign; + const float cos_theta = cache[i0 + 0]; + const float sin_theta = cache[i0 + 1]; // zeta scaling for xPos only: float zeta = xpos_base != 0.0f ? powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; if (xpos_down) zeta = 1.0f / zeta; - theta_base *= theta_scale; - const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11888,6 +11904,12 @@ static void ggml_compute_forward_rope_f16( for (int64_t i3 = 0; i3 < ne3; i3++) { for (int64_t i2 = 0; i2 < ne2; i2++) { const int64_t p = pos[i2]; + + float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith; + if (!is_glm && !is_neox) { // TODO: cache sin/cos for glm, neox + ggml_rope_cache_init(p, freq_scale, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale); + } + for (int64_t i1 = 0; i1 < ne1; i1++) { if (ir++ < ir0) continue; if (ir > ir1) break; @@ -11921,13 +11943,8 @@ static void ggml_compute_forward_rope_f16( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - float cos_theta, sin_theta; - rope_yarn( - theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta - ); - sin_theta *= sin_sign; - - theta_base *= theta_scale; + const float cos_theta = cache[i0 + 0]; + const float sin_theta = cache[i0 + 1]; const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -16722,6 +16739,7 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa } } break; case GGML_OP_SOFT_MAX: + case GGML_OP_ROPE: { cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; } break; From 76484fbfd355df388f71d6edaa98e1692a74de7e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 00:14:46 +0200 Subject: [PATCH 308/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index edcdb530a..753d227a7 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -400c07f00508e6f60fb25405444b5669c365b0a9 +1890780da4ea10db88736fcde85f285abf6c64b0 From 807179ec583dcb882f97d9704577c06beb2c5ec9 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 09:44:30 +0200 Subject: [PATCH 309/811] Make Q3_K_S be the same as olf Q3_K_L for Mixtral-8x7B (#4906) Co-authored-by: Iwan Kawrakow --- llama.cpp | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/llama.cpp b/llama.cpp index 66494974a..8e20e72a2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8489,9 +8489,16 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty ++qs.i_feed_forward_w2; } else if (name.find("attn_output.weight") != std::string::npos) { if (arch != LLM_ARCH_FALCON) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + if (qs.model.hparams.n_expert == 8) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == 
LLAMA_FTYPE_MOSTLY_Q3_K_M || + ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { + new_type = GGML_TYPE_Q5_K; + } + } else { + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + } } else { if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; } From 147b17ac94a24d524e367cda26a9ff6245689f34 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 09:45:56 +0200 Subject: [PATCH 310/811] 2-bit quantizations (#4897) * imatrix: load * imatrix: WIP * imatrix: Add Q2_K quantization * imatrix: also guard against Q2_K_S quantization without importance matrix * imatrix: guard even more against low-bit quantization misuse --------- Co-authored-by: Iwan Kawrakow --- examples/benchmark/benchmark-matmult.cpp | 4 +- examples/quantize/quantize.cpp | 133 +++- ggml-quants.c | 950 +++++++++++++++++++++-- ggml-quants.h | 12 +- ggml.c | 36 +- ggml.h | 9 +- llama.cpp | 84 +- llama.h | 1 + tests/test-backend-ops.cpp | 2 +- 9 files changed, 1149 insertions(+), 82 deletions(-) diff --git a/examples/benchmark/benchmark-matmult.cpp b/examples/benchmark/benchmark-matmult.cpp index 434e1d6bd..e89f3de2f 100644 --- a/examples/benchmark/benchmark-matmult.cpp +++ b/examples/benchmark/benchmark-matmult.cpp @@ -194,7 +194,7 @@ int main(int argc, char ** argv) { // Set up a the benchmark matrices // printf("Creating new tensor q11 & Running quantize\n"); struct ggml_tensor * q11 = ggml_new_tensor_2d(ctx, qtype, sizex, sizey); - ggml_quantize_chunk(qtype, (const float *) m11->data, q11->data, 0, nelements, hist_cur.data()); + ggml_quantize_chunk(qtype, (const float *) m11->data, q11->data, 0, nelements/m11->ne[0], m11->ne[0], hist_cur.data(), nullptr); // Set up a the compute graph // printf("Creating new tensor q31\n"); @@ -207,7 +207,7 @@ int main(int argc, char ** argv) { // Set up a second graph computation to make sure we override the CPU cache lines // printf("Creating new tensor q12 & Running quantize\n"); struct ggml_tensor * q12 = ggml_new_tensor_2d(ctx, qtype, sizex, sizey); - ggml_quantize_chunk(qtype, (const float *) m12->data, q12->data, 0, nelements, hist_cur.data()); + ggml_quantize_chunk(qtype, (const float *) m12->data, q12->data, 0, nelements/m12->ne[0], m12->ne[0], hist_cur.data(), nullptr); // printf("Creating new tensor q32\n"); struct ggml_tensor * q32 = ggml_mul_mat(ctx, q12, m2); diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index f878f6911..f4e2175f1 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -5,6 +5,10 @@ #include #include #include +#include +#include +#include +#include struct quant_option { std::string name; @@ -17,6 +21,8 @@ static const std::vector QUANT_OPTIONS = { { "Q4_1", LLAMA_FTYPE_MOSTLY_Q4_1, " 3.90G, +0.1585 ppl @ LLaMA-v1-7B", }, { "Q5_0", LLAMA_FTYPE_MOSTLY_Q5_0, " 4.33G, +0.0683 ppl @ LLaMA-v1-7B", }, { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, + { "IQ2_XXS",LLAMA_FTYPE_MOSTLY_IQ2_XXS," 2.06 bpw quantization", }, + { "IQ2_XS", LLAMA_FTYPE_MOSTLY_IQ2_XS, " 2.31 bpw quantization", }, { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, @@ -72,10 +78,14 @@ static bool 
try_parse_ftype(const std::string & ftype_str_in, llama_ftype & ftyp // [[noreturn]] static void usage(const char * executable) { - printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] [--pure] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); + printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] [--pure] [--imatrix] [--include-weights] [--exclude-weights] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); printf(" --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); printf(" --leave-output-tensor: Will leave output.weight un(re)quantized. Increases model size but may also increase quality, especially when requantizing\n"); printf(" --pure: Disable k-quant mixtures and quantize all tensors to the same type\n"); + printf(" --imatrixfile_name: use data in file_name as importance matrix for quant optimizations\n"); + printf(" --include-weights tensor_name: use importance matrix for this/these tensor(s)\n"); + printf(" --exclude-weights tensor_name: use importance matrix for this/these tensor(s)\n"); + printf("Note: --include-weights and --exclude-weights cannot be used together\n"); printf("\nAllowed quantization types:\n"); for (auto & it : QUANT_OPTIONS) { if (it.name != "COPY") { @@ -83,11 +93,93 @@ static void usage(const char * executable) { } else { printf(" "); } - printf("%-6s : %s\n", it.name.c_str(), it.desc.c_str()); + printf("%-7s : %s\n", it.name.c_str(), it.desc.c_str()); } exit(1); } +static void load_imatrix(const std::string& imatrix_file, std::unordered_map>& imatrix_data) { + std::ifstream in(imatrix_file.c_str(), std::ios::binary); + if (!in) { + printf("%s: failed to open %s\n",__func__,imatrix_file.c_str()); + return; + } + int n_entries; + in.read((char*)&n_entries, sizeof(n_entries)); + if (in.fail() || n_entries < 1) { + printf("%s: no data in file %s\n", __func__, imatrix_file.c_str()); + return; + } + for (int i = 0; i < n_entries; ++i) { + int len; in.read((char *)&len, sizeof(len)); + std::vector name_as_vec(len+1); + in.read((char *)name_as_vec.data(), len); + if (in.fail()) { + printf("%s: failed reading name for entry %d from %s\n",__func__,i+1,imatrix_file.c_str()); + return; + } + name_as_vec[len] = 0; + std::string name{name_as_vec.data()}; + auto& e = imatrix_data[std::move(name)]; + int ncall; + in.read((char*)&ncall, sizeof(ncall)); + int nval; + in.read((char *)&nval, sizeof(nval)); + if (in.fail() || nval < 1) { + printf("%s: failed reading number of values for entry %d\n",__func__,i); + imatrix_data = {}; + return; + } + e.resize(nval); + in.read((char*)e.data(), nval*sizeof(float)); + if (in.fail()) { + printf("%s: failed reading data for entry %d\n",__func__,i); + imatrix_data = {}; + return; + } + if (ncall > 0) { + for (auto& v : e) v /= ncall; + } + } + printf("%s: loaded %d importance matrix entries from %s\n",__func__,int(imatrix_data.size()),imatrix_file.c_str()); +} + +static void prepare_imatrix(const std::string& imatrix_file, + const std::vector& included_weights, + const std::vector& excluded_weights, + std::unordered_map>& imatrix_data) { + if (!imatrix_file.empty()) { + load_imatrix(imatrix_file, imatrix_data); + } + if (imatrix_data.empty()) { + return; + } + if (!excluded_weights.empty()) { + for (auto& name : excluded_weights) { + for (auto it = imatrix_data.begin(); it != imatrix_data.end(); ) { + auto pos = it->first.find(name); + if (pos != 
std::string::npos) it = imatrix_data.erase(it); + else ++it; + } + } + } + if (!included_weights.empty()) { + std::unordered_map> tmp; + for (auto& name : included_weights) { + for (auto& e : imatrix_data) { + auto pos = e.first.find(name); + if (pos != std::string::npos) { + tmp.emplace(std::move(e)); + } + } + } + imatrix_data = std::move(tmp); + } + if (!imatrix_data.empty()) { + printf("%s: have %d importance matrix entries\n", __func__, int(imatrix_data.size())); + } +} + int main(int argc, char ** argv) { if (argc < 3) { usage(argv[0]); @@ -96,6 +188,8 @@ int main(int argc, char ** argv) { llama_model_quantize_params params = llama_model_quantize_default_params(); int arg_idx = 1; + std::string imatrix_file; + std::vector included_weights, excluded_weights; for (; arg_idx < argc && strncmp(argv[arg_idx], "--", 2) == 0; arg_idx++) { if (strcmp(argv[arg_idx], "--leave-output-tensor") == 0) { @@ -104,14 +198,42 @@ int main(int argc, char ** argv) { params.allow_requantize = true; } else if (strcmp(argv[arg_idx], "--pure") == 0) { params.pure = true; + } else if (strcmp(argv[arg_idx], "--imatrix") == 0) { + if (arg_idx < argc-1) { + imatrix_file = argv[++arg_idx]; + } else { + usage(argv[0]); + } + } else if (strcmp(argv[arg_idx], "--include-weights") == 0) { + if (arg_idx < argc-1) { + included_weights.push_back(argv[++arg_idx]); + } else { + usage(argv[0]); + } + } else if (strcmp(argv[arg_idx], "--exclude-weights") == 0) { + if (arg_idx < argc-1) { + excluded_weights.push_back(argv[++arg_idx]); + } else { + usage(argv[0]); + } } else { usage(argv[0]); } } if (argc - arg_idx < 2) { + printf("%s: bad arguments\n", argv[0]); usage(argv[0]); } + if (!included_weights.empty() && !excluded_weights.empty()) { + usage(argv[0]); + } + + std::unordered_map> imatrix_data; + prepare_imatrix(imatrix_file, included_weights, excluded_weights, imatrix_data); + if (!imatrix_data.empty()) { + params.imatrix = &imatrix_data; + } llama_backend_init(false); @@ -163,6 +285,13 @@ int main(int argc, char ** argv) { } } + if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) && imatrix_data.empty()) { + fprintf(stderr, "\n===============================================================================================\n"); + fprintf(stderr, "Please do not use IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); + fprintf(stderr, "===============================================================================================\n\n\n"); + return 1; + } + print_build_info(); fprintf(stderr, "%s: quantizing '%s' to '%s' as %s", __func__, fname_inp.c_str(), fname_out.c_str(), ftype_str.c_str()); diff --git a/ggml-quants.c b/ggml-quants.c index 601d155d7..9290d54cf 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -5,6 +5,8 @@ #include #include #include +#include // for qsort +#include // for GGML_ASSERT #ifdef __ARM_NEON @@ -1639,6 +1641,241 @@ size_t ggml_quantize_q2_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q2_K)); } +static float make_qkx3_quants(int n, int nmax, const float * restrict x, const float * restrict weights, + uint8_t * restrict L, float * restrict the_min, uint8_t * restrict Laux, + float rmin, float rdelta, int nstep, bool use_mad) { + float min = x[0]; + float max = x[0]; + float sum_w = weights ? 
weights[0] : x[0]*x[0]; + float sum_x = sum_w * x[0]; + for (int i = 1; i < n; ++i) { + if (x[i] < min) min = x[i]; + if (x[i] > max) max = x[i]; + float w = weights ? weights[i] : x[i]*x[i]; + sum_w += w; + sum_x += w * x[i]; + } + if (min > 0) { + min = 0; + } + if (max <= min) { + for (int i = 0; i < n; ++i) L[i] = 0; + *the_min = -min; + return 0.f; + } + float iscale = nmax/(max - min); + float scale = 1/iscale; + float best_mad = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + L[i] = MAX(0, MIN(nmax, l)); + float diff = scale * L[i] + min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + best_mad += w * diff; + } + if (nstep < 1) { + *the_min = -min; + return scale; + } + for (int is = 0; is <= nstep; ++is) { + iscale = (rmin + rdelta*is + nmax)/(max - min); + float sum_l = 0, sum_l2 = 0, sum_xl = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + l = MAX(0, MIN(nmax, l)); + Laux[i] = l; + float w = weights ? weights[i] : x[i]*x[i]; + sum_l += w*l; + sum_l2 += w*l*l; + sum_xl += w*l*x[i]; + } + float D = sum_w * sum_l2 - sum_l * sum_l; + if (D > 0) { + float this_scale = (sum_w * sum_xl - sum_x * sum_l)/D; + float this_min = (sum_l2 * sum_x - sum_l * sum_xl)/D; + if (this_min > 0) { + this_min = 0; + this_scale = sum_xl / sum_l2; + } + float mad = 0; + for (int i = 0; i < n; ++i) { + float diff = this_scale * Laux[i] + this_min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + mad += w * diff; + } + if (mad < best_mad) { + for (int i = 0; i < n; ++i) { + L[i] = Laux[i]; + } + best_mad = mad; + scale = this_scale; + min = this_min; + } + } + } + *the_min = -min; + return scale; +} + +static float make_qp_quants(int n, int nmax, const float * restrict x, uint8_t * restrict L, const float * quant_weights) { + float max = 0; + for (int i = 0; i < n; ++i) { + max = MAX(max, x[i]); + } + if (!max) { // all zero + for (int i = 0; i < n; ++i) { L[i] = 0; } + return 0.f; + } + float iscale = nmax / max; + for (int i = 0; i < n; ++i) { + L[i] = nearest_int(iscale * x[i]); + } + float scale = 1/iscale; + float best_mse = 0; + for (int i = 0; i < n; ++i) { + float diff = x[i] - scale*L[i]; + float w = quant_weights[i]; + best_mse += w*diff*diff; + } + for (int is = -4; is <= 4; ++is) { + if (is == 0) continue; + float iscale_is = (0.1f*is + nmax)/max; + float scale_is = 1/iscale_is; + float mse = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale_is*x[i]); + l = MIN(nmax, l); + float diff = x[i] - scale_is*l; + float w = quant_weights[i]; + mse += w*diff*diff; + } + if (mse < best_mse) { + best_mse = mse; + iscale = iscale_is; + } + } + float sumlx = 0; + float suml2 = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MIN(nmax, l); + L[i] = l; + float w = quant_weights[i]; + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + for (int itry = 0; itry < 5; ++itry) { + int n_changed = 0; + for (int i = 0; i < n; ++i) { + float w = quant_weights[i]; + float slx = sumlx - w*x[i]*L[i]; + float sl2 = suml2 - w*L[i]*L[i]; + if (slx > 0 && sl2 > 0) { + int new_l = nearest_int(x[i] * sl2 / slx); + new_l = MIN(nmax, new_l); + if (new_l != L[i]) { + slx += w*x[i]*new_l; + sl2 += w*new_l*new_l; + if (slx*slx*suml2 > sumlx*sumlx*sl2) { + L[i] = new_l; sumlx = slx; suml2 = sl2; + ++n_changed; + } + } + } + } + if (!n_changed) { + break; + } + } + return sumlx / suml2; +} + +static void quantize_row_q2_K_impl(const 
float * restrict x, block_q2_K * restrict y, int k, const float * restrict quant_weights) { + GGML_ASSERT(quant_weights); + assert(k % QK_K == 0); + const int nb = k / QK_K; + const bool requantize = true; + + uint8_t L[QK_K]; + uint8_t Laux[16]; + float mins[QK_K/16]; + float scales[QK_K/16]; + float sw[QK_K/16]; + float weight[QK_K/16]; + uint8_t Ls[QK_K/16], Lm[QK_K/16]; + + for (int i = 0; i < nb; i++) { + memset(sw, 0, QK_K/16*sizeof(float)); + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = sumx2/QK_K; + for (int j = 0; j < QK_K/16; ++j) { + const float * restrict qw = quant_weights + QK_K * i + 16*j; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); + for (int l = 0; l < 16; ++l) sw[j] += weight[l]; + scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + } + + float dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); + float mm = make_qp_quants(QK_K/16, 15, mins, Lm, sw); + y[i].d = GGML_FP32_TO_FP16(dm); + y[i].dmin = GGML_FP32_TO_FP16(mm); + dm = GGML_FP16_TO_FP32(y[i].d); + mm = GGML_FP16_TO_FP32(y[i].dmin); + + for (int j = 0; j < QK_K/16; ++j) { + y[i].scales[j] = Ls[j] | (Lm[j] << 4); + } + + if (requantize) { + for (int j = 0; j < QK_K/16; ++j) { + const float d = dm * (y[i].scales[j] & 0xF); + if (!d) continue; + const float m = mm * (y[i].scales[j] >> 4); + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int((x[16*j + ii] + m)/d); + l = MAX(0, MIN(3, l)); + L[16*j + ii] = l; + } + } + } + +#if QK_K == 256 + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } +#else + for (int l = 0; l < 16; ++l) { + y[i].qs[l] = L[l] | (L[l + 16] << 2) | (L[l + 32] << 4) | (L[l + 48] << 6); + } +#endif + + x += QK_K; + + } +} + +size_t quantize_q2_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q2_K, n_per_row); + if (!quant_weights) { + quantize_row_q2_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q2_K_impl(src, (block_q2_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + //========================= 3-bit (de)-quantization void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict y, int k) { @@ -2584,14 +2821,6 @@ static const uint8_t ksigns_iq2xs[128] = { static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { - (void)x; - (void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2618,33 +2847,8 @@ void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y } } -void quantize_row_iq2_xxs(const float * restrict x, void * restrict vy, int k) { - assert(k % QK_K == 0); - block_iq2_xxs * restrict y = vy; - quantize_row_iq2_xxs_reference(x, y, k); -} - -size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist) { - assert(k % QK_K == 0); - (void)hist; // TODO: collect 
histograms - - for (int j = 0; j < n; j += k) { - block_iq2_xxs * restrict y = (block_iq2_xxs *)dst + j/QK_K; - quantize_row_iq2_xxs_reference(src + j, y, k); - } - return (n/QK_K*sizeof(block_iq2_xxs)); -} - // ====================== 2.3125 bpw (de)-quantization -void quantize_row_iq2_xs_reference(const float * restrict x, block_iq2_xs * restrict y, int k) { - (void)x; - (void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2670,23 +2874,6 @@ void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, } } -void quantize_row_iq2_xs(const float * restrict x, void * restrict vy, int k) { - assert(k % QK_K == 0); - block_iq2_xs * restrict y = vy; - quantize_row_iq2_xs_reference(x, y, k); -} - -size_t ggml_quantize_iq2_xs(const float * src, void * dst, int n, int k, int64_t * hist) { - assert(k % QK_K == 0); - (void)hist; // TODO: collect histograms - - for (int j = 0; j < n; j += k) { - block_iq2_xs * restrict y = (block_iq2_xs *)dst + j/QK_K; - quantize_row_iq2_xs_reference(src + j, y, k); - } - return (n/QK_K*sizeof(block_iq2_xs)); -} - //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -7730,3 +7917,666 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest *s = 0.125f * sumf; #endif } + +// ================================ IQ2 quantization ============================================= + +typedef struct { + uint64_t * grid; + int * map; + uint16_t * neighbours; +} iq2_entry_t; + +static iq2_entry_t iq2_data[2] = { + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, +}; + +static inline int iq2_data_index(int grid_size) { + GGML_ASSERT(grid_size == 256 || grid_size == 512); + return grid_size == 256 ? 0 : 1; +} + +static int iq2_compare_func(const void * left, const void * right) { + const int * l = (const int *)left; + const int * r = (const int *)right; + return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 
1 : 0; +} + +static void q2xs_init_impl(int grid_size) { + const int gindex = iq2_data_index(grid_size); + if (iq2_data[gindex].grid) { + return; + } + static const uint16_t kgrid_256[256] = { + 0, 2, 5, 8, 10, 17, 20, 32, 34, 40, 42, 65, 68, 80, 88, 97, + 100, 128, 130, 138, 162, 257, 260, 272, 277, 320, 388, 408, 512, 514, 546, 642, + 1025, 1028, 1040, 1057, 1060, 1088, 1090, 1096, 1120, 1153, 1156, 1168, 1188, 1280, 1282, 1288, + 1312, 1350, 1385, 1408, 1425, 1545, 1552, 1600, 1668, 1700, 2048, 2053, 2056, 2068, 2088, 2113, + 2116, 2128, 2130, 2184, 2308, 2368, 2562, 2580, 4097, 4100, 4112, 4129, 4160, 4192, 4228, 4240, + 4245, 4352, 4360, 4384, 4432, 4442, 4480, 4644, 4677, 5120, 5128, 5152, 5157, 5193, 5248, 5400, + 5474, 5632, 5654, 6145, 6148, 6160, 6208, 6273, 6400, 6405, 6560, 6737, 8192, 8194, 8202, 8260, + 8289, 8320, 8322, 8489, 8520, 8704, 8706, 9217, 9220, 9232, 9280, 9302, 9472, 9537, 9572, 9872, + 10248, 10272, 10388, 10820, 16385, 16388, 16400, 16408, 16417, 16420, 16448, 16456, 16470, 16480, 16513, 16516, + 16528, 16640, 16672, 16737, 16768, 16773, 16897, 16912, 16968, 16982, 17000, 17408, 17416, 17440, 17536, 17561, + 17682, 17700, 17920, 18433, 18436, 18448, 18496, 18501, 18688, 18776, 18785, 18818, 19013, 19088, 20480, 20488, + 20497, 20505, 20512, 20608, 20616, 20740, 20802, 20900, 21137, 21648, 21650, 21770, 22017, 22100, 22528, 22545, + 22553, 22628, 22848, 23048, 24580, 24592, 24640, 24680, 24832, 24917, 25112, 25184, 25600, 25605, 25872, 25874, + 25988, 26690, 32768, 32770, 32778, 32833, 32898, 33028, 33048, 33088, 33297, 33793, 33796, 33808, 33813, 33856, + 33888, 34048, 34118, 34196, 34313, 34368, 34400, 34818, 35076, 35345, 36868, 36880, 36900, 36928, 37025, 37142, + 37248, 37445, 37888, 37922, 37956, 38225, 39041, 39200, 40962, 41040, 41093, 41225, 41472, 42008, 43088, 43268, + }; + static const uint16_t kgrid_512[512] = { + 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, + 73, 80, 82, 85, 88, 97, 100, 128, 130, 133, 136, 145, 148, 153, 160, 257, + 260, 262, 265, 272, 274, 277, 280, 282, 289, 292, 320, 322, 325, 328, 337, 340, + 352, 360, 385, 388, 400, 512, 514, 517, 520, 529, 532, 544, 577, 580, 592, 597, + 640, 650, 1025, 1028, 1030, 1033, 1040, 1042, 1045, 1048, 1057, 1060, 1088, 1090, 1093, 1096, + 1105, 1108, 1110, 1120, 1153, 1156, 1168, 1280, 1282, 1285, 1288, 1297, 1300, 1312, 1345, 1348, + 1360, 1377, 1408, 1537, 1540, 1552, 1574, 1600, 1602, 1668, 2048, 2050, 2053, 2056, 2058, 2065, + 2068, 2080, 2085, 2113, 2116, 2128, 2136, 2176, 2208, 2218, 2305, 2308, 2320, 2368, 2433, 2441, + 2560, 2592, 2600, 2710, 2720, 4097, 4100, 4102, 4105, 4112, 4114, 4117, 4120, 4129, 4132, 4160, + 4162, 4165, 4168, 4177, 4180, 4192, 4202, 4225, 4228, 4240, 4352, 4354, 4357, 4360, 4369, 4372, + 4384, 4417, 4420, 4432, 4480, 4500, 4502, 4609, 4612, 4614, 4624, 4672, 4704, 5120, 5122, 5125, + 5128, 5137, 5140, 5152, 5185, 5188, 5193, 5200, 5220, 5248, 5377, 5380, 5392, 5440, 5632, 5652, + 5705, 6145, 6148, 6160, 6162, 6208, 6228, 6278, 6400, 6405, 6502, 6737, 6825, 8192, 8194, 8197, + 8200, 8202, 8209, 8212, 8224, 8257, 8260, 8272, 8320, 8352, 8449, 8452, 8464, 8512, 8520, 8549, + 8704, 8738, 8832, 8872, 9217, 9220, 9232, 9257, 9280, 9472, 9537, 9554, 9625, 9729, 9754, 9894, + 10240, 10248, 10250, 10272, 10325, 10376, 10402, 10600, 10640, 10760, 10784, 10882, 10888, 10890, 16385, 16388, + 16390, 16393, 16400, 16402, 16405, 16408, 16417, 16420, 16448, 16450, 16453, 16456, 16458, 16465, 16468, 16480, + 16485, 16513, 16516, 16528, 16640, 16642, 16645, 16648, 
16657, 16660, 16672, 16705, 16708, 16720, 16768, 16773, + 16802, 16897, 16900, 16912, 16914, 16937, 16960, 17408, 17410, 17413, 17416, 17425, 17428, 17433, 17440, 17473, + 17476, 17488, 17536, 17556, 17665, 17668, 17680, 17700, 17728, 17818, 17920, 17930, 17988, 18000, 18433, 18436, + 18448, 18496, 18501, 18516, 18530, 18688, 18705, 18756, 18768, 18793, 18948, 20480, 20482, 20485, 20488, 20497, + 20500, 20512, 20520, 20545, 20548, 20560, 20608, 20737, 20740, 20752, 20757, 20800, 20802, 20992, 21060, 21162, + 21505, 21508, 21520, 21537, 21568, 21600, 21633, 21665, 21760, 21768, 21888, 21896, 22049, 22120, 22177, 22528, + 22548, 22593, 22608, 22681, 22810, 22848, 22850, 23173, 24577, 24580, 24592, 24640, 24660, 24674, 24710, 24745, + 24832, 25124, 25162, 25234, 25600, 25622, 25872, 25920, 25925, 26020, 26625, 26730, 26917, 27142, 27220, 27234, + 32768, 32770, 32773, 32776, 32785, 32788, 32800, 32810, 32833, 32836, 32848, 32896, 32898, 32936, 32938, 33025, + 33028, 33030, 33040, 33088, 33105, 33113, 33280, 33312, 33408, 33410, 33440, 33448, 33793, 33796, 33808, 33810, + 33813, 33856, 33888, 33929, 34048, 34116, 34213, 34328, 34410, 34816, 34824, 34853, 34906, 34944, 34946, 34984, + 35078, 35362, 35456, 35464, 35478, 35496, 36865, 36868, 36880, 36928, 36950, 36996, 37120, 37154, 37220, 37462, + 37513, 37888, 37893, 37956, 37968, 37976, 38185, 38288, 38290, 38465, 38993, 39078, 39241, 39445, 39520, 40960, + 40962, 40968, 40970, 40992, 41002, 41120, 41297, 41305, 41382, 41472, 41474, 41480, 41514, 41600, 41632, 42048, + 42133, 42597, 42648, 43018, 43040, 43042, 43048, 43168, 43176, 43268, 43396, 43398, 43560, 43562, 43665, 43690, + }; + const int kmap_size = 43692; + const int nwant = 2; + const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; + uint64_t * kgrid_q2xs; + int * kmap_q2xs; + uint16_t * kneighbors_q2xs; + + printf("================================================================= %s(grid_size = %d)\n", __func__, grid_size); + uint64_t * the_grid = (uint64_t *)malloc(grid_size*sizeof(uint64_t)); + for (int k = 0; k < grid_size; ++k) { + int8_t * pos = (int8_t *)(the_grid + k); + for (int i = 0; i < 8; ++i) { + int l = (kgrid[k] >> 2*i) & 0x3; + pos[i] = 2*l + 1; + } + } + kgrid_q2xs = the_grid; + iq2_data[gindex].grid = the_grid; + kmap_q2xs = (int *)malloc(kmap_size*sizeof(int)); + iq2_data[gindex].map = kmap_q2xs; + for (int i = 0; i < kmap_size; ++i) kmap_q2xs[i] = -1; + uint64_t aux64; + uint8_t * aux8 = (uint8_t *)&aux64; + for (int i = 0; i < grid_size; ++i) { + aux64 = kgrid_q2xs[i]; + uint16_t index = 0; + for (int k=0; k<8; ++k) { + uint16_t q = (aux8[k] - 1)/2; + index |= (q << 2*k); + } + kmap_q2xs[index] = i; + } + int8_t pos[8]; + int * dist2 = (int *)malloc(2*grid_size*sizeof(int)); + int num_neighbors = 0, num_not_in_map = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + ++num_not_in_map; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + int n = 0; int d2 = dist2[0]; + int nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + ++n; + } + num_neighbors += n; + } + printf("%s: %d neighbours in total\n", __func__, num_neighbors); 
+ kneighbors_q2xs = (uint16_t *)malloc((num_neighbors + num_not_in_map)*sizeof(uint16_t)); + iq2_data[gindex].neighbours = kneighbors_q2xs; + int counter = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + kmap_q2xs[i] = -(counter + 1); + int d2 = dist2[0]; + uint16_t * start = &kneighbors_q2xs[counter++]; + int n = 0, nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + kneighbors_q2xs[counter++] = dist2[2*j+1]; + ++n; + } + *start = n; + } + free(dist2); +} + +void ggml_init_iq2_quantization(enum ggml_type type) { + if (type == GGML_TYPE_IQ2_XXS) { + q2xs_init_impl(256); + } + else if (type == GGML_TYPE_IQ2_XS) { + q2xs_init_impl(512); + } + else { + fprintf(stderr, "======================== Why are you calling %s with type %d?\n", __func__, (int)type); + } +} + +static void q2xs_deinit_impl(int grid_size) { + GGML_ASSERT(grid_size == 256 || grid_size == 512 || grid_size == 1024); + const int gindex = iq2_data_index(grid_size); + if (iq2_data[gindex].grid) { + free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; + free(iq2_data[gindex].map); iq2_data[gindex].map = NULL; + free(iq2_data[gindex].neighbours); iq2_data[gindex].neighbours = NULL; + } +} + +void ggml_deinit_iq2_quantization(enum ggml_type type) { + if (type == GGML_TYPE_IQ2_XXS) { + q2xs_deinit_impl(256); + } + else if (type == GGML_TYPE_IQ2_XS) { + q2xs_deinit_impl(512); + } + else { + fprintf(stderr, "======================== Why are you calling %s with type %d?\n", __func__, (int)type); + } +} + +static int iq2_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float scale, int8_t * restrict L) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_d2 = FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float d2 = 0; + for (int i = 0; i < 8; ++i) { + float q = pg[i]; + float diff = scale*q - xval[i]; + d2 += weight[i]*diff*diff; + } + if (d2 < best_d2) { + best_d2 = d2; grid_index = neighbours[j]; + } + } + GGML_ASSERT(grid_index >= 0); + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(256); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights); + GGML_ASSERT(kgrid_q2xs); + GGML_ASSERT(kmap_q2xs); + GGML_ASSERT(kneighbors_q2xs); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int nbl = n/256; + + block_iq2_xxs * y = vy; + + float scales[QK_K/32]; + float weight[32]; + float xval[32]; + int8_t L[32]; + int8_t Laux[32]; + float waux[32]; + bool is_on_grid[4]; + bool is_on_grid_aux[4]; + uint8_t block_signs[4]; + 
uint32_t q2[2*(QK_K/32)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float * xb = xbl + 32*ib; + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 32; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 4; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 32; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + memset(L, 0, 32); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 4; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 32; ++i) L[i] = Laux[i]; + for (int k = 0; k < 4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 4; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q2xs + grid_index); + for (int i = 0; i < 8; ++i) L[8*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. 
+ scale = -scale; + for (int k = 0; k < 4; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + q2[2*ib+0] |= (grid_index << 8*k); + q2[2*ib+1] |= (block_signs[k] << 7*k); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + float sumqx = 0, sumq2 = 0; + for (int ib = 0; ib < QK_K/32; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + q2[2*ib+1] |= ((uint32_t)l << 28); + const float * xb = xbl + 32*ib; + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + const uint8_t * aux8 = (const uint8_t *)(q2 + 2*ib); + const float db = d * (1 + 2*l); + uint32_t u = 0; + for (int k = 0; k < 4; ++k) { + const int8_t * signs = keven_signs_q2xs + 8*((q2[2*ib+1] >> 7*k) & 127); + const float * xk = xb + 8*k; + const float * wk = weight + 8*k; + const uint8_t * grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); + float best_mse = 0; int best_index = aux8[k]; + for (int j = 0; j < 8; ++j) { + float diff = db * grid[j] * signs[j] - xk[j]; + best_mse += wk[j] * diff * diff; + } + for (int idx = 0; idx < 256; ++idx) { + grid = (const uint8_t *)(kgrid_q2xs + idx); + float mse = 0; + for (int j = 0; j < 8; ++j) { + float diff = db * grid[j] * signs[j] - xk[j]; + mse += wk[j] * diff * diff; + } + if (mse < best_mse) { + best_mse = mse; best_index = idx; + } + } + u |= (best_index << 8*k); + grid = (const uint8_t *)(kgrid_q2xs + best_index); + //grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); + for (int j = 0; j < 8; ++j) { + float q = db * grid[j] * signs[j]; + sumqx += wk[j] * q * xk[j]; + sumq2 += wk[j] * q * q; + } + } + q2[2*ib] = u; + if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); + } + memcpy(y[ibl].qs, q2, QK_K/4); + } +} + +static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(512); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights); + GGML_ASSERT(kmap_q2xs); + GGML_ASSERT(kgrid_q2xs); + GGML_ASSERT(kneighbors_q2xs); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int nbl = n/256; + + block_iq2_xs * y = vy; + + float scales[QK_K/16]; + float weight[16]; + float xval[16]; + int8_t L[16]; + int8_t Laux[16]; + float waux[16]; + bool is_on_grid[2]; + bool is_on_grid_aux[2]; + uint8_t block_signs[2]; + uint16_t q2[2*(QK_K/16)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + memset(y[ibl].scales, 0, QK_K/32); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/16; ++ib) { + const float * xb = xbl + 16*ib; + const float * qw = quant_weights + QK_K*ibl + 16*ib; + for (int i = 0; i < 16; ++i) weight[i] = qw[i] * 
sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 16; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 2; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 16; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + memset(L, 0, 16); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + is_on_grid[0] = is_on_grid[1] = true; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 2; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 16; ++i) L[i] = Laux[i]; + for (int k = 0; k < 2; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 2; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 2; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + L[8*k + i] = l; + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + scale = -scale; + for (int k = 0; k < 2; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 2; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + q2[2*ib+k] = grid_index | (block_signs[k] << 9); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + for (int ib = 0; ib < QK_K/16; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + if (ib%2 == 0) y[ibl].scales[ib/2] = 
l; + else y[ibl].scales[ib/2] |= (l << 4); + } + memcpy(y[ibl].qs, q2, QK_K/4); + + } +} + +size_t quantize_iq2_xxs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq2_xxs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xxs); + } + return nrow * nblock * sizeof(block_iq2_xxs); +} + +size_t quantize_iq2_xs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq2_xs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xs); + } + return nrow * nblock * sizeof(block_iq2_xs); +} + diff --git a/ggml-quants.h b/ggml-quants.h index df5e7ae80..e5d110230 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -196,8 +196,6 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k); void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k); -void quantize_row_iq2_xs_reference (const float * restrict x, block_iq2_xs * restrict y, int k); void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); @@ -212,8 +210,6 @@ void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); -void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k); -void quantize_row_iq2_xs (const float * restrict x, void * restrict y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); @@ -246,3 +242,11 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); + +// +// Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") +// +size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); + diff --git a/ggml.c b/ggml.c index bcfb6652c..52467475a 100644 --- a/ggml.c +++ b/ggml.c @@ -585,8 +585,8 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .type_size = sizeof(block_iq2_xxs), .is_quantized = true, .to_float = (ggml_to_float_t) dequantize_row_iq2_xxs, - .from_float = quantize_row_iq2_xxs, - .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xxs_reference, + .from_float = NULL, + .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, @@ -596,8 +596,8 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .type_size = sizeof(block_iq2_xs), .is_quantized = true, .to_float = (ggml_to_float_t) dequantize_row_iq2_xs, - .from_float = quantize_row_iq2_xs, - .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xs_reference, + .from_float = NULL, + .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, @@ -18665,8 +18665,11 @@ size_t ggml_quantize_q8_0(const float * src, void * dst, int n, int k, int64_t * return (n/QK8_0*sizeof(block_q8_0)); } -size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist) { +size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, + int nrows, int n_per_row, int64_t * hist, const float * imatrix) { + (void)imatrix; size_t result = 0; + int n = nrows * n_per_row; switch (type) { case GGML_TYPE_Q4_0: { @@ -18701,8 +18704,11 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_Q2_K: { GGML_ASSERT(start % QK_K == 0); - block_q2_K * block = (block_q2_K*)dst + start / QK_K; - result = ggml_quantize_q2_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q2_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q3_K: { @@ -18731,14 +18737,22 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_IQ2_XXS: { GGML_ASSERT(start % QK_K == 0); - block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; - result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + GGML_ASSERT(imatrix); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq2_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_IQ2_XS: { GGML_ASSERT(start % QK_K == 0); - block_iq2_xs * block = (block_iq2_xs*)dst + start / QK_K; - result = ggml_quantize_iq2_xs(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + GGML_ASSERT(imatrix); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq2_xs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + 
GGML_ASSERT(result == row_size * nrows);
             } break;
         case GGML_TYPE_F16:
             {
diff --git a/ggml.h b/ggml.h
index b18ba7812..1187074f7 100644
--- a/ggml.h
+++ b/ggml.h
@@ -2067,10 +2067,13 @@ extern "C" {
     GGML_API size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist);
-    GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist);
-    GGML_API size_t ggml_quantize_iq2_xs (const float * src, void * dst, int n, int k, int64_t * hist);

-    GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist);
+    GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst,
+            int start, int nrows, int n_per_row, int64_t * hist, const float * imatrix);
+
+    // These are needed for IQ2_XS and IQ2_XXS quantizations
+    GGML_API void ggml_init_iq2_quantization(enum ggml_type type);
+    GGML_API void ggml_deinit_iq2_quantization(enum ggml_type type);

     //
     // Importance matrix
diff --git a/llama.cpp b/llama.cpp
index 8e20e72a2..107b05114 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -8429,9 +8429,23 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty
         if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) {
             new_type = GGML_TYPE_Q8_0;
         }
+        else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) {
+            new_type = GGML_TYPE_Q5_K;
+        }
         else if (new_type != GGML_TYPE_Q8_0) {
             new_type = GGML_TYPE_Q6_K;
         }
+    } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) {
+        if (name.find("attn_v.weight") != std::string::npos) {
+            if (qs.model.hparams.n_gqa() >= 4 || qs.model.hparams.n_expert >= 4) new_type = GGML_TYPE_Q4_K;
+            else new_type = GGML_TYPE_Q2_K;
+            ++qs.i_attention_wv;
+        }
+        else if (name.find("ffn_down") != std::string::npos) {
+            if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q2_K;
+            ++qs.i_feed_forward_w2;
+        }
+        else if (name == "token_embd.weight") new_type = GGML_TYPE_Q2_K;
     } else if (name.find("attn_v.weight") != std::string::npos) {
         if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K;
         else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) {
@@ -8601,6 +8615,13 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
     if (params->only_copy) {
         ftype = model.ftype;
     }
+    const std::unordered_map<std::string, std::vector<float>> * imatrix_data = nullptr;
+    if (params->imatrix) {
+        imatrix_data = static_cast<const std::unordered_map<std::string, std::vector<float>>*>(params->imatrix);
+        if (imatrix_data) {
+            printf("================================ Have weights data with %d entries\n",int(imatrix_data->size()));
+        }
+    }

     const size_t align = GGUF_DEFAULT_ALIGNMENT;
     struct gguf_context * ctx_out = gguf_init_empty();
@@ -8658,6 +8679,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
     // placeholder for the meta data
     ::zeros(fout, meta_size);

+    std::set<ggml_type> used_iq2;
+
     for (int i = 0; i < ml.n_tensors; ++i) {
         struct ggml_tensor * tensor = ml.get_tensor_meta(i);

@@ -8710,6 +8733,35 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
         } else {
             const size_t nelements = ggml_nelements(tensor);

+            if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS) && used_iq2.find(new_type) == used_iq2.end()) {
+                ggml_init_iq2_quantization(new_type);
+                used_iq2.insert(new_type);
+            }
+
+            const float * imatrix = nullptr;
+            if (imatrix_data) {
+                auto it = imatrix_data->find(tensor->name);
+                if (it == imatrix_data->end()) {
+                    printf("\n====== %s: did not find weights for %s\n", __func__, tensor->name);
+                } else {
+                    if (it->second.size() == (size_t)tensor->ne[0]) {
+                        imatrix = it->second.data();
+                    } else {
+                        printf("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__,
+                                int(it->second.size()), int(tensor->ne[0]), tensor->name);
+                    }
+                }
+            }
+            if ((new_type == GGML_TYPE_IQ2_XXS ||
+                 new_type == GGML_TYPE_IQ2_XS ||
+                (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) {
+                fprintf(stderr, "\n\n============================================================\n");
+                fprintf(stderr, "Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name);
+                fprintf(stderr, "The result will be garbage, so bailing out\n");
+                fprintf(stderr, "============================================================\n\n");
+                throw std::runtime_error(format("Missing importance matrix for tensor %s in a very low-bit quantization", tensor->name));
+            }
+
             float * f32_data;

             if (tensor->type == GGML_TYPE_F32) {
@@ -8730,21 +8782,28 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
             new_data = work.data();

             std::array<int64_t, 1 << 4> hist_cur = {};

-            static const int chunk_size = 32 * 512;
+            const int n_per_row = tensor->ne[0];
+            const int nrows = nelements / n_per_row;
+
+            static const int min_chunk_size = 32 * 512;
+            const int chunk_size = n_per_row >= min_chunk_size ? n_per_row : n_per_row * ((min_chunk_size + n_per_row - 1)/n_per_row);
+
             const int nchunk = (nelements + chunk_size - 1)/chunk_size;
             const int nthread_use = nthread > 1 ? std::max(1, std::min(nthread, nchunk)) : 1;
             if (nthread_use < 2) {
-                new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nelements, hist_cur.data());
+                new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, hist_cur.data(), imatrix);
             } else {
-                size_t counter = 0;
+                int counter = 0;
                 new_size = 0;
-                auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, nelements]() {
+                auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, chunk_size,
+                        nrows, n_per_row, imatrix]() {
                     std::array<int64_t, 1 << 4> local_hist = {};
+                    const int nrows_per_chunk = chunk_size / n_per_row;
                     size_t local_size = 0;
                     while (true) {
+                        std::unique_lock<std::mutex> lock(mutex);
-                        size_t first = counter; counter += chunk_size;
-                        if (first >= nelements) {
+                        int first_row = counter; counter += nrows_per_chunk;
+                        if (first_row >= nrows) {
                             if (local_size > 0) {
                                 for (int j=0; j
-            LLAMA_LOG_INFO("size = %8.2f MiB -> %8.2f MiB | hist: ", ggml_nbytes(tensor)/1024.0/1024.0, new_size/1024.0/1024.0);
+            LLAMA_LOG_INFO("size = %8.2f MiB -> %8.2f MiB", ggml_nbytes(tensor)/1024.0/1024.0, new_size/1024.0/1024.0);
             int64_t tot_count = 0;
             for (size_t i = 0; i < hist_cur.size(); i++) {
                 hist_all[i] += hist_cur[i];
@@ -8774,6 +8834,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
             }

             if (tot_count > 0) {
+                LLAMA_LOG_INFO(" | hist: ");
                 for (size_t i = 0; i < hist_cur.size(); i++) {
                     LLAMA_LOG_INFO("%5.3f ", hist_cur[i] / float(nelements));
                 }
@@ -8802,6 +8863,10 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s

     fout.close();

+    for (auto type : used_iq2) {
+        ggml_deinit_iq2_quantization(type);
+    }
+
     gguf_free(ctx_out);

     LLAMA_LOG_INFO("%s: model size = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0);
@@ -9166,6 +9231,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() {
         /*.quantize_output_tensor =*/ true,
         /*.only_copy              =*/ false,
         /*.pure                   =*/ false,
+        /*.imatrix                =*/ nullptr,
     };

     return result;
diff --git a/llama.h b/llama.h
index 01d6fafaa..79c8335b6 100644
--- a/llama.h
+++ b/llama.h
@@ -249,6 +249,7 @@ extern "C" {
         bool quantize_output_tensor; // quantize output.weight
         bool only_copy;              // only copy tensors - ftype, allow_requantize and quantize_output_tensor are ignored
         bool pure;                   // disable k-quant mixtures and quantize all tensors to the same type
+        void * imatrix;              // pointer to importance matrix data
     } llama_model_quantize_params;

     // grammar types
diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp
index d9b8b106a..22a7856d4 100644
--- a/tests/test-backend-ops.cpp
+++ b/tests/test-backend-ops.cpp
@@ -56,7 +56,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m
         GGML_ASSERT(size % ggml_blck_size(tensor->type) == 0);
         std::vector<uint8_t> dataq(ggml_row_size(tensor->type, size));
         int64_t hist[16];
-        ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist);
+        ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size/tensor->ne[0], tensor->ne[0], hist, nullptr);
         ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size());
     } else if (tensor->type == GGML_TYPE_I8 || tensor->type == GGML_TYPE_I16 || tensor->type == GGML_TYPE_I32) {
         // This is going to create some weird integers though.
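
The three quantize flags introduced above are used together roughly like this (model, imatrix and tensor names here are illustrative only): quantize --imatrix wiki.imatrix --exclude-weights output.weight model-f16.gguf model-iq2_xs.gguf IQ2_XS 8. The importance matrix file itself is a small binary blob, and the sketch below is a minimal standalone reader for the layout that load_imatrix() in quantize.cpp above expects: an int entry count, then per entry a name length, the name bytes, a call count, a value count, and that many floats (accumulated sums that the tool averages by the call count). This is only a sketch of the format, not code from the patches; the program name and output formatting are made up.

// imatrix-dump.cpp (illustrative name): dump the entries of an imatrix file.
#include <cstdint>
#include <cstdio>
#include <fstream>
#include <vector>

int main(int argc, char ** argv) {
    if (argc < 2) {
        std::fprintf(stderr, "usage: %s <imatrix file>\n", argv[0]);
        return 1;
    }
    std::ifstream in(argv[1], std::ios::binary);
    if (!in) {
        std::fprintf(stderr, "failed to open %s\n", argv[1]);
        return 1;
    }
    int32_t n_entries = 0;
    in.read((char *)&n_entries, sizeof(n_entries));   // number of tensor entries
    for (int32_t i = 0; i < n_entries && in; ++i) {
        int32_t len = 0;
        in.read((char *)&len, sizeof(len));           // length of the tensor name
        std::vector<char> name(len + 1, 0);
        in.read(name.data(), len);                    // tensor name bytes
        int32_t ncall = 0, nval = 0;
        in.read((char *)&ncall, sizeof(ncall));       // how many times activations were accumulated
        in.read((char *)&nval,  sizeof(nval));        // number of float values that follow
        std::vector<float> sums(nval > 0 ? nval : 0);
        in.read((char *)sums.data(), sums.size()*sizeof(float));
        if (!in) {
            std::fprintf(stderr, "truncated entry %d\n", i);
            return 1;
        }
        // quantize.cpp turns these sums into averages by dividing by ncall
        std::printf("%-48s ncall = %-6d nval = %d\n", name.data(), ncall, nval);
    }
    return 0;
}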
From ac32902a87147f78d63c931aa8a23dee762660e7 Mon Sep 17 00:00:00 2001 From: Karthik Kumar Viswanathan <195178+guilt@users.noreply.github.com> Date: Sun, 14 Jan 2024 00:41:44 -0800 Subject: [PATCH 311/811] llama : support WinXP build with MinGW 8.1.0 (#3419) --- CMakeLists.txt | 8 ++++++-- llama.cpp | 4 ++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 668669c6d..2741568ed 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.13) # for add_link_options +cmake_minimum_required(VERSION 3.14) # for add_link_options and implicit target directories. project("llama.cpp" C CXX) set(CMAKE_EXPORT_COMPILE_COMMANDS ON) @@ -76,6 +76,10 @@ if (NOT MSVC) option(LLAMA_F16C "llama: enable F16C" ${INS_ENB}) endif() +if (WIN32) + option(LLAMA_WIN_VER "llama: Windows Version" 0x602) +endif() + # 3rd party libs option(LLAMA_ACCELERATE "llama: enable Accelerate framework" ON) option(LLAMA_BLAS "llama: use BLAS" OFF) @@ -686,7 +690,7 @@ endif() if (MINGW) # Target Windows 8 for PrefetchVirtualMemory - add_compile_definitions(_WIN32_WINNT=0x602) + add_compile_definitions(_WIN32_WINNT=${LLAMA_WIN_VER}) endif() # diff --git a/llama.cpp b/llama.cpp index 107b05114..51e9bdaed 100644 --- a/llama.cpp +++ b/llama.cpp @@ -987,6 +987,7 @@ struct llama_mmap { } if (prefetch > 0) { +#if _WIN32_WINNT >= 0x602 // PrefetchVirtualMemory is only present on Windows 8 and above, so we dynamically load it BOOL (WINAPI *pPrefetchVirtualMemory) (HANDLE, ULONG_PTR, PWIN32_MEMORY_RANGE_ENTRY, ULONG); HMODULE hKernel32 = GetModuleHandleW(L"kernel32.dll"); @@ -1004,6 +1005,9 @@ struct llama_mmap { llama_format_win_err(GetLastError()).c_str()); } } +#else + throw std::runtime_error("PrefetchVirtualMemory unavailable"); +#endif } } From 5f5fe1bd608fa2ed42af97b5f2ea31be6625fc48 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 14 Jan 2024 09:44:39 +0100 Subject: [PATCH 312/811] metal : correctly set SIMD support flags on iOS (#4923) * Correctly set support_simdgroup_reduction and support_simdgroup_mm on iPhone/iPad * log a little bit more info on iOS --- ggml-metal.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index cae52c983..2ca726055 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -330,7 +330,6 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { } } -#if TARGET_OS_OSX // print MTL GPU family: GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); @@ -370,6 +369,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? 
"true" : "false"); +#if TARGET_OS_OSX GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); if (ctx->device.maxTransferRate != 0) { GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); From a128c38de862431f1aae9ccc40b792fbc1b8b682 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 10:53:39 +0200 Subject: [PATCH 313/811] Fix ffn_down quantization mix for MoE models (#4927) * Fix ffn_down quantization mix for MoE models In #4872 I did not consider the part where every third tensor is quantized with more bits. Fir MoE this leads to tensors of the same layer being quantized with different number of bits, which is not considered as a possibility in the inference implementation (it is assumed all experts use the same quantization). * Fix the fix * Review suggestion --------- Co-authored-by: Iwan Kawrakow --- llama.cpp | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 51e9bdaed..b1d6015e2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8480,13 +8480,31 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = GGML_TYPE_Q8_0; } } else if (name.find("ffn_down") != std::string::npos) { + const int n_expert = std::max(1, (int)qs.model.hparams.n_expert); + int i_layer, n_layer; + if (n_expert == 1) { + i_layer = qs.i_feed_forward_w2; + n_layer = qs.n_feed_forward_w2; + } else { + // Believe it or not, "experts" in the FFN of Mixtral-8x7B are not consecutive, but iccasionally randomly + // sprinkled in the model. Hence, simply dividing i_feed_forward_w2 by n_expert does not work + // for getting the current layer as I initially thought, and we need to resort to parsing the + // tensor name. + n_layer = qs.n_feed_forward_w2 / n_expert; + if (sscanf(name.c_str(), "blk.%d.ffn_down", &i_layer) != 1) { + throw std::runtime_error(format("Failed to determine layer for tensor %s", name.c_str())); + } + if (i_layer < 0 || i_layer >= n_layer) { + throw std::runtime_error(format("Bad layer %d for tensor %s. Must be in [0, %d)", i_layer, name.c_str(), n_layer)); + } + } if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) { - if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q4_K; + if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { - new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q5_K - : arch != LLM_ARCH_FALCON || use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q4_K + new_type = i_layer < n_layer/16 ? GGML_TYPE_Q5_K + : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { @@ -8494,14 +8512,14 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { if (arch == LLM_ARCH_FALCON) { - new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q6_K : - use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; + new_type = i_layer < n_layer/16 ? GGML_TYPE_Q6_K : + use_more_bits(i_layer, n_layer) ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else { - if (use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; + if (use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; } } - else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && i_layer < n_layer/8) { new_type = GGML_TYPE_Q5_K; } ++qs.i_feed_forward_w2; From 03c526749041c863b0cd842b26b8907e1ea0e0b1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 11:03:19 +0200 Subject: [PATCH 314/811] llama : use LLAMA_LOG_ macros for logging --- llama.cpp | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/llama.cpp b/llama.cpp index b1d6015e2..51821965e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1114,7 +1114,7 @@ struct llama_mlock { suggest = false; } - fprintf(stderr, "warning: failed to mlock %zu-byte buffer (after previously locking %zu bytes): %s\n%s", + LLAMA_LOG_WARN("warning: failed to mlock %zu-byte buffer (after previously locking %zu bytes): %s\n%s", size, this->size, errmsg, suggest ? MLOCK_SUGGESTION : ""); return false; } @@ -1123,7 +1123,7 @@ struct llama_mlock { static void raw_unlock(void * addr, size_t size) { if (munlock(addr, size)) { - fprintf(stderr, "warning: failed to munlock buffer: %s\n", std::strerror(errno)); + LLAMA_LOG_WARN("warning: failed to munlock buffer: %s\n", std::strerror(errno)); } } #elif defined(_WIN32) @@ -1141,7 +1141,7 @@ struct llama_mlock { return true; } if (tries == 2) { - fprintf(stderr, "warning: failed to VirtualLock %zu-byte buffer (after previously locking %zu bytes): %s\n", + LLAMA_LOG_WARN("warning: failed to VirtualLock %zu-byte buffer (after previously locking %zu bytes): %s\n", len, size, llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1150,7 +1150,7 @@ struct llama_mlock { // set size and try again. 
SIZE_T min_ws_size, max_ws_size; if (!GetProcessWorkingSetSize(GetCurrentProcess(), &min_ws_size, &max_ws_size)) { - fprintf(stderr, "warning: GetProcessWorkingSetSize failed: %s\n", + LLAMA_LOG_WARN("warning: GetProcessWorkingSetSize failed: %s\n", llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1163,7 +1163,7 @@ struct llama_mlock { min_ws_size += increment; max_ws_size += increment; if (!SetProcessWorkingSetSize(GetCurrentProcess(), min_ws_size, max_ws_size)) { - fprintf(stderr, "warning: SetProcessWorkingSetSize failed: %s\n", + LLAMA_LOG_WARN("warning: SetProcessWorkingSetSize failed: %s\n", llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1172,7 +1172,7 @@ struct llama_mlock { static void raw_unlock(void * ptr, size_t len) { if (!VirtualUnlock(ptr, len)) { - fprintf(stderr, "warning: failed to VirtualUnlock buffer: %s\n", + LLAMA_LOG_WARN("warning: failed to VirtualUnlock buffer: %s\n", llama_format_win_err(GetLastError()).c_str()); } } @@ -1184,7 +1184,7 @@ struct llama_mlock { } bool raw_lock(const void * addr, size_t len) const { - fprintf(stderr, "warning: mlock not supported on this system\n"); + LLAMA_LOG_WARN("warning: mlock not supported on this system\n"); return false; } @@ -2085,13 +2085,13 @@ namespace GGUFMeta { __func__, override_type_to_str(override->tag), override->key); switch (override->tag) { case LLAMA_KV_OVERRIDE_BOOL: { - printf("%s\n", override->bool_value ? "true" : "false"); + LLAMA_LOG_INFO("%s\n", override->bool_value ? "true" : "false"); } break; case LLAMA_KV_OVERRIDE_INT: { - printf("%" PRId64 "\n", override->int_value); + LLAMA_LOG_INFO("%" PRId64 "\n", override->int_value); } break; case LLAMA_KV_OVERRIDE_FLOAT: { - printf("%.6f\n", override->float_value); + LLAMA_LOG_INFO("%.6f\n", override->float_value); } break; default: // Shouldn't be possible to end up here, but just in case... 
@@ -6993,7 +6993,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< if (match + special_token.length() > raw_text_base_offset + raw_text_base_length) break; #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FF: (%ld %ld %ld) '%s'\n", raw_text->length(), raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); + LLAMA_LOG_WARN("FF: (%ld %ld %ld) '%s'\n", raw_text->length(), raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); #endif auto source = std::distance(buffer.begin(), it); @@ -7006,7 +7006,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< buffer.emplace_after(it, (*raw_text), left_reminder_offset, left_reminder_length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FL: (%ld %ld) '%s'\n", left_reminder_offset, left_reminder_length, raw_text->substr(left_reminder_offset, left_reminder_length).c_str()); + LLAMA_LOG_WARN("FL: (%ld %ld) '%s'\n", left_reminder_offset, left_reminder_length, raw_text->substr(left_reminder_offset, left_reminder_length).c_str()); #endif it++; } @@ -7022,7 +7022,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< buffer.emplace_after(it, (*raw_text), right_reminder_offset, right_reminder_length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FR: (%ld %ld) '%s'\n", right_reminder_offset, right_reminder_length, raw_text->substr(right_reminder_offset, right_reminder_length).c_str()); + LLAMA_LOG_WARN("FR: (%ld %ld) '%s'\n", right_reminder_offset, right_reminder_length, raw_text->substr(right_reminder_offset, right_reminder_length).c_str()); #endif it++; @@ -7038,7 +7038,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< raw_text_base_length = right_reminder_length; #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "RR: (%ld %ld) '%s'\n", raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); + LLAMA_LOG_WARN("RR: (%ld %ld) '%s'\n", raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); #endif } else { if (source == 0) { @@ -7095,7 +7095,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } #ifdef PRETOKENIZERDEBUG - fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); @@ -7116,7 +7116,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); @@ -8641,7 +8641,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (params->imatrix) { imatrix_data = static_cast>*>(params->imatrix); if (imatrix_data) { - printf("================================ Have weights data with %d entries\n",int(imatrix_data->size())); + LLAMA_LOG_INFO("================================ Have weights data with %d 
entries\n",int(imatrix_data->size())); } } @@ -8764,12 +8764,12 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (imatrix_data) { auto it = imatrix_data->find(tensor->name); if (it == imatrix_data->end()) { - printf("\n====== %s: did not find weights for %s\n", __func__, tensor->name); + LLAMA_LOG_INFO("\n====== %s: did not find weights for %s\n", __func__, tensor->name); } else { if (it->second.size() == (size_t)tensor->ne[0]) { imatrix = it->second.data(); } else { - printf("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__, + LLAMA_LOG_INFO("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__, int(it->second.size()), int(tensor->ne[0]), tensor->name); } } @@ -8777,10 +8777,10 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS || (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { - fprintf(stderr, "\n\n============================================================\n"); - fprintf(stderr, "Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); - fprintf(stderr, "The result will be garbage, so bailing out\n"); - fprintf(stderr, "============================================================\n\n"); + LLAMA_LOG_ERROR("\n\n============================================================\n"); + LLAMA_LOG_ERROR("Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); + LLAMA_LOG_ERROR("The result will be garbage, so bailing out\n"); + LLAMA_LOG_ERROR("============================================================\n\n"); throw std::runtime_error(format("Missing importance matrix for tensor %s in a very low-bit quantization", tensor->name)); } From 9408cfdad6b1c090a7e1419d4434edc260b7e47e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 11:08:09 +0200 Subject: [PATCH 315/811] scripts : sync-ggml-am.sh option to skip commits --- scripts/sync-ggml-am.sh | 14 +++++++++++++- scripts/sync-ggml.last | 2 +- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 248cf1023..6b2514a11 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -5,7 +5,7 @@ # Usage: # # $ cd /path/to/llama.cpp -# $ ./scripts/sync-ggml-am.sh +# $ ./scripts/sync-ggml-am.sh -skip hash0,hash1,hash2... 
# set -e @@ -24,6 +24,11 @@ fi lc=$(cat $SRC_LLAMA/scripts/sync-ggml.last) echo "Syncing ggml changes since commit $lc" +to_skip="" +if [ "$1" == "-skip" ]; then + to_skip=$2 +fi + cd $SRC_GGML git log --oneline $lc..HEAD @@ -40,6 +45,13 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then fi while read c; do + if [ -n "$to_skip" ]; then + if [[ $to_skip == *"$c"* ]]; then + echo "Skipping $c" + continue + fi + fi + git format-patch -k $c~1..$c --stdout -- \ include/ggml/ggml*.h \ src/ggml*.h \ diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 753d227a7..be9e408fb 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -1890780da4ea10db88736fcde85f285abf6c64b0 +b306d6e996ec0ace77118fa5098822cdc7f9c88f From bb0c1392479398f9aba86d9ec98db0b95ede6e6d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 13:26:53 +0200 Subject: [PATCH 316/811] llama : check LLAMA_TRACE env for extra logging (#4929) * llama : minor fix indent * llama : check LLAMA_TRACE env for extra logging ggml-ci --- llama.cpp | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/llama.cpp b/llama.cpp index 51821965e..63f37ecdb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2190,6 +2190,11 @@ struct llama_model_loader { LLM_KV llm_kv = LLM_KV(LLM_ARCH_UNKNOWN); llama_model_loader(const std::string & fname, bool use_mmap, const struct llama_model_kv_override * param_overrides_p) : file(fname.c_str(), "rb") { + int trace = 0; + if (getenv("LLAMA_TRACE")) { + trace = atoi(getenv("LLAMA_TRACE")); + } + struct gguf_init_params params = { /*.no_alloc = */ true, /*.ctx = */ &ctx_meta, @@ -2242,11 +2247,10 @@ struct llama_model_loader { type_max = type; } - // TODO: make runtime configurable -#if 0 - struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); - LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); -#endif + if (trace > 0) { + struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); + } } switch (type_max) { @@ -6451,15 +6455,15 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { static const char * hex = "0123456789ABCDEF"; switch (llama_vocab_get_type(vocab)) { - case LLAMA_VOCAB_TYPE_SPM: { - const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; - return vocab.token_to_id.at(buf); - } - case LLAMA_VOCAB_TYPE_BPE: { - return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); - } - default: - GGML_ASSERT(false); + case LLAMA_VOCAB_TYPE_SPM: { + const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; + return vocab.token_to_id.at(buf); + } + case LLAMA_VOCAB_TYPE_BPE: { + return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); + } + default: + GGML_ASSERT(false); } } From 467a882fd2e5b6172897b49aa45aa29bd3f27685 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 16:21:12 +0200 Subject: [PATCH 317/811] Add ability to use importance matrix for all k-quants (#4930) Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 2 +- ggml-quants.c | 443 ++++++++++++++++++++++++++++++++- ggml-quants.h | 5 +- ggml.c | 28 
++- 4 files changed, 462 insertions(+), 16 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index f4e2175f1..2ae046933 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -82,7 +82,7 @@ static void usage(const char * executable) { printf(" --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); printf(" --leave-output-tensor: Will leave output.weight un(re)quantized. Increases model size but may also increase quality, especially when requantizing\n"); printf(" --pure: Disable k-quant mixtures and quantize all tensors to the same type\n"); - printf(" --imatrixfile_name: use data in file_name as importance matrix for quant optimizations\n"); + printf(" --imatrix file_name: use data in file_name as importance matrix for quant optimizations\n"); printf(" --include-weights tensor_name: use importance matrix for this/these tensor(s)\n"); printf(" --exclude-weights tensor_name: use importance matrix for this/these tensor(s)\n"); printf("Note: --include-weights and --exclude-weights cannot be used together\n"); diff --git a/ggml-quants.c b/ggml-quants.c index 9290d54cf..0750fe1bb 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -1244,7 +1244,8 @@ static inline int nearest_int(float fval) { return (i & 0x007fffff) - 0x00400000; } -static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, int rmse_type) { +static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, int rmse_type, + const float * restrict qw) { float max = 0; float amax = 0; for (int i = 0; i < n; ++i) { @@ -1270,14 +1271,13 @@ static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * rmse_type = -rmse_type; return_early = true; } - int weight_type = rmse_type%2; float sumlx = 0; float suml2 = 0; for (int i = 0; i < n; ++i) { int l = nearest_int(iscale * x[i]); l = MAX(-nmax, MIN(nmax-1, l)); L[i] = l + nmax; - float w = weight_type == 1 ? x[i] * x[i] : 1; + float w = qw ? qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); sumlx += w*x[i]*l; suml2 += w*l*l; } @@ -1293,7 +1293,7 @@ static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * for (int i = 0; i < n; ++i) { int l = nearest_int(iscale * x[i]); l = MAX(-nmax, MIN(nmax-1, l)); - float w = weight_type == 1 ? x[i] * x[i] : 1; + float w = qw ? qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); sumlx += w*x[i]*l; suml2 += w*l*l; } @@ -2089,6 +2089,112 @@ size_t ggml_quantize_q3_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q3_K)); } +static void quantize_row_q3_K_impl(const float * restrict x, block_q3_K * restrict y, int n_per_row, const float * restrict quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q3_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K / 16]; + float weight[16]; + float sw[QK_K / 16]; + int8_t Ls[QK_K / 16]; + + for (int i = 0; i < nb; i++) { + + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = 2*sumx2/QK_K; + + for (int j = 0; j < QK_K/16; ++j) { + if (quant_weights) { + const float * qw = quant_weights ? 
quant_weights + QK_K * i + 16*j : NULL; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j+l]*x[16*j+l]); + } else { + for (int l = 0; l < 16; ++l) weight[l] = x[16*j+l]*x[16*j+l]; + } + float sumw = 0; + for (int l = 0; l < 16; ++l) sumw += weight[l]; + sw[j] = sumw; + + scales[j] = make_qx_quants(16, 4, x + 16*j, L + 16*j, 1, weight); + + } + + memset(y[i].scales, 0, 12); + + float d_block = make_qx_quants(QK_K/16, 32, scales, Ls, 1, sw); + for (int j = 0; j < QK_K/16; ++j) { + int l = Ls[j]; + if (j < 8) { + y[i].scales[j] = l & 0xF; + } else { + y[i].scales[j-8] |= ((l & 0xF) << 4); + } + l >>= 4; + y[i].scales[j%4 + 8] |= (l << (2*(j/4))); + } + y[i].d = GGML_FP32_TO_FP16(d_block); + + int8_t sc; + for (int j = 0; j < QK_K/16; ++j) { + sc = j < 8 ? y[i].scales[j] & 0xF : y[i].scales[j-8] >> 4; + sc = (sc | (((y[i].scales[8 + j%4] >> (2*(j/4))) & 3) << 4)) - 32; + float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-4, MIN(3, l)); + L[16*j + ii] = l + 4; + } + } + + memset(y[i].hmask, 0, QK_K/8); + // We put the high-bit for the 1st 8 quants into bit 0, the next 8 into bit 1, etc. + int m = 0; + uint8_t hm = 1; + for (int j = 0; j < QK_K; ++j) { + if (L[j] > 3) { + y[i].hmask[m] |= hm; + L[j] -= 4; + } + if (++m == QK_K/8) { + m = 0; hm <<= 1; + } + } + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +#endif +} + +size_t quantize_q3_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q3_K, n_per_row); + if (!quant_weights) { + quantize_row_q3_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q3_K_impl(src, (block_q3_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 4-bit (de)-quantization void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y, int k) { @@ -2254,6 +2360,108 @@ size_t ggml_quantize_q4_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q4_K)); } +static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q4_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[32]; + float weights[32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + scales[j] = make_qkx3_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + //scales[j] = make_qkx2_quants(32, 15, x + 32*j, 
weights, L + 32*j, &mins[j], Laux, -1.f, 0.1f, 20, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(15, l)); + L[32*j + ii] = l; + } + } + uint8_t * q = y[i].qs; + for (int j = 0; j < QK_K; j += 64) { + for (int l = 0; l < 32; ++l) q[l] = L[j + l] | (L[j + l + 32] << 4); + q += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q4_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q4_K, n_per_row); + if (!quant_weights) { + quantize_row_q4_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q4_K_impl(src, (block_q4_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 5-bit (de)-quantization void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k) { @@ -2349,7 +2557,7 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict #else float max_scale = 0, amax = 0; for (int j = 0; j < QK_K/16; ++j) { - scales[j] = make_qx_quants(16, 16, x + 16*j, L + 16*j, 1); + scales[j] = make_qx_quants(16, 16, x + 16*j, L + 16*j, 1, NULL); float abs_scale = fabsf(scales[j]); if (abs_scale > amax) { amax = abs_scale; @@ -2460,6 +2668,123 @@ size_t ggml_quantize_q5_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q5_K)); } +static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q5_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float weights[32]; + uint8_t Laux[32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + scales[j] = make_qkx3_quants(32, 31, x + 
32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(31, l)); + L[32*j + ii] = l; + } + } + + uint8_t * restrict qh = y[i].qh; + uint8_t * restrict ql = y[i].qs; + memset(qh, 0, QK_K/8); + + uint8_t m1 = 1, m2 = 2; + for (int n = 0; n < QK_K; n += 64) { + for (int j = 0; j < 32; ++j) { + int l1 = L[n + j]; + if (l1 > 15) { + l1 -= 16; qh[j] |= m1; + } + int l2 = L[n + j + 32]; + if (l2 > 15) { + l2 -= 16; qh[j] |= m2; + } + ql[j] = l1 | (l2 << 4); + } + m1 <<= 2; m2 <<= 2; + ql += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q5_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q5_K, n_per_row); + if (!quant_weights) { + quantize_row_q5_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q5_K_impl(src, (block_q5_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 6-bit (de)-quantization void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k) { @@ -2476,7 +2801,7 @@ void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict for (int ib = 0; ib < QK_K/16; ++ib) { - const float scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1); + const float scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); scales[ib] = scale; const float abs_scale = fabsf(scale); @@ -2608,6 +2933,112 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * return (n/QK_K*sizeof(block_q6_K)); } +static void quantize_row_q6_K_impl(const float * restrict x, block_q6_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q6_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K/16]; + //float weights[16]; + + for (int i = 0; i < nb; i++) { + + //float sum_x2 = 0; + //for (int j = 0; j < QK_K; ++j) sum_x2 += x[j]*x[j]; + //float sigma2 = sum_x2/QK_K; + + float max_scale = 0; + float max_abs_scale = 0; + + for (int ib = 0; ib < QK_K/16; ++ib) { + + float scale; + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 16*ib; + //for (int j = 0; j < 16; ++j) weights[j] = qw[j] * sqrtf(sigma2 + x[16*ib + j]*x[16*ib + j]); 
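
(The core of the importance-matrix change is visible in make_qx_quants() above: the quantization scale is now a weighted least-squares fit. As an illustrative, standalone sketch -- weighted_scale() is a name introduced here for illustration, not part of the patch -- for fixed integer levels l[i], the scale d minimizing sum_i w[i]*(x[i] - d*l[i])^2 is sum(w*x*l)/sum(w*l*l), with w[i] taken from the importance matrix when one is supplied:

    static float weighted_scale(int n, const float * x, const int8_t * l, const float * w) {
        float sumlx = 0.0f;
        float suml2 = 0.0f;
        for (int i = 0; i < n; ++i) {
            const float wi = w ? w[i] : 1.0f;   // fall back to uniform weights without an imatrix
            sumlx += wi * x[i] * l[i];
            suml2 += wi * (float) l[i] * l[i];
        }
        return suml2 > 0.0f ? sumlx / suml2 : 0.0f;
    }

)
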
+ //scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, weights); + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, qw); + } else { + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); + } + scales[ib] = scale; + + const float abs_scale = fabsf(scale); + if (abs_scale > max_abs_scale) { + max_abs_scale = abs_scale; + max_scale = scale; + } + + } + + if (!max_abs_scale) { + memset(&y[i], 0, sizeof(block_q6_K)); + y[i].d = GGML_FP32_TO_FP16(0.f); + x += QK_K; + continue; + } + + float iscale = -128.f/max_scale; + y[i].d = GGML_FP32_TO_FP16(1/iscale); + for (int ib = 0; ib < QK_K/16; ++ib) { + y[i].scales[ib] = MIN(127, nearest_int(iscale*scales[ib])); + } + + for (int j = 0; j < QK_K/16; ++j) { + float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-32, MIN(31, l)); + L[16*j + ii] = l + 32; + } + } + + uint8_t * restrict ql = y[i].ql; + uint8_t * restrict qh = y[i].qh; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + const uint8_t q1 = L[j + l + 0] & 0xF; + const uint8_t q2 = L[j + l + 32] & 0xF; + const uint8_t q3 = L[j + l + 64] & 0xF; + const uint8_t q4 = L[j + l + 96] & 0xF; + ql[l+ 0] = q1 | (q3 << 4); + ql[l+32] = q2 | (q4 << 4); + qh[l] = (L[j + l] >> 4) | ((L[j + l + 32] >> 4) << 2) | ((L[j + l + 64] >> 4) << 4) | ((L[j + l + 96] >> 4) << 6); + } + ql += 64; + qh += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q6_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q6_K, n_per_row); + if (!quant_weights) { + quantize_row_q6_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q6_K_impl(src, (block_q6_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== "True" 2-bit (de)-quantization static const uint64_t iq2xxs_grid[256] = { diff --git a/ggml-quants.h b/ggml-quants.h index e5d110230..99467936a 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -249,4 +249,7 @@ void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); - +size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q5_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q6_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index 52467475a..ef5888ab2 100644 --- a/ggml.c +++ b/ggml.c @@ -18713,26 +18713,38 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_Q3_K: { GGML_ASSERT(start % QK_K == 0); - block_q3_K * block = (block_q3_K*)dst + start / QK_K; - result = ggml_quantize_q3_K(src + start, block, n, n, hist); + 
GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q3_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q4_K: { GGML_ASSERT(start % QK_K == 0); - block_q4_K * block = (block_q4_K*)dst + start / QK_K; - result = ggml_quantize_q4_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q4_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q5_K: { GGML_ASSERT(start % QK_K == 0); - block_q5_K * block = (block_q5_K*)dst + start / QK_K; - result = ggml_quantize_q5_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q5_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q6_K: { GGML_ASSERT(start % QK_K == 0); - block_q6_K * block = (block_q6_K*)dst + start / QK_K; - result = ggml_quantize_q6_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q6_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_IQ2_XXS: { From a836c8f534ab789b02da149fbdaf7735500bff74 Mon Sep 17 00:00:00 2001 From: David Pflug Date: Sun, 14 Jan 2024 10:46:00 -0500 Subject: [PATCH 318/811] llama : fix missing quotes (#4937) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 63f37ecdb..7af38718c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7099,7 +7099,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } #ifdef PRETOKENIZERDEBUG - LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); @@ -7120,7 +7120,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG - LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); From 4a3156de2fac9a8ee4279de7804d4e352dcfe121 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 15 Jan 2024 07:48:06 +0200 Subject: [PATCH 319/811] CUDA: faster dequantize kernels for Q4_0 and Q4_1 (#4938) Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 77 +++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 73 insertions(+), 4 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index bd3814c72..a870718a7 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1105,6 +1105,61 @@ static __device__ 
__forceinline__ void dequantize_q8_0(const void * vx, const in #endif // GGML_CUDA_F16 } +template +static __global__ void dequantize_block_q4_0(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32) { + + const int i = blockIdx.x; + + // assume 32 threads + const int tid = threadIdx.x; + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_0 * x = (const block_q4_0 *)vx + ib; + const float d = __half2float(x->d); + const float dm = -8*d; + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l+ 0] = d * (q[l] & 0xF) + dm; + y[l+16] = d * (q[l] >> 4) + dm; + } +} + +template +static __global__ void dequantize_block_q4_1(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32) { + + const int i = blockIdx.x; + + // assume 32 threads + const int tid = threadIdx.x; + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_1 * x = (const block_q4_1 *)vx + ib; + const float2 d = __half22float2(x->dm); + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l+ 0] = d.x * (q[l] & 0xF) + d.y; + y[l+16] = d.x * (q[l] >> 4) + d.y; + } +} + //================================== k-quants template @@ -6253,6 +6308,20 @@ static void dequantize_row_q3_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +template +static void dequantize_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + dequantize_block_q4_0<<>>(vx, y, nb32); +} + +template +static void dequantize_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + dequantize_block_q4_1<<>>(vx, y, nb32); +} + template static void dequantize_row_q4_K_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6301,9 +6370,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { int id; switch (type) { case GGML_TYPE_Q4_0: - return dequantize_block_cuda; + return dequantize_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_block_cuda; + return dequantize_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: @@ -6338,9 +6407,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: - return dequantize_block_cuda; + return dequantize_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_block_cuda; + return dequantize_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: From 2faaef39799c97a53bec3898141478700da25757 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 15 Jan 2024 10:09:38 +0200 Subject: [PATCH 320/811] llama : check for 256 divisibility for IQ2_XS, IQ2_XXS (#4950) Co-authored-by: Iwan Kawrakow --- llama.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 7af38718c..f9718060d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8559,7 +8559,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty //} bool convert_incompatible_tensor = false; if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || - new_type == GGML_TYPE_Q5_K || 
new_type == GGML_TYPE_Q6_K) { + new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || + new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -8571,6 +8572,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } if (convert_incompatible_tensor) { switch (new_type) { + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; From ddb008d845cd50bb090bf051f570130524042936 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 15 Jan 2024 13:27:00 +0200 Subject: [PATCH 321/811] cuda : fix dequantize kernel names (#4938) --- ggml-cuda.cu | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index a870718a7..c3e14bc96 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6309,14 +6309,14 @@ static void dequantize_row_q3_K_cuda(const void * vx, dst_t * y, const int k, cu } template -static void dequantize_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { +static void dequantize_row_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb32 = k / 32; const int nb = (k + 255) / 256; dequantize_block_q4_0<<>>(vx, y, nb32); } template -static void dequantize_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { +static void dequantize_row_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb32 = k / 32; const int nb = (k + 255) / 256; dequantize_block_q4_1<<>>(vx, y, nb32); @@ -6370,9 +6370,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { int id; switch (type) { case GGML_TYPE_Q4_0: - return dequantize_q4_0_cuda; + return dequantize_row_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_q4_1_cuda; + return dequantize_row_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: @@ -6407,9 +6407,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: - return dequantize_q4_0_cuda; + return dequantize_row_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_q4_1_cuda; + return dequantize_row_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: From d9aa4ffa6e0296d42f1f676dd85de97c8491eb73 Mon Sep 17 00:00:00 2001 From: "Victor Z. 
Peng" Date: Mon, 15 Jan 2024 04:41:46 -0800 Subject: [PATCH 322/811] awq-py : fix typo in awq-py/README.md (#4947) --- awq-py/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awq-py/README.md b/awq-py/README.md index 59354f4e3..16e68d027 100644 --- a/awq-py/README.md +++ b/awq-py/README.md @@ -43,7 +43,7 @@ Example for llama model # For llama7b and llama2 models python convert.py models/llama-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/llama_7b_fp16.gguf # For mistral and mpt models -python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf +python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/mpt-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf ``` ## Quantize From 4483396751c79dea540808b9cb9238245d06da2b Mon Sep 17 00:00:00 2001 From: David Friehs Date: Mon, 15 Jan 2024 14:06:52 +0100 Subject: [PATCH 323/811] llama : apply classifier-free guidance to logits directly (#4951) --- common/sampling.cpp | 9 ++++--- llama.cpp | 60 ++++++++++++++++++++++++++++++--------------- llama.h | 17 +++++++++---- 3 files changed, 57 insertions(+), 29 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 8e45909f1..dd1ffeb1b 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -190,6 +190,11 @@ static llama_token llama_sampling_sample_impl( logits[it->first] += it->second; } + if (ctx_cfg) { + float * logits_guidance = llama_get_logits_ith(ctx_cfg, idx); + llama_sample_apply_guidance(ctx_main, logits, logits_guidance, params.cfg_scale); + } + cur.clear(); for (llama_token token_id = 0; token_id < n_vocab; token_id++) { @@ -198,10 +203,6 @@ static llama_token llama_sampling_sample_impl( llama_token_data_array cur_p = { cur.data(), cur.size(), false }; - if (ctx_cfg) { - llama_sample_classifier_free_guidance(ctx_main, &cur_p, ctx_cfg, params.cfg_scale); - } - // apply penalties const auto& penalty_tokens = params.use_penalty_prompt_tokens ? 
params.penalty_prompt_tokens : prev; const int penalty_tokens_used_size = std::min((int)penalty_tokens.size(), penalty_last_n); diff --git a/llama.cpp b/llama.cpp index f9718060d..46c4d11c8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7898,39 +7898,59 @@ static void llama_log_softmax(float * array, size_t size) { } } +void llama_sample_apply_guidance( + struct llama_context * ctx, + float * logits, + float * logits_guidance, + float scale) { + GGML_ASSERT(ctx); + + const auto t_start_sample_us = ggml_time_us(); + const auto n_vocab = llama_n_vocab(llama_get_model(ctx)); + + llama_log_softmax(logits, n_vocab); + llama_log_softmax(logits_guidance, n_vocab); + + for (int i = 0; i < n_vocab; ++i) { + auto & l = logits[i]; + const auto & g = logits_guidance[i]; + + l = scale * (l - g) + g; + } + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; +} + void llama_sample_classifier_free_guidance( struct llama_context * ctx, llama_token_data_array * candidates, struct llama_context * guidance_ctx, float scale) { - int64_t t_start_sample_us = ggml_time_us(); - GGML_ASSERT(ctx); + int64_t t_start_sample_us; - auto n_vocab = llama_n_vocab(llama_get_model(ctx)); + t_start_sample_us = ggml_time_us(); + const size_t n_vocab = llama_n_vocab(llama_get_model(ctx)); - GGML_ASSERT(n_vocab == (int)candidates->size); + GGML_ASSERT(n_vocab == candidates->size); GGML_ASSERT(!candidates->sorted); - std::vector logits_base; - logits_base.reserve(candidates->size); - for (size_t i = 0; i < candidates->size; ++i) { - logits_base.push_back(candidates->data[i].logit); - } - llama_log_softmax(logits_base.data(), candidates->size); - - float* logits_guidance = llama_get_logits(guidance_ctx); - llama_log_softmax(logits_guidance, n_vocab); - - for (int i = 0; i < n_vocab; ++i) { - float logit_guidance = logits_guidance[i]; - float logit_base = logits_base[i]; - candidates->data[i].logit = scale * (logit_base - logit_guidance) + logit_guidance; + std::vector logits_base(n_vocab); + for (size_t i = 0; i < n_vocab; ++i) { + logits_base[i] = candidates->data[i].logit; } - if (ctx) { - ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + float * logits_guidance = llama_get_logits(guidance_ctx); + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + llama_sample_apply_guidance(ctx, logits_base.data(), logits_guidance, scale); + t_start_sample_us = ggml_time_us(); + + for (size_t i = 0; i < n_vocab; ++i) { + candidates->data[i].logit = logits_base[i]; } + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; } llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int32_t m, float * mu) { diff --git a/llama.h b/llama.h index 79c8335b6..a570b0d69 100644 --- a/llama.h +++ b/llama.h @@ -714,14 +714,21 @@ extern "C" { float penalty_present); /// @details Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806 - /// @param candidates A vector of `llama_token_data` containing the candidate tokens, the logits must be directly extracted from the original generation context without being sorted. - /// @params guidance_ctx A separate context from the same model. Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context. - /// @params scale Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance. 
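
(Usage note: with the new API the caller works on raw logit arrays instead of a candidates array, mirroring the common/sampling.cpp change above. A hedged sketch, where ctx_guidance, idx and cfg_scale stand in for the caller's own state:

    float * logits          = llama_get_logits_ith(ctx, idx);
    float * logits_guidance = llama_get_logits_ith(ctx_guidance, idx);
    llama_sample_apply_guidance(ctx, logits, logits_guidance, cfg_scale);

The llama_token_data_array used for sampling is then built from `logits` afterwards, as before.)
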
- LLAMA_API void llama_sample_classifier_free_guidance( + /// @param logits Logits extracted from the original generation context. + /// @param logits_guidance Logits extracted from a separate context from the same model. Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context. + /// @param scale Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance. + LLAMA_API void llama_sample_apply_guidance( + struct llama_context * ctx, + float * logits, + float * logits_guidance, + float scale); + + LLAMA_API DEPRECATED(void llama_sample_classifier_free_guidance( struct llama_context * ctx, llama_token_data_array * candidates, struct llama_context * guidance_ctx, - float scale); + float scale), + "use llama_sample_apply_guidance() instead"); /// @details Sorts candidate tokens by their logits in descending order and calculate probabilities based on logits. LLAMA_API void llama_sample_softmax( From 3e5ca7931c68152e4ec18d126e9c832dd84914c8 Mon Sep 17 00:00:00 2001 From: ngc92 <7938269+ngc92@users.noreply.github.com> Date: Mon, 15 Jan 2024 20:40:48 +0200 Subject: [PATCH 324/811] pass cpu-architecture arguments only to host code (C;C++) (#4943) --- CMakeLists.txt | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2741568ed..7bd640966 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -594,6 +594,13 @@ if (NOT MSVC) endif() endif() +function(add_compile_option_cpp ARG) + # Adds a compile option to C/C++ only, but not for Cuda. + # Use, e.g., for CPU-architecture flags. + add_compile_options($<$:${ARG}>) + add_compile_options($<$:${ARG}>) +endfunction() + if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATCHES "aarch64") OR ("${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "arm64")) message(STATUS "ARM detected") if (MSVC) @@ -628,8 +635,7 @@ elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GE include(cmake/FindSIMD.cmake) endif () if (LLAMA_AVX512) - add_compile_options($<$:/arch:AVX512>) - add_compile_options($<$:/arch:AVX512>) + add_compile_option_cpp(/arch:AVX512) # MSVC has no compile-time flags enabling specific # AVX512 extensions, neither it defines the # macros corresponding to the extensions. 
@@ -643,37 +649,35 @@ elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GE add_compile_definitions($<$:__AVX512VNNI__>) endif() elseif (LLAMA_AVX2) - add_compile_options($<$:/arch:AVX2>) - add_compile_options($<$:/arch:AVX2>) + add_compile_option_cpp(/arch:AVX2) elseif (LLAMA_AVX) - add_compile_options($<$:/arch:AVX>) - add_compile_options($<$:/arch:AVX>) + add_compile_option_cpp(/arch:AVX) endif() else() if (LLAMA_NATIVE) - add_compile_options(-march=native) + add_compile_option_cpp(-march=native) endif() if (LLAMA_F16C) - add_compile_options(-mf16c) + add_compile_option_cpp(-mf16c) endif() if (LLAMA_FMA) - add_compile_options(-mfma) + add_compile_option_cpp(-mfma) endif() if (LLAMA_AVX) - add_compile_options(-mavx) + add_compile_option_cpp(-mavx) endif() if (LLAMA_AVX2) - add_compile_options(-mavx2) + add_compile_option_cpp(-mavx2) endif() if (LLAMA_AVX512) - add_compile_options(-mavx512f) - add_compile_options(-mavx512bw) + add_compile_option_cpp(-mavx512f) + add_compile_option_cpp(-mavx512bw) endif() if (LLAMA_AVX512_VBMI) - add_compile_options(-mavx512vbmi) + add_compile_option_cpp(-mavx512vbmi) endif() if (LLAMA_AVX512_VNNI) - add_compile_options(-mavx512vnni) + add_compile_option_cpp(-mavx512vnni) endif() endif() elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64") From e0324285a569d0583cf2f4a07a2402221ee25f58 Mon Sep 17 00:00:00 2001 From: stduhpf Date: Tue, 16 Jan 2024 12:04:32 +0100 Subject: [PATCH 325/811] speculative : threading options (#4959) * speculative: expose draft threading * fix usage format * accept -td and -tbd args * speculative: revert default behavior when -td is unspecified * fix trailing whitespace --- common/common.cpp | 22 ++++++++++++++++++++++ common/common.h | 2 ++ examples/speculative/speculative.cpp | 4 ++++ 3 files changed, 28 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index c11006bcb..2b0865fff 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -167,6 +167,24 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { if (params.n_threads_batch <= 0) { params.n_threads_batch = std::thread::hardware_concurrency(); } + } else if (arg == "-td" || arg == "--threads-draft") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.n_threads_draft = std::stoi(argv[i]); + if (params.n_threads_draft <= 0) { + params.n_threads_draft = std::thread::hardware_concurrency(); + } + } else if (arg == "-tbd" || arg == "--threads-batch-draft") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.n_threads_batch_draft = std::stoi(argv[i]); + if (params.n_threads_batch_draft <= 0) { + params.n_threads_batch_draft = std::thread::hardware_concurrency(); + } } else if (arg == "-p" || arg == "--prompt") { if (++i >= argc) { invalid_param = true; @@ -845,6 +863,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -t N, --threads N number of threads to use during generation (default: %d)\n", params.n_threads); printf(" -tb N, --threads-batch N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads)\n"); + printf(" -td N, --threads-draft N"); + printf(" number of threads to use during generation (default: same as --threads)"); + printf(" -tbd N, --threads-batch-draft N\n"); + printf(" number of threads to use during batch and prompt processing (default: same as --threads-draft)\n"); printf(" -p PROMPT, --prompt PROMPT\n"); printf(" prompt to start generation with (default: empty)\n"); printf(" -e, 
--escape process prompt escapes sequences (\\n, \\r, \\t, \\', \\\", \\\\)\n");
diff --git a/common/common.h b/common/common.h
index 096468243..1f43e6282 100644
--- a/common/common.h
+++ b/common/common.h
@@ -46,7 +46,9 @@ struct gpt_params {
     uint32_t seed = -1; // RNG seed
 
     int32_t n_threads = get_num_physical_cores();
+    int32_t n_threads_draft = -1;
     int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads)
+    int32_t n_threads_batch_draft = -1;
     int32_t n_predict = -1; // new tokens to predict
     int32_t n_ctx = 512; // context size
     int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS)
diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp
index 20f1fb5bf..7b3af01f3 100644
--- a/examples/speculative/speculative.cpp
+++ b/examples/speculative/speculative.cpp
@@ -65,6 +65,10 @@ int main(int argc, char ** argv) {
     // load the draft model
     params.model = params.model_draft;
     params.n_gpu_layers = params.n_gpu_layers_draft;
+    if (params.n_threads_draft > 0) {
+        params.n_threads = params.n_threads_draft;
+    }
+    params.n_threads_batch = params.n_threads_batch_draft;
     std::tie(model_dft, ctx_dft) = llama_init_from_gpt_params(params);
 
     {

From d75c232e1da56f19ac4d2530dadbe0ab3a11fde5 Mon Sep 17 00:00:00 2001
From: Daniel Bevenius
Date: Tue, 16 Jan 2024 12:14:19 +0100
Subject: [PATCH 326/811] finetune : use LLAMA_FILE_MAGIC_GGLA (#4961)

This commit replaces the magic number LLAMA_FILE_MAGIC_LORA used in
finetune.cpp with LLAMA_FILE_MAGIC_GGLA defined in llama.h.

Signed-off-by: Daniel Bevenius
---
 examples/finetune/finetune.cpp | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
index eaca42fc1..a6620fd73 100644
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
@@ -1138,9 +1138,8 @@ static void save_as_llama_lora(const char * filename, struct my_llama_lora * lor
         return tn_buf.data();
     };
 
-    uint32_t LLAMA_FILE_MAGIC_LORA = 0x67676C61; // 'ggla'
     // write_magic
-    file.write_u32(LLAMA_FILE_MAGIC_LORA); // magic
+    file.write_u32(LLAMA_FILE_MAGIC_GGLA); // magic
     file.write_u32(1); // version
     // write_hparams
     file.write_u32(lora->hparams.lora_r);

From a0b3ac8c48b66206b9c5921ce57bd5c0ea6557c3 Mon Sep 17 00:00:00 2001
From: Justine Tunney
Date: Tue, 16 Jan 2024 03:16:33 -0800
Subject: [PATCH 327/811] ggml : introduce GGML_CALL function annotation (#4850)

This change makes it possible to build ggml-cuda.cu and ggml-metal.m
as independent dynamic shared objects, that may be conditionally
linked at runtime in a multiplatform binary. It introduces a GGML_CALL
annotation that documents which functions have a cyclic call
relationship, between the application code and GPU modules.
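
For illustration, an annotation like this is typically a no-op macro that only
switches the calling convention when explicitly requested; a minimal sketch
(the actual definition lives in the ggml.h part of this patch and may differ
in detail):

    #ifdef GGML_MULTIPLATFORM
    #    if defined(_WIN32)
    #        define GGML_CALL
    #    else
    #        define GGML_CALL __attribute__((__ms_abi__))   // force MS ABI on non-Windows targets
    #    endif
    #else
    #    define GGML_CALL                                    // default build: expands to nothing
    #endif
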
This change does nothing, unless the build defines -DGGML_MULTIPLATFORM which causes back-references and function pointers to conform to MS ABI which is supported by NVCC, ROCm, XCode, GCC and Clang across platforms --- ggml-backend-impl.h | 60 +++++++++++----------- ggml-backend.c | 80 ++++++++++++++--------------- ggml-backend.h | 50 +++++++++--------- ggml-cuda.cu | 121 ++++++++++++++++++++++---------------------- ggml-cuda.h | 32 ++++++------ ggml-metal.h | 4 +- ggml-metal.m | 42 +++++++-------- ggml.c | 32 ++++++------ ggml.h | 58 ++++++++++++--------- 9 files changed, 244 insertions(+), 235 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 1db32901f..1397828d9 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -16,14 +16,14 @@ extern "C" { typedef void * ggml_backend_buffer_type_context_t; struct ggml_backend_buffer_type_i { - const char * (*get_name) (ggml_backend_buffer_type_t buft); - ggml_backend_buffer_t (*alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); - size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment - size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding - bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend + const char * (*GGML_CALL get_name) (ggml_backend_buffer_type_t buft); + ggml_backend_buffer_t (*GGML_CALL alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); + size_t (*GGML_CALL get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment + size_t (*GGML_CALL get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding + bool (*GGML_CALL supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend // check if tensor data is in host memory // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) - bool (*is_host) (ggml_backend_buffer_type_t buft); + bool (*GGML_CALL is_host) (ggml_backend_buffer_type_t buft); }; struct ggml_backend_buffer_type { @@ -35,15 +35,15 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - const char * (*get_name) (ggml_backend_buffer_t buffer); - void (*free_buffer)(ggml_backend_buffer_t buffer); - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - bool (*cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer - void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); - void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras + const char * (*GGML_CALL get_name) (ggml_backend_buffer_t buffer); + void (*GGML_CALL free_buffer)(ggml_backend_buffer_t buffer); + void * (*GGML_CALL get_base) (ggml_backend_buffer_t buffer); + void (*GGML_CALL init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*GGML_CALL set_tensor) 
(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer + void (*GGML_CALL clear) (ggml_backend_buffer_t buffer, uint8_t value); + void (*GGML_CALL reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras }; struct ggml_backend_buffer { @@ -54,7 +54,7 @@ extern "C" { enum ggml_backend_buffer_usage usage; }; - ggml_backend_buffer_t ggml_backend_buffer_init( + GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, @@ -70,31 +70,31 @@ extern "C" { typedef void * ggml_backend_context_t; struct ggml_backend_i { - const char * (*get_name)(ggml_backend_t backend); + const char * (*GGML_CALL get_name)(ggml_backend_t backend); - void (*free)(ggml_backend_t backend); + void (*GGML_CALL free)(ggml_backend_t backend); // buffer allocation - ggml_backend_buffer_type_t (*get_default_buffer_type)(ggml_backend_t backend); + ggml_backend_buffer_type_t (*GGML_CALL get_default_buffer_type)(ggml_backend_t backend); // (optional) asynchronous tensor data access - void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - bool (*cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); + void (*GGML_CALL set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); // (optional) complete all pending operations - void (*synchronize)(ggml_backend_t backend); + void (*GGML_CALL synchronize)(ggml_backend_t backend); // compute graph with a plan - ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); - void (*graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); - void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); + ggml_backend_graph_plan_t (*GGML_CALL graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); + void (*GGML_CALL graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); + void (*GGML_CALL graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); // compute graph without a plan (async) - bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); + bool (*GGML_CALL graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation - bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); + bool (*GGML_CALL supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); }; struct ggml_backend { @@ -107,9 +107,9 @@ extern "C" { // Backend registry // - typedef ggml_backend_t 
(*ggml_backend_init_fn)(const char * params, void * user_data); + typedef ggml_backend_t (*GGML_CALL ggml_backend_init_fn)(const char * params, void * user_data); - void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); + GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); #ifdef __cplusplus } diff --git a/ggml-backend.c b/ggml-backend.c index 505dbba47..f5424fb90 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -19,7 +19,7 @@ const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { return buft->iface.get_name(buft); } -ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { return buft->iface.alloc_buffer(buft, size); } @@ -27,7 +27,7 @@ size_t ggml_backend_buft_get_alignment(ggml_backend_buffer_type_t buft) { return buft->iface.get_alignment(buft); } -size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { // get_alloc_size is optional, defaults to ggml_nbytes if (buft->iface.get_alloc_size) { return buft->iface.get_alloc_size(buft, tensor); @@ -48,7 +48,7 @@ bool ggml_backend_buft_is_host(ggml_backend_buffer_type_t buft) { // backend buffer -ggml_backend_buffer_t ggml_backend_buffer_init( +GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, @@ -95,7 +95,7 @@ void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { return base; } -void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { +GGML_CALL void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { // init_tensor is optional if (buffer->iface.init_tensor) { buffer->iface.init_tensor(buffer, tensor); @@ -191,7 +191,7 @@ void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_ten } } -void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); @@ -201,7 +201,7 @@ void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, siz tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } -void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { ggml_backend_buffer_t buf = tensor->view_src ? 
tensor->view_src->buffer : tensor->buffer; GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); @@ -318,9 +318,9 @@ struct ggml_backend_reg { static struct ggml_backend_reg ggml_backend_registry[GGML_MAX_BACKENDS_REG]; static size_t ggml_backend_registry_count = 0; -static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data); +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data); -static void ggml_backend_registry_init(void) { +GGML_CALL static void ggml_backend_registry_init(void) { static bool initialized = false; if (initialized) { @@ -333,18 +333,18 @@ static void ggml_backend_registry_init(void) { // add forward decls here to avoid including the backend headers #ifdef GGML_USE_CUBLAS - extern void ggml_backend_cuda_reg_devices(void); + extern GGML_CALL void ggml_backend_cuda_reg_devices(void); ggml_backend_cuda_reg_devices(); #endif #ifdef GGML_USE_METAL - extern ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); - extern ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + extern GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); + extern GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); ggml_backend_register("Metal", ggml_backend_reg_metal_init, ggml_backend_metal_buffer_type(), NULL); #endif } -void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { +GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { GGML_ASSERT(ggml_backend_registry_count < GGML_MAX_BACKENDS_REG); size_t id = ggml_backend_registry_count; @@ -439,33 +439,33 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU -static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { return "CPU"; GGML_UNUSED(buffer); } -static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { return (void *)buffer->context; } -static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { free(buffer->context); } -static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { memcpy(data, (const char *)tensor->data + offset, size); GGML_UNUSED(buffer); } -static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { +GGML_CALL static bool 
ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { if (ggml_backend_buffer_is_host(src->buffer)) { memcpy(dst->data, src->data, ggml_nbytes(src)); return true; @@ -475,7 +475,7 @@ static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, con GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { memset(buffer->context, value, buffer->size); } @@ -506,13 +506,13 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 -static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU"; GGML_UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? @@ -521,25 +521,25 @@ static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_back return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); } -static size_t ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return TENSOR_ALIGNMENT; GGML_UNUSED(buft); } -static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_cpu(backend); GGML_UNUSED(buft); } -static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return true; GGML_UNUSED(buft); } -ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { /* .get_name = */ ggml_backend_cpu_buffer_type_get_name, @@ -561,23 +561,23 @@ ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { #include -static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU_HBM"; GGML_UNUSED(buft); } -static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { return "CPU_HBM"; GGML_UNUSED(buf); } -static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { hbw_free(buffer->context); } -static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t 
ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { //void * ptr = hbw_malloc(size); void * ptr; int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); @@ -617,20 +617,20 @@ struct ggml_backend_cpu_context { size_t work_size; }; -static const char * ggml_backend_cpu_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_cpu_name(ggml_backend_t backend) { return "CPU"; GGML_UNUSED(backend); } -static void ggml_backend_cpu_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cpu_free(ggml_backend_t backend) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; free(cpu_ctx->work_data); free(cpu_ctx); free(backend); } -static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_cpu_buffer_type(); GGML_UNUSED(backend); @@ -641,7 +641,7 @@ struct ggml_backend_plan_cpu { struct ggml_cgraph cgraph; }; -static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { +GGML_CALL static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); @@ -656,7 +656,7 @@ static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend return cpu_plan; } -static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +GGML_CALL static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; free(cpu_plan->cplan.work_data); @@ -665,7 +665,7 @@ static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backen GGML_UNUSED(backend); } -static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +GGML_CALL static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); @@ -673,7 +673,7 @@ static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_bac GGML_UNUSED(backend); } -static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); @@ -690,7 +690,7 @@ static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c return true; } -static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { +GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_MUL_MAT: return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; @@ -732,7 +732,7 @@ 
ggml_backend_t ggml_backend_cpu_init(void) { return cpu_backend; } -bool ggml_backend_is_cpu(ggml_backend_t backend) { +GGML_CALL bool ggml_backend_is_cpu(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_cpu_name; } @@ -743,11 +743,11 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { ctx->n_threads = n_threads; } -ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } -static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { return ggml_backend_cpu_init(); GGML_UNUSED(params); diff --git a/ggml-backend.h b/ggml-backend.h index 4eb244af1..12b4b4ab7 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -17,12 +17,12 @@ extern "C" { // // buffer type - GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); - GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); - GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); - GGML_API size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); - GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); - GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); + GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer enum ggml_backend_buffer_usage { @@ -30,18 +30,18 @@ extern "C" { GGML_BACKEND_BUFFER_USAGE_WEIGHTS = 1, }; - GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); - GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); - GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); - GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); + GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); + GGML_API void 
ggml_backend_buffer_free (ggml_backend_buffer_t buffer); + GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); + GGML_API GGML_CALL void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); // // Backend @@ -58,8 +58,8 @@ extern "C" { GGML_API void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); GGML_API void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - GGML_API void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - GGML_API void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); GGML_API void ggml_backend_synchronize(ggml_backend_t backend); @@ -80,13 +80,13 @@ extern "C" { GGML_API ggml_backend_t ggml_backend_cpu_init(void); - GGML_API bool ggml_backend_is_cpu(ggml_backend_t backend); - GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); + GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend); + GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); // Create a backend buffer from an existing pointer - GGML_API ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); - GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); + GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); #ifdef GGML_USE_CPU_HBM GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); @@ -183,7 +183,7 @@ extern "C" { GGML_API struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph); GGML_API void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy); - typedef bool (*ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); + typedef bool (*GGML_CALL ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); // Compare the output of two backends GGML_API bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 
c3e14bc96..568c411af 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7615,11 +7615,11 @@ struct cuda_pool_alloc { static bool g_cublas_loaded = false; -bool ggml_cublas_loaded(void) { +GGML_CALL bool ggml_cublas_loaded(void) { return g_cublas_loaded; } -void ggml_init_cublas() { +GGML_CALL void ggml_init_cublas() { static bool initialized = false; if (!initialized) { @@ -7707,7 +7707,7 @@ void ggml_init_cublas() { } } -void * ggml_cuda_host_malloc(size_t size) { +GGML_CALL void * ggml_cuda_host_malloc(size_t size) { if (getenv("GGML_CUDA_NO_PINNED") != nullptr) { return nullptr; } @@ -7725,7 +7725,7 @@ void * ggml_cuda_host_malloc(size_t size) { return ptr; } -void ggml_cuda_host_free(void * ptr) { +GGML_CALL void ggml_cuda_host_free(void * ptr) { CUDA_CHECK(cudaFreeHost(ptr)); } @@ -9242,7 +9242,7 @@ static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_rms_norm); } -bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { if (!g_cublas_loaded) return false; const int64_t ne10 = src1->ne[0]; @@ -10013,7 +10013,7 @@ static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_spl return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); } -static void ggml_cuda_set_main_device(const int main_device) { +GGML_CALL static void ggml_cuda_set_main_device(const int main_device) { if (main_device >= g_device_count) { fprintf(stderr, "warning: cannot set main_device=%d because there are only %d devices. Using device %d instead.\n", main_device, g_device_count, g_main_device); @@ -10028,7 +10028,7 @@ static void ggml_cuda_set_main_device(const int main_device) { } } -bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { +GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { if (!g_cublas_loaded) return false; ggml_cuda_func_t func; @@ -10186,7 +10186,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ return true; } -int ggml_cuda_get_device_count() { +GGML_CALL int ggml_cuda_get_device_count() { int device_count; if (cudaGetDeviceCount(&device_count) != cudaSuccess) { return 0; @@ -10194,7 +10194,7 @@ int ggml_cuda_get_device_count() { return device_count; } -void ggml_cuda_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL void ggml_cuda_get_device_description(int device, char * description, size_t description_size) { cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, device)); snprintf(description, description_size, "%s", prop.name); @@ -10244,27 +10244,27 @@ struct ggml_backend_cuda_buffer_context { } }; -static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; return ctx->name.c_str(); } -static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { +GGML_CALL static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { return buffer->iface.get_name == ggml_backend_cuda_buffer_get_name; } -static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL 
static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; CUDA_CHECK(cudaFree(ctx->dev_ptr)); delete ctx; } -static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; return ctx->dev_ptr; } -static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { +GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { @@ -10296,7 +10296,7 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g } } -static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10307,7 +10307,7 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg CUDA_CHECK(cudaDeviceSynchronize()); } -static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10318,7 +10318,7 @@ static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, co CUDA_CHECK(cudaDeviceSynchronize()); } -static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { if (ggml_backend_buffer_is_cuda(src->buffer)) { ggml_backend_cuda_buffer_context * src_ctx = (ggml_backend_cuda_buffer_context *)src->buffer->context; ggml_backend_cuda_buffer_context * dst_ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10335,7 +10335,7 @@ static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, co return false; } -static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; ggml_cuda_set_device(ctx->device); @@ -10357,19 +10357,18 @@ static ggml_backend_buffer_i ggml_backend_cuda_buffer_interface = { }; // cuda buffer type - struct ggml_backend_cuda_buffer_type_context { int device; std::string name; }; -static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { 
ggml_backend_cuda_buffer_type_context * ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; return ctx->name.c_str(); } -static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; ggml_cuda_set_device(buft_ctx->device); @@ -10388,13 +10387,13 @@ static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_bac return ggml_backend_buffer_init(buft, ggml_backend_cuda_buffer_interface, ctx, size); } -static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 128; UNUSED(buft); } -static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { +GGML_CALL static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); int64_t nrows_split = row_high - row_low; @@ -10414,7 +10413,7 @@ static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_t UNUSED(buft); } -static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { if (!ggml_backend_is_cuda(backend)) { return false; } @@ -10434,7 +10433,7 @@ static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { /* .is_host = */ NULL, }; -ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { // FIXME: this is not thread safe if (device >= ggml_backend_cuda_get_device_count()) { return nullptr; @@ -10479,7 +10478,7 @@ struct ggml_backend_cuda_split_buffer_context { std::vector tensor_extras; }; -static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) { return GGML_CUDA_NAME "_Split"; UNUSED(buffer); @@ -10490,19 +10489,19 @@ static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_ // return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; //} -static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; delete ctx; } -static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) { // the pointers are stored in the tensor extras, this is just a dummy address and never dereferenced return (void *)0x1000; UNUSED(buffer); } -static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { +GGML_CALL static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { GGML_ASSERT(tensor->view_src == nullptr); 
// views of split tensors are not supported ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; @@ -10552,7 +10551,7 @@ static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buf tensor->extra = extra; } -static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { // split tensors must always be set in their entirety at once GGML_ASSERT(offset == 0); GGML_ASSERT(size == ggml_nbytes(tensor)); @@ -10586,7 +10585,7 @@ static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buff } } -static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { // split tensors must always be set in their entirety at once GGML_ASSERT(offset == 0); GGML_ASSERT(size == ggml_nbytes(tensor)); @@ -10620,7 +10619,7 @@ static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buff } } -static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { UNUSED(buffer); UNUSED(value); } @@ -10639,13 +10638,13 @@ static struct ggml_backend_buffer_i ggml_backend_cuda_split_buffer_interface = { // cuda split buffer type -static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) { return GGML_CUDA_NAME "_Split"; UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { // since we don't know the exact split after rounding, we cannot allocate the device buffers at this point // instead, we allocate them for each tensor separately in init_tensor // however, the size still represents the maximum cumulative size of all the device buffers after the tensors are allocated, @@ -10655,13 +10654,13 @@ static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(gg return ggml_backend_buffer_init(buft, ggml_backend_cuda_split_buffer_interface, ctx, size); } -static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 128; UNUSED(buft); } -static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { +GGML_CALL static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { ggml_backend_cuda_split_buffer_type_context * ctx = (ggml_backend_cuda_split_buffer_type_context *)buft->context; size_t total_size = 0; @@ -10688,13 +10687,13 @@ static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_bu return 
total_size; } -static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_cuda(backend); UNUSED(buft); } -static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return false; UNUSED(buft); @@ -10709,7 +10708,7 @@ static ggml_backend_buffer_type_i ggml_backend_cuda_split_buffer_type_interface /* .is_host = */ ggml_backend_cuda_split_buffer_type_is_host, }; -ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) { // FIXME: this is not thread safe static std::map, struct ggml_backend_buffer_type> buft_map; @@ -10745,23 +10744,23 @@ ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * ten // host buffer type -static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { return GGML_CUDA_NAME "_Host"; UNUSED(buft); } -static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { return GGML_CUDA_NAME "_Host"; UNUSED(buffer); } -static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_cuda_host_free(buffer->context); } -static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { void * ptr = ggml_cuda_host_malloc(size); if (ptr == nullptr) { @@ -10777,7 +10776,7 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm return buffer; } -ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { /* .iface = */ { /* .get_name = */ ggml_backend_cuda_host_buffer_type_name, @@ -10795,26 +10794,26 @@ ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { // backend -static const char * ggml_backend_cuda_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_cuda_name(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return cuda_ctx->name.c_str(); } -static void ggml_backend_cuda_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cuda_free(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; delete cuda_ctx; delete backend; } -static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return 
ggml_backend_cuda_buffer_type(cuda_ctx->device); } -static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); @@ -10823,7 +10822,7 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); } -static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); @@ -10832,7 +10831,7 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } -static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; if (dst->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && ggml_backend_buffer_is_cuda(src->buffer)) { @@ -10843,7 +10842,7 @@ static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggm return false; } -static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[cuda_ctx->device][0])); @@ -10851,7 +10850,7 @@ static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { UNUSED(backend); } -static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -10890,7 +10889,7 @@ static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph return true; } -static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { +GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { @@ -11016,7 +11015,7 @@ static ggml_backend_i ggml_backend_cuda_interface = { /* .supports_op = */ ggml_backend_cuda_supports_op, }; -ggml_backend_t ggml_backend_cuda_init(int device) { +GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device) { ggml_init_cublas(); // TODO: remove from ggml.c if (device < 0 || device 
>= ggml_cuda_get_device_count()) { @@ -11040,35 +11039,35 @@ ggml_backend_t ggml_backend_cuda_init(int device) { return cuda_backend; } -bool ggml_backend_is_cuda(ggml_backend_t backend) { +GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_cuda_name; } -int ggml_backend_cuda_get_device_count() { +GGML_CALL int ggml_backend_cuda_get_device_count() { return ggml_cuda_get_device_count(); } -void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { ggml_cuda_get_device_description(device, description, description_size); } -void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { +GGML_CALL void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { ggml_cuda_set_device(device); CUDA_CHECK(cudaMemGetInfo(free, total)); } // backend registry -static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { +GGML_CALL static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { ggml_backend_t cuda_backend = ggml_backend_cuda_init((int) (intptr_t) user_data); return cuda_backend; UNUSED(params); } -extern "C" int ggml_backend_cuda_reg_devices(); +extern "C" GGML_CALL int ggml_backend_cuda_reg_devices(); -int ggml_backend_cuda_reg_devices() { +GGML_CALL int ggml_backend_cuda_reg_devices() { int device_count = ggml_cuda_get_device_count(); //int device_count = 1; // DEBUG: some tools require delaying CUDA initialization for (int i = 0; i < device_count; i++) { diff --git a/ggml-cuda.h b/ggml-cuda.h index d19cbf3fd..b1ebd61d7 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -18,34 +18,34 @@ extern "C" { #define GGML_CUDA_MAX_DEVICES 16 // Always success. To check if CUDA is actually loaded, use `ggml_cublas_loaded`. -GGML_API void ggml_init_cublas(void); +GGML_API GGML_CALL void ggml_init_cublas(void); // Returns `true` if there are available CUDA devices and cublas loads successfully; otherwise, it returns `false`. 
-GGML_API bool ggml_cublas_loaded(void); +GGML_API GGML_CALL bool ggml_cublas_loaded(void); -GGML_API void * ggml_cuda_host_malloc(size_t size); -GGML_API void ggml_cuda_host_free(void * ptr); +GGML_API GGML_CALL void * ggml_cuda_host_malloc(size_t size); +GGML_API GGML_CALL void ggml_cuda_host_free(void * ptr); -GGML_API bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); +GGML_API GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); -GGML_API int ggml_cuda_get_device_count(void); -GGML_API void ggml_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL int ggml_cuda_get_device_count(void); +GGML_API GGML_CALL void ggml_cuda_get_device_description(int device, char * description, size_t description_size); // backend API -GGML_API ggml_backend_t ggml_backend_cuda_init(int device); +GGML_API GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device); -GGML_API bool ggml_backend_is_cuda(ggml_backend_t backend); +GGML_API GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend); -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); // split tensor buffer that splits matrices by rows across multiple devices -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); // pinned host buffer for use with the CPU backend for faster copies between CPU and GPU -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); -GGML_API int ggml_backend_cuda_get_device_count(void); -GGML_API void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); -GGML_API void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); +GGML_API GGML_CALL int ggml_backend_cuda_get_device_count(void); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); #ifdef __cplusplus } diff --git a/ggml-metal.h b/ggml-metal.h index cd5e2995f..8b0bfc5f1 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -47,11 +47,11 @@ GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); -GGML_API ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); +GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); GGML_API void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb); -GGML_API ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); // helper to check if the device supports a specific family // ideally, the user code should be doing these checks diff 
--git a/ggml-metal.m b/ggml-metal.m index 2ca726055..867f2fd48 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2294,13 +2294,13 @@ static void ggml_backend_metal_free_device(void) { } } -static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { return "Metal"; UNUSED(buffer); } -static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; for (int i = 0; i < ctx->n_buffers; i++) { @@ -2315,25 +2315,25 @@ static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) free(ctx); } -static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; return ctx->all_data; } -static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); UNUSED(buffer); } -static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { memcpy(data, (const char *)tensor->data + offset, size); UNUSED(buffer); } -static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { if (ggml_backend_buffer_is_host(src->buffer)) { memcpy(dst->data, src->data, ggml_nbytes(src)); return true; @@ -2343,7 +2343,7 @@ static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, c UNUSED(buffer); } -static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; memset(ctx->all_data, value, ctx->all_size); @@ -2363,13 +2363,13 @@ static struct ggml_backend_buffer_i ggml_backend_metal_buffer_i = { // default buffer type -static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "Metal"; UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); const size_t 
size_page = sysconf(_SC_PAGESIZE); @@ -2421,24 +2421,24 @@ static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_ba return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } -static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 32; UNUSED(buft); } -static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_metal(backend) || ggml_backend_is_cpu(backend); UNUSED(buft); } -static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return true; UNUSED(buft); } -ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { /* .iface = */ { /* .get_name = */ ggml_backend_metal_buffer_type_get_name, @@ -2456,7 +2456,7 @@ ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { // buffer from ptr -ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); ctx->all_data = data; @@ -2543,31 +2543,31 @@ ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t siz // backend -static const char * ggml_backend_metal_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_metal_name(ggml_backend_t backend) { return "Metal"; UNUSED(backend); } -static void ggml_backend_metal_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_metal_free(ggml_backend_t backend) { struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; ggml_metal_free(ctx); free(backend); } -static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_metal_buffer_type(); UNUSED(backend); } -static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; return ggml_metal_graph_compute(metal_ctx, cgraph); } -static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { +GGML_CALL static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; return ggml_metal_supports_op(metal_ctx, op); @@ -2630,9 +2630,9 @@ bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { return [ctx->device supportsFamily:(MTLGPUFamilyApple1 + family - 1)]; } -ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning 
+GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning -ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { +GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { return ggml_backend_metal_init(); GGML_UNUSED(params); diff --git a/ggml.c b/ggml.c index ef5888ab2..5779f32d2 100644 --- a/ggml.c +++ b/ggml.c @@ -1990,19 +1990,19 @@ void ggml_print_objects(const struct ggml_context * ctx) { GGML_PRINT("%s: --- end ---\n", __func__); } -int64_t ggml_nelements(const struct ggml_tensor * tensor) { +GGML_CALL int64_t ggml_nelements(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[0]*tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; } -int64_t ggml_nrows(const struct ggml_tensor * tensor) { +GGML_CALL int64_t ggml_nrows(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; } -size_t ggml_nbytes(const struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_nbytes(const struct ggml_tensor * tensor) { size_t nbytes; size_t blck_size = ggml_blck_size(tensor->type); if (blck_size == 1) { @@ -2025,15 +2025,15 @@ size_t ggml_nbytes_pad(const struct ggml_tensor * tensor) { return GGML_PAD(ggml_nbytes(tensor), GGML_MEM_ALIGN); } -int ggml_blck_size(enum ggml_type type) { +GGML_CALL int ggml_blck_size(enum ggml_type type) { return type_traits[type].blck_size; } -size_t ggml_type_size(enum ggml_type type) { +GGML_CALL size_t ggml_type_size(enum ggml_type type) { return type_traits[type].type_size; } -size_t ggml_row_size(enum ggml_type type, int64_t ne) { +GGML_CALL size_t ggml_row_size(enum ggml_type type, int64_t ne) { assert(ne % ggml_blck_size(type) == 0); return ggml_type_size(type)*ne/ggml_blck_size(type); } @@ -2042,15 +2042,15 @@ double ggml_type_sizef(enum ggml_type type) { return ((double)(type_traits[type].type_size))/type_traits[type].blck_size; } -const char * ggml_type_name(enum ggml_type type) { +GGML_CALL const char * ggml_type_name(enum ggml_type type) { return type_traits[type].type_name; } -bool ggml_is_quantized(enum ggml_type type) { +GGML_CALL bool ggml_is_quantized(enum ggml_type type) { return type_traits[type].is_quantized; } -const char * ggml_op_name(enum ggml_op op) { +GGML_CALL const char * ggml_op_name(enum ggml_op op) { return GGML_OP_NAME[op]; } @@ -2062,7 +2062,7 @@ const char * ggml_unary_op_name(enum ggml_unary_op op) { return GGML_UNARY_OP_NAME[op]; } -const char * ggml_op_desc(const struct ggml_tensor * t) { +GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t) { if (t->op == GGML_OP_UNARY) { enum ggml_unary_op uop = ggml_get_unary_op(t); return ggml_unary_op_name(uop); @@ -2072,7 +2072,7 @@ const char * ggml_op_desc(const struct ggml_tensor * t) { } } -size_t ggml_element_size(const struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor) { return ggml_type_size(tensor->type); } @@ -2154,11 +2154,11 @@ size_t ggml_tensor_overhead(void) { return GGML_OBJECT_SIZE + GGML_TENSOR_SIZE; } -bool ggml_is_transposed(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor) { return tensor->nb[0] > tensor->nb[1]; } -bool ggml_is_contiguous(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor) { 
static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return @@ -2177,7 +2177,7 @@ static inline bool ggml_is_contiguous_except_dim_1(const struct ggml_tensor * te tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; } -bool ggml_is_permuted(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_permuted(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->nb[0] > tensor->nb[1] || tensor->nb[1] > tensor->nb[2] || tensor->nb[2] > tensor->nb[3]; @@ -3079,7 +3079,7 @@ float * ggml_get_data_f32(const struct ggml_tensor * tensor) { return (float *)(tensor->data); } -enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor) { +GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor) { GGML_ASSERT(tensor->op == GGML_OP_UNARY); return (enum ggml_unary_op) ggml_get_op_params_i32(tensor, 0); } @@ -11653,7 +11653,7 @@ static void ggml_rope_cache_init( } } -void ggml_rope_yarn_corr_dims( +GGML_CALL void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] ) { // start and end correction dims diff --git a/ggml.h b/ggml.h index 1187074f7..837c52e68 100644 --- a/ggml.h +++ b/ggml.h @@ -187,6 +187,16 @@ # define GGML_API #endif +#ifdef GGML_MULTIPLATFORM +# if defined(_WIN32) +# define GGML_CALL +# else +# define GGML_CALL __attribute__((__ms_abi__)) +# endif +#else +# define GGML_CALL +#endif + // TODO: support for clang #ifdef __GNUC__ # define GGML_DEPRECATED(func, hint) func __attribute__((deprecated(hint))) @@ -649,41 +659,41 @@ extern "C" { GGML_API void ggml_print_object (const struct ggml_object * obj); GGML_API void ggml_print_objects(const struct ggml_context * ctx); - GGML_API int64_t ggml_nelements (const struct ggml_tensor * tensor); - GGML_API int64_t ggml_nrows (const struct ggml_tensor * tensor); - GGML_API size_t ggml_nbytes (const struct ggml_tensor * tensor); - GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN + GGML_API GGML_CALL int64_t ggml_nelements (const struct ggml_tensor * tensor); + GGML_API GGML_CALL int64_t ggml_nrows (const struct ggml_tensor * tensor); + GGML_API GGML_CALL size_t ggml_nbytes (const struct ggml_tensor * tensor); + GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN - GGML_API int ggml_blck_size(enum ggml_type type); - GGML_API size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block - GGML_API size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row + GGML_API GGML_CALL int ggml_blck_size(enum ggml_type type); + GGML_API GGML_CALL size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block + GGML_API GGML_CALL size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row GGML_DEPRECATED( GGML_API double ggml_type_sizef(enum ggml_type type), // ggml_type_size()/ggml_blck_size() as float "use ggml_row_size() instead"); - GGML_API const char * ggml_type_name(enum ggml_type type); - GGML_API const char * ggml_op_name (enum ggml_op op); - GGML_API const char * ggml_op_symbol(enum ggml_op op); + GGML_API GGML_CALL const char * ggml_type_name(enum ggml_type type); + GGML_API GGML_CALL const char * ggml_op_name (enum ggml_op op); + GGML_API const char * ggml_op_symbol(enum 
ggml_op op); - GGML_API const char * ggml_unary_op_name(enum ggml_unary_op op); - GGML_API const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name + GGML_API const char * ggml_unary_op_name(enum ggml_unary_op op); + GGML_API GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name - GGML_API size_t ggml_element_size(const struct ggml_tensor * tensor); + GGML_API GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor); - GGML_API bool ggml_is_quantized(enum ggml_type type); + GGML_API GGML_CALL bool ggml_is_quantized(enum ggml_type type); // TODO: temporary until model loading of ggml examples is refactored GGML_API enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype); - GGML_API bool ggml_is_transposed(const struct ggml_tensor * tensor); - GGML_API bool ggml_is_contiguous(const struct ggml_tensor * tensor); - GGML_API bool ggml_is_permuted (const struct ggml_tensor * tensor); - GGML_API bool ggml_is_scalar (const struct ggml_tensor * tensor); - GGML_API bool ggml_is_vector (const struct ggml_tensor * tensor); - GGML_API bool ggml_is_matrix (const struct ggml_tensor * tensor); - GGML_API bool ggml_is_3d (const struct ggml_tensor * tensor); - GGML_API int ggml_n_dims (const struct ggml_tensor * tensor); // returns 1 for scalars + GGML_API GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor); + GGML_API GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor); + GGML_API GGML_CALL bool ggml_is_permuted (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_scalar (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_vector (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_matrix (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_3d (const struct ggml_tensor * tensor); + GGML_API int ggml_n_dims (const struct ggml_tensor * tensor); // returns 1 for scalars GGML_API bool ggml_are_same_shape(const struct ggml_tensor * t0, const struct ggml_tensor * t1); @@ -770,7 +780,7 @@ extern "C" { GGML_API void * ggml_get_data (const struct ggml_tensor * tensor); GGML_API float * ggml_get_data_f32(const struct ggml_tensor * tensor); - GGML_API enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor); + GGML_API GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor); GGML_API const char * ggml_get_name (const struct ggml_tensor * tensor); GGML_API struct ggml_tensor * ggml_set_name ( struct ggml_tensor * tensor, const char * name); @@ -1413,7 +1423,7 @@ extern "C" { float beta_slow); // compute correction dims for YaRN RoPE scaling - void ggml_rope_yarn_corr_dims( + GGML_CALL void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2]); // xPos RoPE, in-place, returns view(a) From 122ed4840cc6d209df6043e027f9f8a03aee01da Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Tue, 16 Jan 2024 13:10:48 +0100 Subject: [PATCH 328/811] examples : fix and improv docs for the grammar generator (#4909) * Create pydantic-models-to-grammar.py * Added some comments for usage * Refactored Grammar Generator Added example and usage instruction. * Update pydantic_models_to_grammar.py * Update pydantic-models-to-grammar-examples.py * Renamed module and imported it. * Update pydantic-models-to-grammar.py * Renamed file and fixed grammar generator issue. * Fixed some issues and bugs of the grammar generator. 
Imporved Documentation * Update pydantic_models_to_grammar.py --- examples/pydantic_models_to_grammar.py | 877 +++++++++++++++---------- 1 file changed, 519 insertions(+), 358 deletions(-) diff --git a/examples/pydantic_models_to_grammar.py b/examples/pydantic_models_to_grammar.py index 41b98fdc1..848c1c367 100644 --- a/examples/pydantic_models_to_grammar.py +++ b/examples/pydantic_models_to_grammar.py @@ -4,6 +4,7 @@ from copy import copy from inspect import isclass, getdoc from types import NoneType +from docstring_parser import parse from pydantic import BaseModel, create_model, Field from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias from enum import Enum @@ -25,9 +26,10 @@ class PydanticDataType(Enum): ENUM (str): Represents an enum data type. CUSTOM_CLASS (str): Represents a custom class data type. """ + STRING = "string" TRIPLE_QUOTED_STRING = "triple_quoted_string" - MARKDOWN_STRING = "markdown_string" + MARKDOWN_CODE_BLOCK = "markdown_code_block" BOOLEAN = "boolean" INTEGER = "integer" FLOAT = "float" @@ -78,10 +80,10 @@ def map_pydantic_type_to_gbnf(pydantic_type: Type[Any]) -> str: def format_model_and_field_name(model_name: str) -> str: - parts = re.findall('[A-Z][^A-Z]*', model_name) + parts = re.findall("[A-Z][^A-Z]*", model_name) if not parts: # Check if the list is empty return model_name.lower().replace("_", "-") - return '-'.join(part.lower().replace("_", "-") for part in parts) + return "-".join(part.lower().replace("_", "-") for part in parts) def generate_list_rule(element_type): @@ -93,29 +95,31 @@ def generate_list_rule(element_type): """ rule_name = f"{map_pydantic_type_to_gbnf(element_type)}-list" element_rule = map_pydantic_type_to_gbnf(element_type) - list_rule = fr'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"' + list_rule = rf'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"' return list_rule def get_members_structure(cls, rule_name): if issubclass(cls, Enum): # Handle Enum types - members = [f'\"\\\"{member.value}\\\"\"' for name, member in cls.__members__.items()] + members = [f'"\\"{member.value}\\""' for name, member in cls.__members__.items()] return f"{cls.__name__.lower()} ::= " + " | ".join(members) if cls.__annotations__ and cls.__annotations__ != {}: result = f'{rule_name} ::= "{{"' type_list_rules = [] # Modify this comprehension - members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param_type)}' - for name, param_type in cls.__annotations__.items() - if name != 'self'] + members = [ + f' "\\"{name}\\"" ":" {map_pydantic_type_to_gbnf(param_type)}' + for name, param_type in cls.__annotations__.items() + if name != "self" + ] result += '"," '.join(members) result += ' "}"' return result, type_list_rules elif rule_name == "custom-class-any": - result = f'{rule_name} ::= ' - result += 'value' + result = f"{rule_name} ::= " + result += "value" type_list_rules = [] return result, type_list_rules else: @@ -124,9 +128,11 @@ def get_members_structure(cls, rule_name): result = f'{rule_name} ::= "{{"' type_list_rules = [] # Modify this comprehension too - members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param.annotation)}' - for name, param in parameters.items() - if name != 'self' and param.annotation != inspect.Parameter.empty] + members = [ + f' "\\"{name}\\"" ":" {map_pydantic_type_to_gbnf(param.annotation)}' + for name, param in parameters.items() + if name != "self" and param.annotation != inspect.Parameter.empty + ] result += '", "'.join(members) result += 
' "}"' @@ -141,8 +147,8 @@ def regex_to_gbnf(regex_pattern: str) -> str: gbnf_rule = regex_pattern # Translate common regex components to GBNF - gbnf_rule = gbnf_rule.replace('\\d', '[0-9]') - gbnf_rule = gbnf_rule.replace('\\s', '[ \t\n]') + gbnf_rule = gbnf_rule.replace("\\d", "[0-9]") + gbnf_rule = gbnf_rule.replace("\\s", "[ \t\n]") # Handle quantifiers and other regex syntax that is similar in GBNF # (e.g., '*', '+', '?', character classes) @@ -158,12 +164,12 @@ def generate_gbnf_integer_rules(max_digit=None, min_digit=None): Generates GBNF (Generalized Backus-Naur Form) rules for integers based on the given maximum and minimum digits. Parameters: - max_digit (int): The maximum number of digits for the integer. Default is None. - min_digit (int): The minimum number of digits for the integer. Default is None. + max_digit (int): The maximum number of digits for the integer. Default is None. + min_digit (int): The minimum number of digits for the integer. Default is None. Returns: - integer_rule (str): The identifier for the integer rule generated. - additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. + integer_rule (str): The identifier for the integer rule generated. + additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. """ additional_rules = [] @@ -178,21 +184,21 @@ def generate_gbnf_integer_rules(max_digit=None, min_digit=None): # Handling Integer Rules if max_digit is not None or min_digit is not None: # Start with an empty rule part - integer_rule_part = '' + integer_rule_part = "" # Add mandatory digits as per min_digit if min_digit is not None: - integer_rule_part += '[0-9] ' * min_digit + integer_rule_part += "[0-9] " * min_digit # Add optional digits up to max_digit if max_digit is not None: optional_digits = max_digit - (min_digit if min_digit is not None else 0) - integer_rule_part += ''.join(['[0-9]? ' for _ in range(optional_digits)]) + integer_rule_part += "".join(["[0-9]? 
" for _ in range(optional_digits)]) # Trim the rule part and append it to additional rules integer_rule_part = integer_rule_part.strip() if integer_rule_part: - additional_rules.append(f'{integer_rule} ::= {integer_rule_part}') + additional_rules.append(f"{integer_rule} ::= {integer_rule_part}") return integer_rule, additional_rules @@ -224,21 +230,26 @@ def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None additional_rules = [] # Define the integer part rule - integer_part_rule = "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + ( + integer_part_rule = ( + "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + ( f"-min{min_digit}" if min_digit is not None else "") + ) # Define the fractional part rule based on precision constraints fractional_part_rule = "fractional-part" - fractional_rule_part = '' + fractional_rule_part = "" if max_precision is not None or min_precision is not None: fractional_part_rule += (f"-max{max_precision}" if max_precision is not None else "") + ( - f"-min{min_precision}" if min_precision is not None else "") + f"-min{min_precision}" if min_precision is not None else "" + ) # Minimum number of digits - fractional_rule_part = '[0-9]' * (min_precision if min_precision is not None else 1) + fractional_rule_part = "[0-9]" * (min_precision if min_precision is not None else 1) # Optional additional digits - fractional_rule_part += ''.join([' [0-9]?'] * ( - (max_precision - (min_precision if min_precision is not None else 1)) if max_precision is not None else 0)) - additional_rules.append(f'{fractional_part_rule} ::= {fractional_rule_part}') + fractional_rule_part += "".join( + [" [0-9]?"] * ((max_precision - ( + min_precision if min_precision is not None else 1)) if max_precision is not None else 0) + ) + additional_rules.append(f"{fractional_part_rule} ::= {fractional_rule_part}") # Define the float rule float_rule = f"float-{max_digit if max_digit is not None else 'X'}-{min_digit if min_digit is not None else 'X'}-{max_precision if max_precision is not None else 'X'}-{min_precision if min_precision is not None else 'X'}" @@ -246,20 +257,19 @@ def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None # Generating the integer part rule definition, if necessary if max_digit is not None or min_digit is not None: - integer_rule_part = '[0-9]' + integer_rule_part = "[0-9]" if min_digit is not None and min_digit > 1: - integer_rule_part += ' [0-9]' * (min_digit - 1) + integer_rule_part += " [0-9]" * (min_digit - 1) if max_digit is not None: - integer_rule_part += ''.join([' [0-9]?'] * (max_digit - (min_digit if min_digit is not None else 1))) - additional_rules.append(f'{integer_part_rule} ::= {integer_rule_part.strip()}') + integer_rule_part += "".join([" [0-9]?"] * (max_digit - (min_digit if min_digit is not None else 1))) + additional_rules.append(f"{integer_part_rule} ::= {integer_rule_part.strip()}") return float_rule, additional_rules -def generate_gbnf_rule_for_type(model_name, field_name, - field_type, is_optional, processed_models, created_rules, - field_info=None) -> \ - Tuple[str, list]: +def generate_gbnf_rule_for_type( + model_name, field_name, field_type, is_optional, processed_models, created_rules, field_info=None +) -> Tuple[str, list]: """ Generate GBNF rule for a given field type. 
@@ -282,20 +292,19 @@ def generate_gbnf_rule_for_type(model_name, field_name, if isclass(field_type) and issubclass(field_type, BaseModel): nested_model_name = format_model_and_field_name(field_type.__name__) - nested_model_rules = generate_gbnf_grammar(field_type, processed_models, created_rules) + nested_model_rules, _ = generate_gbnf_grammar(field_type, processed_models, created_rules) rules.extend(nested_model_rules) gbnf_type, rules = nested_model_name, rules elif isclass(field_type) and issubclass(field_type, Enum): - enum_values = [f'\"\\\"{e.value}\\\"\"' for e in field_type] # Adding escaped quotes + enum_values = [f'"\\"{e.value}\\""' for e in field_type] # Adding escaped quotes enum_rule = f"{model_name}-{field_name} ::= {' | '.join(enum_values)}" rules.append(enum_rule) gbnf_type, rules = model_name + "-" + field_name, rules - elif get_origin(field_type) == list or field_type == list: # Array + elif get_origin(field_type) == list: # Array element_type = get_args(field_type)[0] - element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-element", - element_type, is_optional, processed_models, - created_rules) + element_rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-element", element_type, is_optional, processed_models, created_rules + ) rules.extend(additional_rules) array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ rules.append(array_rule) @@ -303,10 +312,9 @@ def generate_gbnf_rule_for_type(model_name, field_name, elif get_origin(field_type) == set or field_type == set: # Array element_type = get_args(field_type)[0] - element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-element", - element_type, is_optional, processed_models, - created_rules) + element_rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-element", element_type, is_optional, processed_models, created_rules + ) rules.extend(additional_rules) array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ rules.append(array_rule) @@ -318,15 +326,13 @@ def generate_gbnf_rule_for_type(model_name, field_name, elif gbnf_type.startswith("custom-dict-"): key_type, value_type = get_args(field_type) - additional_key_type, additional_key_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-key-type", - key_type, is_optional, processed_models, - created_rules) - additional_value_type, additional_value_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-value-type", - value_type, is_optional, - processed_models, created_rules) - gbnf_type = fr'{gbnf_type} ::= "{{" ( {additional_key_type} ":" {additional_value_type} ("," {additional_key_type} ":" {additional_value_type})* )? "}}" ' + additional_key_type, additional_key_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-key-type", key_type, is_optional, processed_models, created_rules + ) + additional_value_type, additional_value_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-value-type", value_type, is_optional, processed_models, created_rules + ) + gbnf_type = rf'{gbnf_type} ::= "{{" ( {additional_key_type} ": " {additional_value_type} ("," "\n" ws {additional_key_type} ":" {additional_value_type})* )? 
"}}" ' rules.extend(additional_key_rules) rules.extend(additional_value_rules) @@ -336,19 +342,16 @@ def generate_gbnf_rule_for_type(model_name, field_name, for union_type in union_types: if isinstance(union_type, _GenericAlias): - union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, - field_name, union_type, - False, - processed_models, created_rules) + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type( + model_name, field_name, union_type, False, processed_models, created_rules + ) union_rules.append(union_gbnf_type) rules.extend(union_rules_list) - elif not issubclass(union_type, NoneType): - union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, - field_name, union_type, - False, - processed_models, created_rules) + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type( + model_name, field_name, union_type, False, processed_models, created_rules + ) union_rules.append(union_gbnf_type) rules.extend(union_rules_list) @@ -363,45 +366,58 @@ def generate_gbnf_rule_for_type(model_name, field_name, else: gbnf_type = f"{model_name}-{field_name}-union" elif isclass(field_type) and issubclass(field_type, str): - if field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None: - - triple_quoted_string = field_info.json_schema_extra.get('triple_quoted_string', False) - markdown_string = field_info.json_schema_extra.get('markdown_string', False) + if field_info and hasattr(field_info, "json_schema_extra") and field_info.json_schema_extra is not None: + triple_quoted_string = field_info.json_schema_extra.get("triple_quoted_string", False) + markdown_string = field_info.json_schema_extra.get("markdown_code_block", False) gbnf_type = PydanticDataType.TRIPLE_QUOTED_STRING.value if triple_quoted_string else PydanticDataType.STRING.value - gbnf_type = PydanticDataType.MARKDOWN_STRING.value if markdown_string else gbnf_type + gbnf_type = PydanticDataType.MARKDOWN_CODE_BLOCK.value if markdown_string else gbnf_type - elif field_info and hasattr(field_info, 'pattern'): + elif field_info and hasattr(field_info, "pattern"): # Convert regex pattern to grammar rule regex_pattern = field_info.regex.pattern gbnf_type = f"pattern-{field_name} ::= {regex_to_gbnf(regex_pattern)}" else: gbnf_type = PydanticDataType.STRING.value - elif isclass(field_type) and issubclass(field_type, float) and field_info and hasattr(field_info, - 'json_schema_extra') and field_info.json_schema_extra is not None: + elif ( + isclass(field_type) + and issubclass(field_type, float) + and field_info + and hasattr(field_info, "json_schema_extra") + and field_info.json_schema_extra is not None + ): # Retrieve precision attributes for floats - max_precision = field_info.json_schema_extra.get('max_precision') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_precision = field_info.json_schema_extra.get('min_precision') if field_info and hasattr(field_info, - 'json_schema_extra') else None - max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None + max_precision = ( + field_info.json_schema_extra.get("max_precision") if field_info and hasattr(field_info, + "json_schema_extra") else None + ) + min_precision = ( + field_info.json_schema_extra.get("min_precision") if field_info and hasattr(field_info, + 
"json_schema_extra") else None + ) + max_digits = field_info.json_schema_extra.get("max_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None + min_digits = field_info.json_schema_extra.get("min_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None # Generate GBNF rule for float with given attributes - gbnf_type, rules = generate_gbnf_float_rules(max_digit=max_digits, min_digit=min_digits, - max_precision=max_precision, - min_precision=min_precision) + gbnf_type, rules = generate_gbnf_float_rules( + max_digit=max_digits, min_digit=min_digits, max_precision=max_precision, min_precision=min_precision + ) - elif isclass(field_type) and issubclass(field_type, int) and field_info and hasattr(field_info, - 'json_schema_extra') and field_info.json_schema_extra is not None: + elif ( + isclass(field_type) + and issubclass(field_type, int) + and field_info + and hasattr(field_info, "json_schema_extra") + and field_info.json_schema_extra is not None + ): # Retrieve digit attributes for integers - max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None + max_digits = field_info.json_schema_extra.get("max_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None + min_digits = field_info.json_schema_extra.get("min_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None # Generate GBNF rule for integer with given attributes gbnf_type, rules = generate_gbnf_integer_rules(max_digit=max_digits, min_digit=min_digits) @@ -443,13 +459,13 @@ def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created if not issubclass(model, BaseModel): # For non-Pydantic classes, generate model_fields from __annotations__ or __init__ - if hasattr(model, '__annotations__') and model.__annotations__: + if hasattr(model, "__annotations__") and model.__annotations__: model_fields = {name: (typ, ...) 
for name, typ in model.__annotations__.items()} else: init_signature = inspect.signature(model.__init__) parameters = init_signature.parameters - model_fields = {name: (param.annotation, param.default) for name, param in parameters.items() - if name != 'self'} + model_fields = {name: (param.annotation, param.default) for name, param in parameters.items() if + name != "self"} else: # For Pydantic models, use model_fields and check for ellipsis (required fields) model_fields = model.__annotations__ @@ -469,51 +485,55 @@ def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created field_type = field_info field_info = model.model_fields[field_name] is_optional = field_info.is_required is False and get_origin(field_type) is Optional - rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - format_model_and_field_name(field_name), - field_type, is_optional, - processed_models, created_rules, field_info) - look_for_markdown_code_block = True if rule_name == "markdown_string" else False + rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, format_model_and_field_name(field_name), field_type, is_optional, processed_models, + created_rules, field_info + ) + look_for_markdown_code_block = True if rule_name == "markdown_code_block" else False look_for_triple_quoted_string = True if rule_name == "triple_quoted_string" else False if not look_for_markdown_code_block and not look_for_triple_quoted_string: if rule_name not in created_rules: created_rules[rule_name] = additional_rules - model_rule_parts.append(f' ws \"\\\"{field_name}\\\"\" ": " {rule_name}') # Adding escaped quotes + model_rule_parts.append(f' ws "\\"{field_name}\\"" ":" ws {rule_name}') # Adding escaped quotes nested_rules.extend(additional_rules) else: - has_triple_quoted_string = look_for_markdown_code_block - has_markdown_code_block = look_for_triple_quoted_string + has_triple_quoted_string = look_for_triple_quoted_string + has_markdown_code_block = look_for_markdown_code_block fields_joined = r' "," "\n" '.join(model_rule_parts) - model_rule = fr'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"' - - if look_for_markdown_code_block or look_for_triple_quoted_string: - model_rule += ' ws "}"' + model_rule = rf'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"' + has_special_string = False if has_triple_quoted_string: + model_rule += '"\\n" ws "}"' model_rule += '"\\n" triple-quoted-string' + has_special_string = True if has_markdown_code_block: + model_rule += '"\\n" ws "}"' model_rule += '"\\n" markdown-code-block' + has_special_string = True all_rules = [model_rule] + nested_rules - return all_rules, has_markdown_code_block, has_triple_quoted_string + return all_rules, has_special_string -def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], outer_object_name: str = None, - outer_object_content: str = None, list_of_outputs: bool = False) -> str: +def generate_gbnf_grammar_from_pydantic_models( + models: List[Type[BaseModel]], outer_object_name: str = None, outer_object_content: str = None, + list_of_outputs: bool = False +) -> str: """ Generate GBNF Grammar from Pydantic Models. This method takes a list of Pydantic models and uses them to generate a GBNF grammar string. The generated grammar string can be used for parsing and validating data using the generated * grammar. - Parameters: - models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from. - outer_object_name (str): Outer object name for the GBNF grammar. 
If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - list_of_outputs (str, optional): Allows a list of output objects + Args: + models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + list_of_outputs (str, optional): Allows a list of output objects Returns: - str: The generated GBNF grammar string. + str: The generated GBNF grammar string. Examples: models = [UserModel, PostModel] @@ -527,52 +547,53 @@ def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], ou all_rules = [] created_rules = {} if outer_object_name is None: - for model in models: - model_rules, _, _ = generate_gbnf_grammar(model, - processed_models, created_rules) + model_rules, _ = generate_gbnf_grammar(model, processed_models, created_rules) all_rules.extend(model_rules) if list_of_outputs: - root_rule = r'root ::= ws "[" grammar-models ("," grammar-models)* "]"' + "\n" + root_rule = r'root ::= (" "| "\n") "[" ws grammar-models ("," ws grammar-models)* ws "]"' + "\n" else: - root_rule = r'root ::= ws grammar-models' + "\n" + root_rule = r'root ::= (" "| "\n") grammar-models' + "\n" root_rule += "grammar-models ::= " + " | ".join( [format_model_and_field_name(model.__name__) for model in models]) all_rules.insert(0, root_rule) return "\n".join(all_rules) elif outer_object_name is not None: if list_of_outputs: - root_rule = fr'root ::= ws "[" {format_model_and_field_name(outer_object_name)} ("," {format_model_and_field_name(outer_object_name)})* "]"' + "\n" + root_rule = ( + rf'root ::= (" "| "\n") "[" ws {format_model_and_field_name(outer_object_name)} ("," ws {format_model_and_field_name(outer_object_name)})* ws "]"' + + "\n" + ) else: root_rule = f"root ::= {format_model_and_field_name(outer_object_name)}\n" - model_rule = fr'{format_model_and_field_name(outer_object_name)} ::= ws "{{" ws "\"{outer_object_name}\"" ": " grammar-models' + model_rule = ( + rf'{format_model_and_field_name(outer_object_name)} ::= (" "| "\n") "{{" ws "\"{outer_object_name}\"" ":" ws grammar-models' + ) fields_joined = " | ".join( - [fr'{format_model_and_field_name(model.__name__)}-grammar-model' for model in models]) + [rf"{format_model_and_field_name(model.__name__)}-grammar-model" for model in models]) - grammar_model_rules = f'\ngrammar-models ::= {fields_joined}' + grammar_model_rules = f"\ngrammar-models ::= {fields_joined}" mod_rules = [] for model in models: - mod_rule = fr'{format_model_and_field_name(model.__name__)}-grammar-model ::= ws' - mod_rule += fr'"\"{format_model_and_field_name(model.__name__)}\"" "," ws "\"{outer_object_content}\"" ws ":" ws {format_model_and_field_name(model.__name__)}' + '\n' + mod_rule = rf"{format_model_and_field_name(model.__name__)}-grammar-model ::= " + mod_rule += ( + rf'"\"{model.__name__}\"" "," ws "\"{outer_object_content}\"" ":" ws {format_model_and_field_name(model.__name__)}' + "\n" + ) mod_rules.append(mod_rule) grammar_model_rules += "\n" + "\n".join(mod_rules) - look_for_markdown_code_block = False - look_for_triple_quoted_string = False + for model in models: - model_rules, markdown_block, 
triple_quoted_string = generate_gbnf_grammar(model, - processed_models, created_rules) + model_rules, has_special_string = generate_gbnf_grammar(model, processed_models, + created_rules) + + if not has_special_string: + model_rules[0] += r'"\n" ws "}"' + all_rules.extend(model_rules) - if markdown_block: - look_for_markdown_code_block = True - if triple_quoted_string: - look_for_triple_quoted_string = True - - if not look_for_markdown_code_block and not look_for_triple_quoted_string: - model_rule += ' ws "}"' all_rules.insert(0, root_rule + model_rule + grammar_model_rules) return "\n".join(all_rules) @@ -582,10 +603,10 @@ def get_primitive_grammar(grammar): Returns the needed GBNF primitive grammar for a given GBNF grammar string. Args: - grammar (str): The string containing the GBNF grammar. + grammar (str): The string containing the GBNF grammar. Returns: - str: GBNF primitive grammar string. + str: GBNF primitive grammar string. """ type_list = [] if "string-list" in grammar: @@ -611,7 +632,7 @@ integer ::= [0-9]+""" any_block = "" if "custom-class-any" in grammar: - any_block = ''' + any_block = """ value ::= object | array | string | number | boolean | null object ::= @@ -626,7 +647,7 @@ array ::= ("," ws value)* )? "]" ws -number ::= integer | float''' +number ::= integer | float""" markdown_code_block_grammar = "" if "markdown-code-block" in grammar: @@ -641,90 +662,32 @@ closing-triple-ticks ::= "```" "\n"''' triple-quoted-string ::= triple-quotes triple-quoted-string-content triple-quotes triple-quoted-string-content ::= ( [^'] | "'" [^'] | "'" "'" [^'] )* triple-quotes ::= "'''" """ - return "\n" + '\n'.join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar + return "\n" + "\n".join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar -def generate_field_markdown(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1) -> str: - indent = ' ' * depth - field_markdown = f"{indent}- **{field_name}** (`{field_type.__name__}`): " - - # Extracting field description from Pydantic Field using __model_fields__ - field_info = model.model_fields.get(field_name) - field_description = field_info.description if field_info and field_info.description else "No description available." - - field_markdown += field_description + '\n' - - # Handling nested BaseModel fields - if isclass(field_type) and issubclass(field_type, BaseModel): - field_markdown += f"{indent} - Details:\n" - for name, type_ in field_type.__annotations__.items(): - field_markdown += generate_field_markdown(name, type_, field_type, depth + 2) - - return field_markdown - - -def generate_markdown_report(pydantic_models: List[Type[BaseModel]]) -> str: - markdown = "" - for model in pydantic_models: - markdown += f"### {format_model_and_field_name(model.__name__)}\n" - - # Check if the model's docstring is different from BaseModel's docstring - class_doc = getdoc(model) - base_class_doc = getdoc(BaseModel) - class_description = class_doc if class_doc and class_doc != base_class_doc else "No specific description available." 
- - markdown += f"{class_description}\n\n" - markdown += "#### Fields\n" - - if isclass(model) and issubclass(model, BaseModel): - for name, field_type in model.__annotations__.items(): - markdown += generate_field_markdown(format_model_and_field_name(name), field_type, model) - markdown += "\n" - - return markdown - - -def format_json_example(example: dict, depth: int) -> str: +def generate_markdown_documentation( + pydantic_models: List[Type[BaseModel]], model_prefix="Model", fields_prefix="Fields", + documentation_with_field_description=True +) -> str: """ - Format a JSON example into a readable string with indentation. + Generate markdown documentation for a list of Pydantic models. Args: - example (dict): JSON example to be formatted. - depth (int): Indentation depth. + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - str: Formatted JSON example string. - """ - indent = ' ' * depth - formatted_example = '{\n' - for key, value in example.items(): - value_text = f"'{value}'" if isinstance(value, str) else value - formatted_example += f"{indent}{key}: {value_text},\n" - formatted_example = formatted_example.rstrip(',\n') + '\n' + indent + '}' - return formatted_example - - -def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_prefix="Model", - fields_prefix="Fields", documentation_with_field_description=True) -> str: - """ - Generate text documentation for a list of Pydantic models. - - Args: - pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. - model_prefix (str): Prefix for the model section. - fields_prefix (str): Prefix for the fields section. - documentation_with_field_description (bool): Include field descriptions in the documentation. - - Returns: - str: Generated text documentation. + str: Generated text documentation. 
""" documentation = "" pyd_models = [(model, True) for model in pydantic_models] for model, add_prefix in pyd_models: if add_prefix: - documentation += f"{model_prefix}: {format_model_and_field_name(model.__name__)}\n" + documentation += f"{model_prefix}: {model.__name__}\n" else: - documentation += f"Model: {format_model_and_field_name(model.__name__)}\n" + documentation += f"Model: {model.__name__}\n" # Handling multi-line model description with proper indentation @@ -733,7 +696,7 @@ def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_pr class_description = class_doc if class_doc and class_doc != base_class_doc else "" if class_description != "": documentation += " Description: " - documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + documentation += format_multiline_description(class_description, 0) + "\n" if add_prefix: # Indenting the fields section @@ -753,35 +716,192 @@ def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_pr for element_type in element_types: if isclass(element_type) and issubclass(element_type, BaseModel): pyd_models.append((element_type, False)) - documentation += generate_field_text(name, field_type, model, - documentation_with_field_description=documentation_with_field_description) + documentation += generate_field_markdown( + name, field_type, model, documentation_with_field_description=documentation_with_field_description + ) documentation += "\n" - if hasattr(model, 'Config') and hasattr(model.Config, - 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" - json_example = json.dumps(model.Config.json_schema_extra['example']) + json_example = json.dumps(model.Config.json_schema_extra["example"]) documentation += format_multiline_description(json_example, 2) + "\n" return documentation -def generate_field_text(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, - documentation_with_field_description=True) -> str: +def generate_field_markdown( + field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, + documentation_with_field_description=True +) -> str: + """ + Generate markdown documentation for a Pydantic model field. + + Args: + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation for the field. 
+ """ + indent = " " * depth + + field_info = model.model_fields.get(field_name) + field_description = field_info.description if field_info and field_info.description else "" + + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)} of {format_model_and_field_name(element_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + elif get_origin(field_type) == Union: + element_types = get_args(field_type) + types = [] + for element_type in element_types: + types.append(format_model_and_field_name(element_type.__name__)) + field_text = f"{indent}{field_name} ({' or '.join(types)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + else: + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + + if not documentation_with_field_description: + return field_text + + if field_description != "": + field_text += f" Description: " + field_description + "\n" + + # Check for and include field-specific examples if available + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra["example"].get(field_name) + if field_example is not None: + example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example + field_text += f"{indent} Example: {example_text}\n" + + if isclass(field_type) and issubclass(field_type, BaseModel): + field_text += f"{indent} Details:\n" + for name, type_ in field_type.__annotations__.items(): + field_text += generate_field_markdown(name, type_, field_type, depth + 2) + + return field_text + + +def format_json_example(example: dict, depth: int) -> str: + """ + Format a JSON example into a readable string with indentation. + + Args: + example (dict): JSON example to be formatted. + depth (int): Indentation depth. + + Returns: + str: Formatted JSON example string. + """ + indent = " " * depth + formatted_example = "{\n" + for key, value in example.items(): + value_text = f"'{value}'" if isinstance(value, str) else value + formatted_example += f"{indent}{key}: {value_text},\n" + formatted_example = formatted_example.rstrip(",\n") + "\n" + indent + "}" + return formatted_example + + +def generate_text_documentation( + pydantic_models: List[Type[BaseModel]], model_prefix="Model", fields_prefix="Fields", + documentation_with_field_description=True +) -> str: + """ + Generate text documentation for a list of Pydantic models. + + Args: + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation. 
+ """ + documentation = "" + pyd_models = [(model, True) for model in pydantic_models] + for model, add_prefix in pyd_models: + if add_prefix: + documentation += f"{model_prefix}: {model.__name__}\n" + else: + documentation += f"Model: {model.__name__}\n" + + # Handling multi-line model description with proper indentation + + class_doc = getdoc(model) + base_class_doc = getdoc(BaseModel) + class_description = class_doc if class_doc and class_doc != base_class_doc else "" + if class_description != "": + documentation += " Description: " + documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + + if isclass(model) and issubclass(model, BaseModel): + documentation_fields = "" + for name, field_type in model.__annotations__.items(): + # if name == "markdown_code_block": + # continue + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + if get_origin(field_type) == Union: + element_types = get_args(field_type) + for element_type in element_types: + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + documentation_fields += generate_field_text( + name, field_type, model, documentation_with_field_description=documentation_with_field_description + ) + if documentation_fields != "": + if add_prefix: + documentation += f" {fields_prefix}:\n{documentation_fields}" + else: + documentation += f" Fields:\n{documentation_fields}" + documentation += "\n" + + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" + json_example = json.dumps(model.Config.json_schema_extra["example"]) + documentation += format_multiline_description(json_example, 2) + "\n" + + return documentation + + +def generate_field_text( + field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, + documentation_with_field_description=True +) -> str: """ Generate text documentation for a Pydantic model field. Args: - field_name (str): Name of the field. - field_type (Type[Any]): Type of the field. - model (Type[BaseModel]): Pydantic model class. - depth (int): Indentation depth in the documentation. - documentation_with_field_description (bool): Include field descriptions in the documentation. + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - str: Generated text documentation for the field. + str: Generated text documentation for the field. 
""" - indent = ' ' * depth + indent = " " * depth field_info = model.model_fields.get(field_name) field_description = field_info.description if field_info and field_info.description else "" @@ -817,9 +937,9 @@ def generate_field_text(field_name: str, field_type: Type[Any], model: Type[Base field_text += f"{indent} Description: " + field_description + "\n" # Check for and include field-specific examples if available - if hasattr(model, 'Config') and hasattr(model.Config, - 'json_schema_extra') and 'example' in model.Config.json_schema_extra: - field_example = model.Config.json_schema_extra['example'].get(field_name) + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra["example"].get(field_name) if field_example is not None: example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example field_text += f"{indent} Example: {example_text}\n" @@ -837,39 +957,40 @@ def format_multiline_description(description: str, indent_level: int) -> str: Format a multiline description with proper indentation. Args: - description (str): Multiline description. - indent_level (int): Indentation level. + description (str): Multiline description. + indent_level (int): Indentation level. Returns: - str: Formatted multiline description. + str: Formatted multiline description. """ - indent = ' ' * indent_level - return indent + description.replace('\n', '\n' + indent) + indent = " " * indent_level + return indent + description.replace("\n", "\n" + indent) -def save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path="./grammar.gbnf", - documentation_file_path="./grammar_documentation.md"): +def save_gbnf_grammar_and_documentation( + grammar, documentation, grammar_file_path="./grammar.gbnf", documentation_file_path="./grammar_documentation.md" +): """ Save GBNF grammar and documentation to specified files. Args: - grammar (str): GBNF grammar string. - documentation (str): Documentation string. - grammar_file_path (str): File path to save the GBNF grammar. - documentation_file_path (str): File path to save the documentation. + grammar (str): GBNF grammar string. + documentation (str): Documentation string. + grammar_file_path (str): File path to save the GBNF grammar. + documentation_file_path (str): File path to save the documentation. Returns: - None + None """ try: - with open(grammar_file_path, 'w') as file: + with open(grammar_file_path, "w") as file: file.write(grammar + get_primitive_grammar(grammar)) print(f"Grammar successfully saved to {grammar_file_path}") except IOError as e: print(f"An error occurred while saving the grammar file: {e}") try: - with open(documentation_file_path, 'w') as file: + with open(documentation_file_path, "w") as file: file.write(documentation) print(f"Documentation successfully saved to {documentation_file_path}") except IOError as e: @@ -881,10 +1002,10 @@ def remove_empty_lines(string): Remove empty lines from a string. Args: - string (str): Input string. + string (str): Input string. Returns: - str: String with empty lines removed. + str: String with empty lines removed. 
""" lines = string.splitlines() non_empty_lines = [line for line in lines if line.strip() != ""] @@ -892,95 +1013,109 @@ def remove_empty_lines(string): return string_no_empty_lines -def generate_and_save_gbnf_grammar_and_documentation(pydantic_model_list, - grammar_file_path="./generated_grammar.gbnf", - documentation_file_path="./generated_grammar_documentation.md", - outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", - list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_and_save_gbnf_grammar_and_documentation( + pydantic_model_list, + grammar_file_path="./generated_grammar.gbnf", + documentation_file_path="./generated_grammar_documentation.md", + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation, and save them to specified files. Args: - pydantic_model_list: List of Pydantic model classes. - grammar_file_path (str): File path to save the generated GBNF grammar. - documentation_file_path (str): File path to save the generated documentation. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + pydantic_model_list: List of Pydantic model classes. + grammar_file_path (str): File path to save the generated GBNF grammar. + documentation_file_path (str): File path to save the generated documentation. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. 
Returns: - None + None """ - documentation = generate_text_documentation(pydantic_model_list, model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + pydantic_model_list, model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar) save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path, documentation_file_path) -def generate_gbnf_grammar_and_documentation(pydantic_model_list, outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_gbnf_grammar_and_documentation( + pydantic_model_list, + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation for a list of Pydantic models. Args: - pydantic_model_list: List of Pydantic model classes. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + pydantic_model_list: List of Pydantic model classes. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - tuple: GBNF grammar string, documentation string. + tuple: GBNF grammar string, documentation string. 
""" - documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) return grammar, documentation -def generate_gbnf_grammar_and_documentation_from_dictionaries(dictionaries: List[dict], - outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", - list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_gbnf_grammar_and_documentation_from_dictionaries( + dictionaries: List[dict], + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation from a list of dictionaries. Args: - dictionaries (List[dict]): List of dictionaries representing Pydantic models. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + dictionaries (List[dict]): List of dictionaries representing Pydantic models. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - tuple: GBNF grammar string, documentation string. + tuple: GBNF grammar string, documentation string. 
""" pydantic_model_list = create_dynamic_models_from_dictionaries(dictionaries) - documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) return grammar, documentation @@ -990,41 +1125,61 @@ def create_dynamic_model_from_function(func: Callable): Creates a dynamic Pydantic model from a given function's type hints and adds the function as a 'run' method. Args: - func (Callable): A function with type hints from which to create the model. + func (Callable): A function with type hints from which to create the model. Returns: - A dynamic Pydantic model class with the provided function as a 'run' method. + A dynamic Pydantic model class with the provided function as a 'run' method. """ - # Extracting type hints from the provided function - type_hints = get_type_hints(func) - type_hints.pop('return', None) - # Handling default values and annotations + # Get the signature of the function + sig = inspect.signature(func) + + # Parse the docstring + docstring = parse(func.__doc__) + dynamic_fields = {} - defaults = getattr(func, '__defaults__', ()) or () - defaults_index = len(type_hints) - len(defaults) + param_docs = [] + for param in sig.parameters.values(): + # Exclude 'self' parameter + if param.name == "self": + continue - for index, (name, typ) in enumerate(type_hints.items()): - if index >= defaults_index: - default_value = defaults[index - defaults_index] - dynamic_fields[name] = (typ, default_value) + # Assert that the parameter has a type annotation + if param.annotation == inspect.Parameter.empty: + raise TypeError(f"Parameter '{param.name}' in function '{func.__name__}' lacks a type annotation") + + # Find the parameter's description in the docstring + param_doc = next((d for d in docstring.params if d.arg_name == param.name), None) + + # Assert that the parameter has a description + if not param_doc or not param_doc.description: + raise ValueError( + f"Parameter '{param.name}' in function '{func.__name__}' lacks a description in the docstring") + + # Add parameter details to the schema + param_doc = next((d for d in docstring.params if d.arg_name == param.name), None) + param_docs.append((param.name, param_doc)) + if param.default == inspect.Parameter.empty: + default_value = ... else: - dynamic_fields[name] = (typ, ...) 
- + default_value = param.default + dynamic_fields[param.name] = ( + param.annotation if param.annotation != inspect.Parameter.empty else str, default_value) # Creating the dynamic model - dynamicModel = create_model(f'{func.__name__}', **dynamic_fields) + dynamic_model = create_model(f"{func.__name__}", **dynamic_fields) - dynamicModel.__doc__ = getdoc(func) + for param_doc in param_docs: + dynamic_model.model_fields[param_doc[0]].description = param_doc[1].description + + dynamic_model.__doc__ = docstring.short_description - # Wrapping the original function to handle instance 'self' def run_method_wrapper(self): - func_args = {name: getattr(self, name) for name in type_hints} + func_args = {name: getattr(self, name) for name, _ in dynamic_fields.items()} return func(**func_args) # Adding the wrapped function as a 'run' method - setattr(dynamicModel, 'run', run_method_wrapper) - - return dynamicModel + setattr(dynamic_model, "run", run_method_wrapper) + return dynamic_model def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): @@ -1032,11 +1187,11 @@ def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): Add a 'run' method to a dynamic Pydantic model, using the provided function. Args: - - model (Type[BaseModel]): Dynamic Pydantic model class. - - func (Callable): Function to be added as a 'run' method to the model. + model (Type[BaseModel]): Dynamic Pydantic model class. + func (Callable): Function to be added as a 'run' method to the model. Returns: - - Type[BaseModel]: Pydantic model class with the added 'run' method. + Type[BaseModel]: Pydantic model class with the added 'run' method. """ def run_method_wrapper(self): @@ -1044,7 +1199,7 @@ def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): return func(**func_args) # Adding the wrapped function as a 'run' method - setattr(model, 'run', run_method_wrapper) + setattr(model, "run", run_method_wrapper) return model @@ -1054,15 +1209,15 @@ def create_dynamic_models_from_dictionaries(dictionaries: List[dict]): Create a list of dynamic Pydantic model classes from a list of dictionaries. Args: - - dictionaries (List[dict]): List of dictionaries representing model structures. + dictionaries (List[dict]): List of dictionaries representing model structures. Returns: - - List[Type[BaseModel]]: List of generated dynamic Pydantic model classes. + List[Type[BaseModel]]: List of generated dynamic Pydantic model classes. """ dynamic_models = [] for func in dictionaries: model_name = format_model_and_field_name(func.get("name", "")) - dyn_model = convert_dictionary_to_to_pydantic_model(func, model_name) + dyn_model = convert_dictionary_to_pydantic_model(func, model_name) dynamic_models.append(dyn_model) return dynamic_models @@ -1080,12 +1235,12 @@ from enum import Enum def json_schema_to_python_types(schema): type_map = { - 'any': Any, - 'string': str, - 'number': float, - 'integer': int, - 'boolean': bool, - 'array': list, + "any": Any, + "string": str, + "number": float, + "integer": int, + "boolean": bool, + "array": list, } return type_map[schema] @@ -1094,58 +1249,64 @@ def list_to_enum(enum_name, values): return Enum(enum_name, {value: value for value in values}) -def convert_dictionary_to_to_pydantic_model(dictionary: dict, model_name: str = 'CustomModel') -> Type[BaseModel]: +def convert_dictionary_to_pydantic_model(dictionary: dict, model_name: str = "CustomModel") -> Type[BaseModel]: """ Convert a dictionary to a Pydantic model class. 
Args: - - dictionary (dict): Dictionary representing the model structure. - - model_name (str): Name of the generated Pydantic model. + dictionary (dict): Dictionary representing the model structure. + model_name (str): Name of the generated Pydantic model. Returns: - - Type[BaseModel]: Generated Pydantic model class. + Type[BaseModel]: Generated Pydantic model class. """ fields = {} if "properties" in dictionary: for field_name, field_data in dictionary.get("properties", {}).items(): - if field_data == 'object': - submodel = convert_dictionary_to_to_pydantic_model(dictionary, f'{model_name}_{field_name}') + if field_data == "object": + submodel = convert_dictionary_to_pydantic_model(dictionary, f"{model_name}_{field_name}") fields[field_name] = (submodel, ...) else: - field_type = field_data.get('type', 'str') + field_type = field_data.get("type", "str") if field_data.get("enum", []): fields[field_name] = (list_to_enum(field_name, field_data.get("enum", [])), ...) - if field_type == "array": + elif field_type == "array": items = field_data.get("items", {}) if items != {}: array = {"properties": items} - array_type = convert_dictionary_to_to_pydantic_model(array, f'{model_name}_{field_name}_items') + array_type = convert_dictionary_to_pydantic_model(array, f"{model_name}_{field_name}_items") fields[field_name] = (List[array_type], ...) else: fields[field_name] = (list, ...) - elif field_type == 'object': - submodel = convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}_{field_name}') + elif field_type == "object": + submodel = convert_dictionary_to_pydantic_model(field_data, f"{model_name}_{field_name}") fields[field_name] = (submodel, ...) + elif field_type == "required": + required = field_data.get("enum", []) + for key, field in fields.items(): + if key not in required: + fields[key] = (Optional[fields[key][0]], ...) else: field_type = json_schema_to_python_types(field_type) fields[field_name] = (field_type, ...) if "function" in dictionary: - for field_name, field_data in dictionary.get("function", {}).items(): if field_name == "name": model_name = field_data elif field_name == "description": fields["__doc__"] = field_data elif field_name == "parameters": - return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + return convert_dictionary_to_pydantic_model(field_data, f"{model_name}") + if "parameters" in dictionary: field_data = {"function": dictionary} - return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') - + return convert_dictionary_to_pydantic_model(field_data, f"{model_name}") + if "required" in dictionary: + required = dictionary.get("required", []) + for key, field in fields.items(): + if key not in required: + fields[key] = (Optional[fields[key][0]], ...) 
custom_model = create_model(model_name, **fields) return custom_model - - - From 7c8d3abd1a17c28fc56b1a4814bc4b29f91d7454 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Tue, 16 Jan 2024 14:33:02 +0100 Subject: [PATCH 329/811] metal : log `recommendedMaxWorkingSetSize` on iOS 16+ (#4936) * metal: Log `recommendedMaxWorkingSetSize` on iOS 16+ * Only log on iOS and macOS, ignoring tvOS and other platforms * Check for Xcode version before using recommendedMaxWorkingSetSize --------- Co-authored-by: Georgi Gerganov --- ggml-metal.m | 58 ++++++++++++++++++++++++---------------------------- 1 file changed, 27 insertions(+), 31 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 867f2fd48..44134d1d9 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -369,8 +369,12 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? "true" : "false"); -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + } +#elif TARGET_OS_OSX if (ctx->device.maxTransferRate != 0) { GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); } else { @@ -2369,6 +2373,25 @@ GGML_CALL static const char * ggml_backend_metal_buffer_type_get_name(ggml_backe UNUSED(buft); } +static void ggml_backend_metal_log_allocated_size(id device) { +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } + } else { + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); + } +#endif + UNUSED(device); +} + GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2401,22 +2424,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buff } GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); - - -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - device.currentAllocatedSize / 1024.0 / 1024.0, - device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); 
-#endif - + ggml_backend_metal_log_allocated_size(device); return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } @@ -2524,19 +2532,7 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, } } -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - device.currentAllocatedSize / 1024.0 / 1024.0, - device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); -#endif + ggml_backend_metal_log_allocated_size(device); return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size); } From 3a48d558a69c88ac17efcaa5900cd9eb19596ac4 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Tue, 16 Jan 2024 14:41:27 +0100 Subject: [PATCH 330/811] metal : replace loop of dispatch_async with dispatch_apply (#4934) * Replace loop of dispatch_async with dispatch_apply * Update ggml-metal.m --------- Co-authored-by: Georgi Gerganov --- ggml-metal.m | 2882 +++++++++++++++++++++++++------------------------- 1 file changed, 1439 insertions(+), 1443 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 44134d1d9..c21dc465a 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -737,1475 +737,249 @@ static bool ggml_metal_graph_compute( ctx->command_encoders[i] = [ctx->command_buffers[i] computeCommandEncoderWithDescriptor: edesc]; } - for (int cb_idx = 0; cb_idx < n_cb; ++cb_idx) { - const int n_nodes_per_cb = (n_nodes + n_cb - 1) / n_cb; + const int n_nodes_per_cb = (n_nodes + n_cb - 1) / n_cb; + dispatch_apply(n_cb, ctx->d_queue, ^(size_t iter) { + const int cb_idx = iter; - dispatch_async(ctx->d_queue, ^{ - size_t offs_src0 = 0; - size_t offs_src1 = 0; - size_t offs_dst = 0; + size_t offs_src0 = 0; + size_t offs_src1 = 0; + size_t offs_dst = 0; - id command_buffer = ctx->command_buffers[cb_idx]; - id encoder = ctx->command_encoders[cb_idx]; + id command_buffer = ctx->command_buffers[cb_idx]; + id encoder = ctx->command_encoders[cb_idx]; - const int node_start = (cb_idx + 0) * n_nodes_per_cb; - const int node_end = MIN((cb_idx == n_cb - 1) ? n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); + const int node_start = (cb_idx + 0) * n_nodes_per_cb; + const int node_end = MIN((cb_idx == n_cb - 1) ? 
n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); - for (int ind = node_start; ind < node_end; ++ind) { - const int i = ind; + for (int ind = node_start; ind < node_end; ++ind) { + const int i = ind; - if (i == -1) { - [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; - continue; - } + if (i == -1) { + [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; + continue; + } - //GGML_METAL_LOG_INFO("%s: encoding node %3d, op = %8s\n", __func__, i, ggml_op_name(gf->nodes[i]->op)); + //GGML_METAL_LOG_INFO("%s: encoding node %3d, op = %8s\n", __func__, i, ggml_op_name(gf->nodes[i]->op)); - struct ggml_tensor * src0 = gf->nodes[i]->src[0]; - struct ggml_tensor * src1 = gf->nodes[i]->src[1]; - struct ggml_tensor * dst = gf->nodes[i]; + struct ggml_tensor * src0 = gf->nodes[i]->src[0]; + struct ggml_tensor * src1 = gf->nodes[i]->src[1]; + struct ggml_tensor * dst = gf->nodes[i]; - switch (dst->op) { - case GGML_OP_NONE: - case GGML_OP_RESHAPE: - case GGML_OP_VIEW: - case GGML_OP_TRANSPOSE: - case GGML_OP_PERMUTE: - { - // noop -> next node - } continue; - default: - { - } break; - } + switch (dst->op) { + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + { + // noop -> next node + } continue; + default: + { + } break; + } - if (!ggml_metal_supports_op(ctx, dst)) { - GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); - GGML_ASSERT(!"unsupported op"); - } + if (!ggml_metal_supports_op(ctx, dst)) { + GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); + GGML_ASSERT(!"unsupported op"); + } #ifndef GGML_METAL_NDEBUG - [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; #endif - const int64_t ne00 = src0 ? src0->ne[0] : 0; - const int64_t ne01 = src0 ? src0->ne[1] : 0; - const int64_t ne02 = src0 ? src0->ne[2] : 0; - const int64_t ne03 = src0 ? src0->ne[3] : 0; + const int64_t ne00 = src0 ? src0->ne[0] : 0; + const int64_t ne01 = src0 ? src0->ne[1] : 0; + const int64_t ne02 = src0 ? src0->ne[2] : 0; + const int64_t ne03 = src0 ? src0->ne[3] : 0; - const uint64_t nb00 = src0 ? src0->nb[0] : 0; - const uint64_t nb01 = src0 ? src0->nb[1] : 0; - const uint64_t nb02 = src0 ? src0->nb[2] : 0; - const uint64_t nb03 = src0 ? src0->nb[3] : 0; + const uint64_t nb00 = src0 ? src0->nb[0] : 0; + const uint64_t nb01 = src0 ? src0->nb[1] : 0; + const uint64_t nb02 = src0 ? src0->nb[2] : 0; + const uint64_t nb03 = src0 ? src0->nb[3] : 0; - const int64_t ne10 = src1 ? src1->ne[0] : 0; - const int64_t ne11 = src1 ? src1->ne[1] : 0; - const int64_t ne12 = src1 ? src1->ne[2] : 0; - const int64_t ne13 = src1 ? src1->ne[3] : 0; UNUSED(ne13); + const int64_t ne10 = src1 ? src1->ne[0] : 0; + const int64_t ne11 = src1 ? src1->ne[1] : 0; + const int64_t ne12 = src1 ? src1->ne[2] : 0; + const int64_t ne13 = src1 ? src1->ne[3] : 0; UNUSED(ne13); - const uint64_t nb10 = src1 ? src1->nb[0] : 0; - const uint64_t nb11 = src1 ? src1->nb[1] : 0; - const uint64_t nb12 = src1 ? src1->nb[2] : 0; - const uint64_t nb13 = src1 ? src1->nb[3] : 0; UNUSED(nb13); + const uint64_t nb10 = src1 ? src1->nb[0] : 0; + const uint64_t nb11 = src1 ? src1->nb[1] : 0; + const uint64_t nb12 = src1 ? src1->nb[2] : 0; + const uint64_t nb13 = src1 ? src1->nb[3] : 0; UNUSED(nb13); - const int64_t ne0 = dst ? dst->ne[0] : 0; - const int64_t ne1 = dst ? 
dst->ne[1] : 0; - const int64_t ne2 = dst ? dst->ne[2] : 0; - const int64_t ne3 = dst ? dst->ne[3] : 0; + const int64_t ne0 = dst ? dst->ne[0] : 0; + const int64_t ne1 = dst ? dst->ne[1] : 0; + const int64_t ne2 = dst ? dst->ne[2] : 0; + const int64_t ne3 = dst ? dst->ne[3] : 0; - const uint64_t nb0 = dst ? dst->nb[0] : 0; - const uint64_t nb1 = dst ? dst->nb[1] : 0; - const uint64_t nb2 = dst ? dst->nb[2] : 0; - const uint64_t nb3 = dst ? dst->nb[3] : 0; + const uint64_t nb0 = dst ? dst->nb[0] : 0; + const uint64_t nb1 = dst ? dst->nb[1] : 0; + const uint64_t nb2 = dst ? dst->nb[2] : 0; + const uint64_t nb3 = dst ? dst->nb[3] : 0; - const enum ggml_type src0t = src0 ? src0->type : GGML_TYPE_COUNT; - const enum ggml_type src1t = src1 ? src1->type : GGML_TYPE_COUNT; - const enum ggml_type dstt = dst ? dst->type : GGML_TYPE_COUNT; + const enum ggml_type src0t = src0 ? src0->type : GGML_TYPE_COUNT; + const enum ggml_type src1t = src1 ? src1->type : GGML_TYPE_COUNT; + const enum ggml_type dstt = dst ? dst->type : GGML_TYPE_COUNT; - id id_src0 = src0 ? ggml_metal_get_buffer(ctx, src0, &offs_src0) : nil; - id id_src1 = src1 ? ggml_metal_get_buffer(ctx, src1, &offs_src1) : nil; - id id_dst = dst ? ggml_metal_get_buffer(ctx, dst, &offs_dst) : nil; + id id_src0 = src0 ? ggml_metal_get_buffer(ctx, src0, &offs_src0) : nil; + id id_src1 = src1 ? ggml_metal_get_buffer(ctx, src1, &offs_src1) : nil; + id id_dst = dst ? ggml_metal_get_buffer(ctx, dst, &offs_dst) : nil; - //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); - //if (src0) { - // GGML_METAL_LOG_INFO("%s: src0 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src0t), ne00, ne01, ne02, - // ggml_is_contiguous(src0), src0->name); - //} - //if (src1) { - // GGML_METAL_LOG_INFO("%s: src1 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src1t), ne10, ne11, ne12, - // ggml_is_contiguous(src1), src1->name); - //} - //if (dst) { - // GGML_METAL_LOG_INFO("%s: dst - %4s [%5lld, %5lld, %5lld], 1, %s\n", __func__, ggml_type_name(dstt), ne0, ne1, ne2, - // dst->name); - //} + //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); + //if (src0) { + // GGML_METAL_LOG_INFO("%s: src0 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src0t), ne00, ne01, ne02, + // ggml_is_contiguous(src0), src0->name); + //} + //if (src1) { + // GGML_METAL_LOG_INFO("%s: src1 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src1t), ne10, ne11, ne12, + // ggml_is_contiguous(src1), src1->name); + //} + //if (dst) { + // GGML_METAL_LOG_INFO("%s: dst - %4s [%5lld, %5lld, %5lld], 1, %s\n", __func__, ggml_type_name(dstt), ne0, ne1, ne2, + // dst->name); + //} - switch (dst->op) { - case GGML_OP_CONCAT: - { - const int64_t nb = ne00; + switch (dst->op) { + case GGML_OP_CONCAT: + { + const int64_t nb = ne00; - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&nb01 
length:sizeof(nb01) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:27]; + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:27]; - const int nth = MIN(1024, ne0); + const int nth = MIN(1024, ne0); - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ADD: - case GGML_OP_MUL: - case GGML_OP_DIV: - { - const size_t offs = 0; + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ADD: + case GGML_OP_MUL: + case GGML_OP_DIV: + { + const size_t offs = 0; - bool bcast_row = false; + bool bcast_row = false; - int64_t nb = ne00; + int64_t nb = ne00; - id pipeline = nil; + id pipeline = nil; - if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { - GGML_ASSERT(ggml_is_contiguous(src0)); 
- - // src1 is a row - GGML_ASSERT(ne11 == 1); - - nb = ne00 / 4; - switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; - case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; - case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; - default: GGML_ASSERT(false); - } - - bcast_row = true; - } else { - switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; - case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; - case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; - default: GGML_ASSERT(false); - } - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; - [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; - - if (bcast_row) { - const int64_t n = ggml_nelements(dst)/4; - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } else { - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } - } break; - case GGML_OP_ACC: - { - GGML_ASSERT(src0t == GGML_TYPE_F32); - GGML_ASSERT(src1t == GGML_TYPE_F32); - GGML_ASSERT(dstt == GGML_TYPE_F32); - - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(src1)); - - const size_t pnb1 = ((int32_t *) dst->op_params)[0]; - const size_t pnb2 = ((int32_t *) dst->op_params)[1]; - const size_t pnb3 = ((int32_t *) dst->op_params)[2]; - const size_t offs = ((int32_t *) dst->op_params)[3]; - - const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - - if (!inplace) { - // run a separete kernel to cpy src->dst - // not sure how to avoid this - // TODO: make a simpler cpy_bytes kernel - - const id pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; - - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } - - const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; - [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; - [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; - [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; - [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; - [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); - - [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_SCALE: - { + if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { GGML_ASSERT(ggml_is_contiguous(src0)); - const float scale = *(const 
float *) dst->op_params; + // src1 is a row + GGML_ASSERT(ne11 == 1); - int64_t n = ggml_nelements(dst); - - id pipeline = nil; - - if (n % 4 == 0) { - n /= 4; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; + nb = ne00 / 4; + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; + default: GGML_ASSERT(false); } - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; + bcast_row = true; + } else { + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; + default: GGML_ASSERT(false); + } + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; + + if (bcast_row) { + const int64_t n = ggml_nelements(dst)/4; [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_UNARY: - switch (ggml_get_unary_op(gf->nodes[i])) { - case GGML_UNARY_OP_TANH: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 
1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_RELU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_GELU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_GELU_QUICK: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_SILU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - default: - { - GGML_METAL_LOG_WARN("%s: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); - GGML_ASSERT(false); - } - } break; - case GGML_OP_SQR: - { - GGML_ASSERT(ggml_is_contiguous(src0)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_SUM_ROWS: - { - GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; - [encoder setBytes:&nb11 length:sizeof(nb11) 
atIndex:15]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_SOFT_MAX: - { - int nth = 32; // SIMD width - - id pipeline = nil; - - if (ne00%4 == 0) { - while (nth < ne00/4 && nth < 256) { - nth *= 2; - } - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; - } else { - while (nth < ne00 && nth < 1024) { - nth *= 2; - } - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; - } - - const float scale = ((float *) dst->op_params)[0]; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - if (id_src1) { - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - } else { - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; - } - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_DIAG_MASK_INF: - { - const int n_past = ((int32_t *)(dst->op_params))[0]; - - id pipeline = nil; - - if (ne00%8 == 0) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&n_past length:sizeof(int) atIndex:4]; - - if (ne00%8 == 0) { - [encoder dispatchThreadgroups:MTLSizeMake(ne00*ne01*ne02/8, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } - else { - [encoder dispatchThreadgroups:MTLSizeMake(ne00, ne01, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } - } break; - case GGML_OP_MUL_MAT: - { - GGML_ASSERT(ne00 == ne10); - - // TODO: assert that dim2 and dim3 are contiguous - GGML_ASSERT(ne12 % ne02 == 0); - GGML_ASSERT(ne13 % ne03 == 0); - - const uint r2 = ne12/ne02; - const uint r3 = ne13/ne03; - - // find the break-even point where the matrix-matrix kernel becomes more efficient compared - // to the matrix-vector kernel - int ne11_mm_min = 1; - -#if 0 - // the numbers below are measured on M2 Ultra for 7B and 13B models - // these numbers do not translate to other devices or model sizes - // TODO: need to find a better approach - if ([ctx->device.name isEqualToString:@"Apple M2 Ultra"]) { - switch (src0t) { - case GGML_TYPE_F16: ne11_mm_min = 2; break; - case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; - case GGML_TYPE_Q2_K: 
ne11_mm_min = 15; break; - case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q4_0: - case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; - case GGML_TYPE_Q4_K: ne11_mm_min = 11; break; - case GGML_TYPE_Q5_0: // not tested yet - case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet - case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; - default: ne11_mm_min = 1; break; - } - } -#endif - - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs - // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && - !ggml_is_transposed(src0) && - !ggml_is_transposed(src1) && - src1t == GGML_TYPE_F32 && - ne00 % 32 == 0 && ne00 >= 64 && - (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { - //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; - default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:5]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:6]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; - [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - [encoder 
dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; - } else { - int nth0 = 32; - int nth1 = 1; - int nrows = 1; - //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - // use custom matrix x vector kernel - switch (src0t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; - nrows = 4; - } break; - case GGML_TYPE_F16: - { - nth0 = 32; - nth1 = 1; - if (src1t == GGML_TYPE_F32) { - if (ne11 * ne12 < 4) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; - } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; - nrows = ne11; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; - nrows = 4; - } - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; - nrows = 4; - } - } break; - case GGML_TYPE_Q4_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; - } break; - case GGML_TYPE_Q4_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; - } break; - case GGML_TYPE_Q5_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; - } break; - case GGML_TYPE_Q5_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; - } break; - case GGML_TYPE_Q8_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; - } break; - case GGML_TYPE_Q2_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; - } break; - case GGML_TYPE_Q3_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; - } break; - case GGML_TYPE_Q4_K: - { - nth0 = 4; //1; - nth1 = 8; //32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; - } break; - case GGML_TYPE_Q5_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; - } break; - case GGML_TYPE_Q6_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XXS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; - } break; - default: - { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); - GGML_ASSERT(false && "not implemented"); - } - }; - - if (ggml_is_quantized(src0t)) { - GGML_ASSERT(ne00 >= nth0*nth1); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; 
- [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:11]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:12]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:13]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; - - if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || - src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { - const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 256*8+128 : 512*8+128; - [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q3_K) { -#ifdef GGML_QKK_64 - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#else - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#endif - } - else if (src0t == GGML_TYPE_Q5_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q6_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } else { - const int64_t ny = (ne11 + nrows - 1)/nrows; - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - } - } break; - case GGML_OP_MUL_MAT_ID: - { - //GGML_ASSERT(ne00 == ne10); - //GGML_ASSERT(ne03 == ne13); - - GGML_ASSERT(src0t == GGML_TYPE_I32); - - const int n_as = ((int32_t *) dst->op_params)[1]; - - // TODO: make this more general - GGML_ASSERT(n_as <= 8); - - // max size of the src1ids array in the kernel stack - GGML_ASSERT(ne11 <= 512); - - struct ggml_tensor * src2 = gf->nodes[i]->src[2]; - - const int64_t ne20 = src2 ? src2->ne[0] : 0; - const int64_t ne21 = src2 ? src2->ne[1] : 0; - const int64_t ne22 = src2 ? src2->ne[2] : 0; - const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23); - - const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20); - const uint64_t nb21 = src2 ? src2->nb[1] : 0; - const uint64_t nb22 = src2 ? src2->nb[2] : 0; - const uint64_t nb23 = src2 ? src2->nb[3] : 0; GGML_UNUSED(nb23); - - const enum ggml_type src2t = src2 ? 
src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t); - - GGML_ASSERT(!ggml_is_transposed(src2)); - GGML_ASSERT(!ggml_is_transposed(src1)); - - GGML_ASSERT(src1t == GGML_TYPE_F32); - - const uint r2 = ne12/ne22; - const uint r3 = ne13/ne23; - - // find the break-even point where the matrix-matrix kernel becomes more efficient compared - // to the matrix-vector kernel - int ne11_mm_min = n_as; - - const int idx = ((int32_t *) dst->op_params)[0]; - - // batch size - GGML_ASSERT(ne01 == ne11); - - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs - // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel - // !!! - // TODO: for now, always use mat-vec kernels until we figure out how to improve the - // indirect matrix multiplication - // !!! - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && - ne20 % 32 == 0 && ne20 >= 64 && - ne11 > ne11_mm_min) { - - id pipeline = nil; - - switch (src2->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; - default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; - [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; - [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:5]; - [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6]; - [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:7]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:8]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:9]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:10]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&r2 
length:sizeof(r2) atIndex:16]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; - [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; - // TODO: how to make this an array? read Metal docs - for (int j = 0; j < 8; ++j) { - // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 - struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; - - size_t offs_src_cur = 0; - id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); - - [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:19 + j]; - } - - [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; - } else { - int nth0 = 32; - int nth1 = 1; - int nrows = 1; - //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - // use custom matrix x vector kernel - switch (src2t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; - } break; - case GGML_TYPE_F16: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - nth0 = 32; - nth1 = 1; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; - } break; - case GGML_TYPE_Q4_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; - } break; - case GGML_TYPE_Q4_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; - } break; - case GGML_TYPE_Q5_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; - } break; - case GGML_TYPE_Q5_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; - } break; - case GGML_TYPE_Q8_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; - } break; - case GGML_TYPE_Q2_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; - } break; - case GGML_TYPE_Q3_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; - } break; - case GGML_TYPE_Q4_K: - { - nth0 = 4; //1; - nth1 = 8; //32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; - } break; - case GGML_TYPE_Q5_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; - } break; - case GGML_TYPE_Q6_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XXS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; - } break; - default: - { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); - GGML_ASSERT(false && "not implemented"); - } - }; - - if (ggml_is_quantized(src2t)) { - GGML_ASSERT(ne20 >= nth0*nth1); - } - - const int64_t _ne1 = 1; // kernels needs a reference in constant memory - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst 
atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; - [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; - [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; - [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:6]; - [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:7]; - [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:8]; - [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:9]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:11]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:18]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:20]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; - [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; - // TODO: how to make this an array? read Metal docs - for (int j = 0; j < 8; ++j) { - // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 - struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; - - size_t offs_src_cur = 0; - id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); - - [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; - } - - if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || - src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { - const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; - [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q3_K) { -#ifdef GGML_QKK_64 - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#else - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#endif - } - else if (src2t == GGML_TYPE_Q5_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q6_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } else { - const int64_t ny = (_ne1 + nrows - 1)/nrows; - [encoder dispatchThreadgroups:MTLSizeMake(ne21, ny, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - } - } break; - case GGML_OP_GET_ROWS: - { - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; - case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; - [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; - [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; - [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; - [encoder 
setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; - } break; - case GGML_OP_RMS_NORM: - { - GGML_ASSERT(ne00 % 4 == 0); - - float eps; - memcpy(&eps, dst->op_params, sizeof(float)); - - int nth = 32; // SIMD width - - while (nth < ne00/4 && nth < 1024) { - nth *= 2; - } - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; - [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - const int64_t nrows = ggml_nrows(src0); - - [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_GROUP_NORM: - { - GGML_ASSERT(ne00 % 4 == 0); - - //float eps; - //memcpy(&eps, dst->op_params, sizeof(float)); - - const float eps = 1e-6f; // TODO: temporarily hardcoded - - const int32_t n_groups = ((int32_t *) dst->op_params)[0]; - - int nth = 32; // SIMD width - - //while (nth < ne00/4 && nth < 1024) { - // nth *= 2; - //} - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; - [encoder setBytes:&eps length:sizeof( float) atIndex:9]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_NORM: - { - float eps; - memcpy(&eps, dst->op_params, sizeof(float)); - - const int nth = MIN(256, ne00); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; - [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; - - const int64_t nrows = ggml_nrows(src0); - - [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ALIBI: - { - GGML_ASSERT((src0t == GGML_TYPE_F32)); - - const int nth = MIN(1024, ne00); - - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_head = ((int32_t *) dst->op_params)[1]; - float max_bias; - memcpy(&max_bias, (int32_t *) dst->op_params + 2, sizeof(float)); - - const int n_heads_log2_floor = 1 << (int) floor(log2(n_head)); - const float m0 = powf(2.0f, -(max_bias) / 
n_heads_log2_floor); - const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; - [encoder setBytes:&m0 length:sizeof( float) atIndex:18]; - [encoder setBytes:&m1 length:sizeof( float) atIndex:19]; - [encoder setBytes:&n_heads_log2_floor length:sizeof(int) atIndex:20]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ROPE: - { - GGML_ASSERT(ne10 == ne02); - - const int nth = MIN(1024, ne00); - - const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal - const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; - - float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; - memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); - memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); - memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); - memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); - memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof( int64_t) 
atIndex:11]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; - [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; - [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; - [encoder setBytes:&mode length:sizeof( int) atIndex:21]; - [encoder setBytes:&n_orig_ctx length:sizeof( int) atIndex:22]; - [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; - [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; - [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; - [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; - [encoder setBytes:&beta_fast length:sizeof( float) atIndex:27]; - [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_IM2COL: - { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); - - const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; - const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; - const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; - const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; - const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; - const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; - const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; - - const int32_t N = src1->ne[is_2D ? 3 : 2]; - const int32_t IC = src1->ne[is_2D ? 2 : 1]; - const int32_t IH = is_2D ? src1->ne[1] : 1; - const int32_t IW = src1->ne[0]; - - const int32_t KH = is_2D ? src0->ne[1] : 1; - const int32_t KW = src0->ne[0]; - - const int32_t OH = is_2D ? dst->ne[2] : 1; - const int32_t OW = dst->ne[1]; - - const int32_t CHW = IC * KH * KW; - - const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; - const int32_t ofs1 = src1->nb[is_2D ? 
2 : 1] / 4; - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; - [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; - [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; - [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; - [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; - [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; - [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; - [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; - [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; - [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; - [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; - - [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; - } break; - case GGML_OP_UPSCALE: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - - const int sf = dst->op_params[0]; - - const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; - [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - + } else { const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_PAD: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } + } break; + case GGML_OP_ACC: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + GGML_ASSERT(dstt == GGML_TYPE_F32); - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) 
atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + const size_t pnb1 = ((int32_t *) dst->op_params)[0]; + const size_t pnb2 = ((int32_t *) dst->op_params)[1]; + const size_t pnb3 = ((int32_t *) dst->op_params)[2]; + const size_t offs = ((int32_t *) dst->op_params)[3]; - const int nth = MIN(1024, ne0); + const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ARGSORT: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_I32); + if (!inplace) { + // run a separete kernel to cpy src->dst + // not sure how to avoid this + // TODO: make a simpler cpy_bytes kernel - const int nrows = ggml_nrows(src0); - - enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; - - id pipeline = nil; - - switch (order) { - case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; - case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - - [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; - } break; - case GGML_OP_LEAKY_RELU: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - - float slope; - memcpy(&slope, dst->op_params, sizeof(float)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_DUP: - case GGML_OP_CPY: - case GGML_OP_CONT: - { - GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); - - int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); - - id pipeline = nil; - - switch (src0t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); - - switch (dstt) { - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; - //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; - //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - }; - } break; - case GGML_TYPE_F16: - { - switch (dstt) { - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - }; - } break; - default: GGML_ASSERT(false && "not implemented"); - } + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -2227,31 +1001,1253 @@ static bool ggml_metal_graph_compute( [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - default: - { - GGML_METAL_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); - GGML_ASSERT(false); } - } + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + + [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_SCALE: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + const float scale = *(const float *) dst->op_params; + + int64_t n = ggml_nelements(dst); + + id pipeline = 
nil; + + if (n % 4 == 0) { + n /= 4; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_UNARY: + switch (ggml_get_unary_op(gf->nodes[i])) { + case GGML_UNARY_OP_TANH: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_RELU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU_QUICK: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_SILU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_WARN("%s: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); + GGML_ASSERT(false); + } + } break; + case GGML_OP_SQR: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SUM_ROWS: + { + GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; + + [encoder 
setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SOFT_MAX: + { + int nth = 32; // SIMD width + + id pipeline = nil; + + if (ne00%4 == 0) { + while (nth < ne00/4 && nth < 256) { + nth *= 2; + } + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; + } else { + while (nth < ne00 && nth < 1024) { + nth *= 2; + } + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; + } + + const float scale = ((float *) dst->op_params)[0]; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + if (id_src1) { + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_DIAG_MASK_INF: + { + const int n_past = ((int32_t *)(dst->op_params))[0]; + + id pipeline = nil; + + if (ne00%8 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&n_past length:sizeof(int) atIndex:4]; + + if (ne00%8 == 0) { + [encoder dispatchThreadgroups:MTLSizeMake(ne00*ne01*ne02/8, 1, 1) 
threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + else { + [encoder dispatchThreadgroups:MTLSizeMake(ne00, ne01, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + } break; + case GGML_OP_MUL_MAT: + { + GGML_ASSERT(ne00 == ne10); + + // TODO: assert that dim2 and dim3 are contiguous + GGML_ASSERT(ne12 % ne02 == 0); + GGML_ASSERT(ne13 % ne03 == 0); + + const uint r2 = ne12/ne02; + const uint r3 = ne13/ne03; + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + int ne11_mm_min = 1; + +#if 0 + // the numbers below are measured on M2 Ultra for 7B and 13B models + // these numbers do not translate to other devices or model sizes + // TODO: need to find a better approach + if ([ctx->device.name isEqualToString:@"Apple M2 Ultra"]) { + switch (src0t) { + case GGML_TYPE_F16: ne11_mm_min = 2; break; + case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; + case GGML_TYPE_Q2_K: ne11_mm_min = 15; break; + case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; + case GGML_TYPE_Q4_K: ne11_mm_min = 11; break; + case GGML_TYPE_Q5_0: // not tested yet + case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet + case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; + default: ne11_mm_min = 1; break; + } + } +#endif + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + !ggml_is_transposed(src0) && + !ggml_is_transposed(src1) && + src1t == GGML_TYPE_F32 && + ne00 % 32 == 0 && ne00 >= 64 && + (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { + //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; + default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); + } + + [encoder 
setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:5]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:6]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; + nrows = 4; + } break; + case GGML_TYPE_F16: + { + nth0 = 32; + nth1 = 1; + if (src1t == GGML_TYPE_F32) { + if (ne11 * ne12 < 4) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; + } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; + nrows = ne11; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; + nrows = 4; + } + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; + nrows = 4; + } + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_ASSERT(false && "not implemented"); + } + }; + + if (ggml_is_quantized(src0t)) { + GGML_ASSERT(ne00 >= nth0*nth1); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:11]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:12]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:13]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; + + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || + src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || + src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128;
+                        [encoder setThreadgroupMemoryLength:mem_size atIndex:0];
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+                    }
+                    else if (src0t == GGML_TYPE_Q4_K) {
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+                    }
+                    else if (src0t == GGML_TYPE_Q3_K) {
+#ifdef GGML_QKK_64
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+#else
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+#endif
+                    }
+                    else if (src0t == GGML_TYPE_Q5_K) {
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+                    }
+                    else if (src0t == GGML_TYPE_Q6_K) {
+                        [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+                    } else {
+                        const int64_t ny = (ne11 + nrows - 1)/nrows;
+                        [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)];
+                    }
+                }
+            } break;
+        case GGML_OP_MUL_MAT_ID:
+            {
+                //GGML_ASSERT(ne00 == ne10);
+                //GGML_ASSERT(ne03 == ne13);
+
+                GGML_ASSERT(src0t == GGML_TYPE_I32);
+
+                const int n_as = ((int32_t *) dst->op_params)[1];
+
+                // TODO: make this more general
+                GGML_ASSERT(n_as <= 8);
+
+                // max size of the src1ids array in the kernel stack
+                GGML_ASSERT(ne11 <= 512);
+
+                struct ggml_tensor * src2 = gf->nodes[i]->src[2];
+
+                const int64_t ne20 = src2 ? src2->ne[0] : 0;
+                const int64_t ne21 = src2 ? src2->ne[1] : 0;
+                const int64_t ne22 = src2 ? src2->ne[2] : 0;
+                const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23);
+
+                const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20);
+                const uint64_t nb21 = src2 ? src2->nb[1] : 0;
+                const uint64_t nb22 = src2 ? src2->nb[2] : 0;
+                const uint64_t nb23 = src2 ? src2->nb[3] : 0; GGML_UNUSED(nb23);
+
+                const enum ggml_type src2t = src2 ? src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t);
+
+                GGML_ASSERT(!ggml_is_transposed(src2));
+                GGML_ASSERT(!ggml_is_transposed(src1));
+
+                GGML_ASSERT(src1t == GGML_TYPE_F32);
+
+                const uint r2 = ne12/ne22;
+                const uint r3 = ne13/ne23;
+
+                // find the break-even point where the matrix-matrix kernel becomes more efficient compared
+                // to the matrix-vector kernel
+                int ne11_mm_min = n_as;
+
+                const int idx = ((int32_t *) dst->op_params)[0];
+
+                // batch size
+                GGML_ASSERT(ne01 == ne11);
+
+                // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs
+                // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel
+                // !!!
+                // TODO: for now, always use mat-vec kernels until we figure out how to improve the
+                // indirect matrix multiplication
+                // !!!
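The gate that follows implements the break-even idea described in the comments above: for the indirect (mixture-of-experts) multiply, the matrix-matrix kernel is only taken once the batch exceeds the number of experts, so `ne11_mm_min` is set to `n_as` rather than to a per-quantization constant as in the plain `GGML_OP_MUL_MAT` case. A minimal C sketch of that decision and of the threadgroup grid used on the matrix-matrix path (helper names are hypothetical; the condition and tile sizes are copied from the hunk below):

```c
#include <stdbool.h>
#include <stdint.h>

// Sketch of the MUL_MAT_ID kernel-selection gate (illustration only, not part of the patch).
// The matrix-matrix path is taken only on Apple7+ GPUs, when each expert row is a multiple
// of 32 and at least 64 elements wide, and when the batch exceeds the break-even point,
// which for the indirect case equals the number of experts (n_as).
static bool mul_mat_id_use_mm(bool is_apple7_plus, int64_t ne20, int64_t ne11, int n_as) {
    const int ne11_mm_min = n_as; // break-even batch size for the indirect path
    return is_apple7_plus && ne20 % 32 == 0 && ne20 >= 64 && ne11 > ne11_mm_min;
}

// Threadgroup grid used when the matrix-matrix path is chosen, mirroring the dispatch below:
// 32x64 output tiles, replicated across all experts (n_as) and the batch dimensions.
static void mul_mat_id_mm_grid(int64_t ne11, int64_t ne21, int n_as,
                               int64_t ne12, int64_t ne13, int64_t grid[3]) {
    grid[0] = (ne11 + 31)/32;
    grid[1] = (ne21 + 63)/64;
    grid[2] = (int64_t) n_as*ne12*ne13;
}
```

On anything below `MTLGPUFamilyApple7`, or for batches at or below the break-even point, the per-type matrix-vector kernels further down in this case are used instead.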
+                if ([ctx->device supportsFamily:MTLGPUFamilyApple7] &&
+                    ne20 % 32 == 0 && ne20 >= 64 &&
+                    ne11 > ne11_mm_min) {
+
+                    id pipeline = nil;
+
+                    switch (src2->type) {
+                        case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break;
+                        case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break;
+                        case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break;
+                        case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break;
+                        case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break;
+                        case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break;
+                        case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break;
+                        case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break;
+                        case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break;
+                        case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break;
+                        case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break;
+                        case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break;
+                        case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break;
+                        case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break;
+                        default: GGML_ASSERT(false && "MUL_MAT_ID not implemented");
+                    }
+
+                    [encoder setComputePipelineState:pipeline];
+                    [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0];
+                    [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1];
+                    [encoder setBuffer:id_dst offset:offs_dst atIndex:2];
+                    [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3];
+                    [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4];
+                    [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:5];
+                    [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6];
+                    [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:7];
+                    [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:8];
+                    [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:9];
+                    [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:10];
+                    [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11];
+                    [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12];
+                    [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13];
+                    [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14];
+                    [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15];
+                    [encoder setBytes:&r2 length:sizeof(r2) atIndex:16];
+                    [encoder setBytes:&r3 length:sizeof(r3) atIndex:17];
+                    [encoder setBytes:&idx length:sizeof(idx) atIndex:18];
+                    // TODO: how to make this an array?
read Metal docs + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:19 + j]; + } + + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src2t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; + } break; + case GGML_TYPE_F16: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + nth0 = 32; + nth1 = 1; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); + GGML_ASSERT(false && "not implemented"); + } + }; + + if (ggml_is_quantized(src2t)) { + GGML_ASSERT(ne20 >= nth0*nth1); + } + + const int64_t _ne1 = 1; // kernels needs a reference in constant memory + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; + 
[encoder setBytes:&ne22 length:sizeof(ne22) atIndex:6]; + [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:7]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:8]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:18]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:20]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; + // TODO: how to make this an array? read Metal docs + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; + } + + if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || + src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { + const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q3_K) { +#ifdef GGML_QKK_64 + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#else + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#endif + } + else if (src2t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else { + const int64_t ny = (_ne1 + nrows - 1)/nrows; + [encoder dispatchThreadgroups:MTLSizeMake(ne21, ny, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + } + } break; + case GGML_OP_GET_ROWS: + { + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; + [encoder 
setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; + } break; + case GGML_OP_RMS_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + int nth = 32; // SIMD width + + while (nth < ne00/4 && nth < 1024) { + nth *= 2; + } + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_GROUP_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + + //float eps; + //memcpy(&eps, dst->op_params, sizeof(float)); + + const float eps = 1e-6f; // TODO: temporarily hardcoded + + const int32_t n_groups = ((int32_t *) dst->op_params)[0]; + + int nth = 32; // SIMD width + + //while (nth < ne00/4 && nth < 1024) { + // nth *= 2; + //} + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&eps length:sizeof( float) atIndex:9]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_NORM: + { + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + const int nth = MIN(256, ne00); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ALIBI: + { + GGML_ASSERT((src0t == GGML_TYPE_F32)); + + const int nth = MIN(1024, ne00); + + //const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_head = ((int32_t *) dst->op_params)[1]; + float max_bias; + memcpy(&max_bias, (int32_t *) dst->op_params + 2, sizeof(float)); + + const int n_heads_log2_floor = 1 << (int) floor(log2(n_head)); + const float m0 = powf(2.0f, -(max_bias) / 
n_heads_log2_floor); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&m0 length:sizeof( float) atIndex:18]; + [encoder setBytes:&m1 length:sizeof( float) atIndex:19]; + [encoder setBytes:&n_heads_log2_floor length:sizeof(int) atIndex:20]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ROPE: + { + GGML_ASSERT(ne10 == ne02); + + const int nth = MIN(1024, ne00); + + const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof( int64_t) 
atIndex:11]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; + [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; + [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; + [encoder setBytes:&mode length:sizeof( int) atIndex:21]; + [encoder setBytes:&n_orig_ctx length:sizeof( int) atIndex:22]; + [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; + [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; + [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; + [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; + [encoder setBytes:&beta_fast length:sizeof( float) atIndex:27]; + [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_IM2COL: + { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int32_t N = src1->ne[is_2D ? 3 : 2]; + const int32_t IC = src1->ne[is_2D ? 2 : 1]; + const int32_t IH = is_2D ? src1->ne[1] : 1; + const int32_t IW = src1->ne[0]; + + const int32_t KH = is_2D ? src0->ne[1] : 1; + const int32_t KW = src0->ne[0]; + + const int32_t OH = is_2D ? dst->ne[2] : 1; + const int32_t OW = dst->ne[1]; + + const int32_t CHW = IC * KH * KW; + + const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; + const int32_t ofs1 = src1->nb[is_2D ? 
2 : 1] / 4; + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; + [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; + [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; + [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; + [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; + [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; + [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; + [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; + [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; + [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; + + [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; + } break; + case GGML_OP_UPSCALE: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int sf = dst->op_params[0]; + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_PAD: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) 
atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ARGSORT: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_I32); + + const int nrows = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + + id pipeline = nil; + + switch (order) { + case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; + } break; + case GGML_OP_LEAKY_RELU: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + float slope; + memcpy(&slope, dst->op_params, sizeof(float)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_DUP: + case GGML_OP_CPY: + case GGML_OP_CONT: + { + GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); + + int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); + + id pipeline = nil; + + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); + + switch (dstt) { + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; + //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; + //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + }; + } break; + case GGML_TYPE_F16: + { + switch (dstt) { + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + }; + } break; + default: GGML_ASSERT(false && "not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder 
setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); + GGML_ASSERT(false); + } + } #ifndef GGML_METAL_NDEBUG - [encoder popDebugGroup]; + [encoder popDebugGroup]; #endif - } + } - if (encoder != nil) { - [encoder endEncoding]; - encoder = nil; - } + if (encoder != nil) { + [encoder endEncoding]; + encoder = nil; + } - [command_buffer commit]; - }); - } - - // wait for all threads to finish - dispatch_barrier_sync(ctx->d_queue, ^{}); + [command_buffer commit]; + }); // check status of command buffers // needed to detect if the device ran out-of-memory for example (#1881) From 862f5e41ab1fdf12d6f59455aad3f5dd8258f805 Mon Sep 17 00:00:00 2001 From: Neuman Vong Date: Wed, 17 Jan 2024 00:47:34 +1100 Subject: [PATCH 331/811] android : introduce starter project example (#4926) * Introduce starter project for Android Based on examples/llama.swiftui. 
* Add github workflow * Set NDK version * Only build arm64-v8a in CI * Sync bench code * Rename CI prop to skip-armeabi-v7a * Remove unused tests --- .github/workflows/build.yml | 25 ++ examples/llama.android/.gitignore | 33 ++ examples/llama.android/README.md | 0 examples/llama.android/app/.gitignore | 1 + examples/llama.android/app/build.gradle.kts | 91 ++++ examples/llama.android/app/proguard-rules.pro | 21 + .../app/src/main/AndroidManifest.xml | 30 ++ .../app/src/main/cpp/CMakeLists.txt | 50 +++ .../app/src/main/cpp/llama-android.cpp | 394 ++++++++++++++++++ .../java/com/example/llama/Downloadable.kt | 119 ++++++ .../src/main/java/com/example/llama/Llm.kt | 172 ++++++++ .../java/com/example/llama/MainActivity.kt | 154 +++++++ .../java/com/example/llama/MainViewModel.kt | 104 +++++ .../java/com/example/llama/ui/theme/Color.kt | 11 + .../java/com/example/llama/ui/theme/Theme.kt | 70 ++++ .../java/com/example/llama/ui/theme/Type.kt | 34 ++ .../res/drawable/ic_launcher_background.xml | 170 ++++++++ .../res/drawable/ic_launcher_foreground.xml | 30 ++ .../main/res/mipmap-anydpi/ic_launcher.xml | 6 + .../res/mipmap-anydpi/ic_launcher_round.xml | 6 + .../src/main/res/mipmap-hdpi/ic_launcher.webp | Bin 0 -> 1404 bytes .../res/mipmap-hdpi/ic_launcher_round.webp | Bin 0 -> 2898 bytes .../src/main/res/mipmap-mdpi/ic_launcher.webp | Bin 0 -> 982 bytes .../res/mipmap-mdpi/ic_launcher_round.webp | Bin 0 -> 1772 bytes .../main/res/mipmap-xhdpi/ic_launcher.webp | Bin 0 -> 1900 bytes .../res/mipmap-xhdpi/ic_launcher_round.webp | Bin 0 -> 3918 bytes .../main/res/mipmap-xxhdpi/ic_launcher.webp | Bin 0 -> 2884 bytes .../res/mipmap-xxhdpi/ic_launcher_round.webp | Bin 0 -> 5914 bytes .../main/res/mipmap-xxxhdpi/ic_launcher.webp | Bin 0 -> 3844 bytes .../res/mipmap-xxxhdpi/ic_launcher_round.webp | Bin 0 -> 7778 bytes .../app/src/main/res/values/colors.xml | 10 + .../app/src/main/res/values/strings.xml | 3 + .../app/src/main/res/values/themes.xml | 5 + .../app/src/main/res/xml/backup_rules.xml | 13 + .../main/res/xml/data_extraction_rules.xml | 19 + examples/llama.android/build.gradle.kts | 5 + examples/llama.android/gradle.properties | 23 + .../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59203 bytes .../gradle/wrapper/gradle-wrapper.properties | 6 + examples/llama.android/gradlew | 185 ++++++++ examples/llama.android/settings.gradle.kts | 17 + 41 files changed, 1807 insertions(+) create mode 100644 examples/llama.android/.gitignore create mode 100644 examples/llama.android/README.md create mode 100644 examples/llama.android/app/.gitignore create mode 100644 examples/llama.android/app/build.gradle.kts create mode 100644 examples/llama.android/app/proguard-rules.pro create mode 100644 examples/llama.android/app/src/main/AndroidManifest.xml create mode 100644 examples/llama.android/app/src/main/cpp/CMakeLists.txt create mode 100644 examples/llama.android/app/src/main/cpp/llama-android.cpp create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/Llm.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt create mode 100644 
examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt create mode 100644 examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml create mode 100644 examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/values/colors.xml create mode 100644 examples/llama.android/app/src/main/res/values/strings.xml create mode 100644 examples/llama.android/app/src/main/res/values/themes.xml create mode 100644 examples/llama.android/app/src/main/res/xml/backup_rules.xml create mode 100644 examples/llama.android/app/src/main/res/xml/data_extraction_rules.xml create mode 100644 examples/llama.android/build.gradle.kts create mode 100644 examples/llama.android/gradle.properties create mode 100644 examples/llama.android/gradle/wrapper/gradle-wrapper.jar create mode 100644 examples/llama.android/gradle/wrapper/gradle-wrapper.properties create mode 100755 examples/llama.android/gradlew create mode 100644 examples/llama.android/settings.gradle.kts diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0a28a1111..367df07a7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -515,6 +515,31 @@ jobs: - name: Build Xcode project run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build + android-build: + runs-on: ubuntu-latest + + steps: + - name: Clone + uses: actions/checkout@v3 + + - name: Set up JDK + uses: actions/setup-java@v3 + with: + java-version: 17 + distribution: zulu + + - name: Setup Android SDK + uses: android-actions/setup-android@v3 + with: + log-accepted-android-sdk-licenses: false + + - name: Build + run: | + cd examples/llama.android + + # Skip armeabi-v7a for now (https://github.com/llvm/llvm-project/issues/65820). 
+ ./gradlew build --no-daemon -Pskip-armeabi-v7a + # freeBSD-latest: # runs-on: macos-12 # steps: diff --git a/examples/llama.android/.gitignore b/examples/llama.android/.gitignore new file mode 100644 index 000000000..347e252ef --- /dev/null +++ b/examples/llama.android/.gitignore @@ -0,0 +1,33 @@ +# Gradle files +.gradle/ +build/ + +# Local configuration file (sdk path, etc) +local.properties + +# Log/OS Files +*.log + +# Android Studio generated files and folders +captures/ +.externalNativeBuild/ +.cxx/ +*.apk +output.json + +# IntelliJ +*.iml +.idea/ +misc.xml +deploymentTargetDropDown.xml +render.experimental.xml + +# Keystore files +*.jks +*.keystore + +# Google Services (e.g. APIs or Firebase) +google-services.json + +# Android Profiling +*.hprof diff --git a/examples/llama.android/README.md b/examples/llama.android/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/examples/llama.android/app/.gitignore b/examples/llama.android/app/.gitignore new file mode 100644 index 000000000..796b96d1c --- /dev/null +++ b/examples/llama.android/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/examples/llama.android/app/build.gradle.kts b/examples/llama.android/app/build.gradle.kts new file mode 100644 index 000000000..7815a8025 --- /dev/null +++ b/examples/llama.android/app/build.gradle.kts @@ -0,0 +1,91 @@ +plugins { + id("com.android.application") + id("org.jetbrains.kotlin.android") +} + +android { + namespace = "com.example.llama" + compileSdk = 34 + + ndkVersion = "26.1.10909125" + + defaultConfig { + applicationId = "com.example.llama" + minSdk = 33 + targetSdk = 34 + versionCode = 1 + versionName = "1.0" + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + vectorDrawables { + useSupportLibrary = true + } + ndk { + // Workaround for https://github.com/llvm/llvm-project/issues/65820 + // affecting armeabi-v7a. Skip armeabi-v7a when invoked with + // -Pskip-armeabi-v7a (e.g., ./gradlew build -Pskip-armeabi-v7a). 
+ if (project.hasProperty("skip-armeabi-v7a")) { + abiFilters += listOf("arm64-v8a", "x86_64", "x86") + } + } + externalNativeBuild { + cmake { + cppFlags += listOf() + arguments += listOf() + } + } + } + + buildTypes { + release { + isMinifyEnabled = false + proguardFiles( + getDefaultProguardFile("proguard-android-optimize.txt"), + "proguard-rules.pro" + ) + } + } + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + kotlinOptions { + jvmTarget = "1.8" + } + buildFeatures { + compose = true + } + composeOptions { + kotlinCompilerExtensionVersion = "1.5.1" + } + packaging { + resources { + excludes += "/META-INF/{AL2.0,LGPL2.1}" + } + } + externalNativeBuild { + cmake { + path = file("src/main/cpp/CMakeLists.txt") + version = "3.22.1" + } + } +} + +dependencies { + + implementation("androidx.core:core-ktx:1.12.0") + implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.6.2") + implementation("androidx.activity:activity-compose:1.8.2") + implementation(platform("androidx.compose:compose-bom:2023.08.00")) + implementation("androidx.compose.ui:ui") + implementation("androidx.compose.ui:ui-graphics") + implementation("androidx.compose.ui:ui-tooling-preview") + implementation("androidx.compose.material3:material3") + testImplementation("junit:junit:4.13.2") + androidTestImplementation("androidx.test.ext:junit:1.1.5") + androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1") + androidTestImplementation(platform("androidx.compose:compose-bom:2023.08.00")) + androidTestImplementation("androidx.compose.ui:ui-test-junit4") + debugImplementation("androidx.compose.ui:ui-tooling") + debugImplementation("androidx.compose.ui:ui-test-manifest") +} diff --git a/examples/llama.android/app/proguard-rules.pro b/examples/llama.android/app/proguard-rules.pro new file mode 100644 index 000000000..f1b424510 --- /dev/null +++ b/examples/llama.android/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/examples/llama.android/app/src/main/AndroidManifest.xml b/examples/llama.android/app/src/main/AndroidManifest.xml new file mode 100644 index 000000000..41a358a29 --- /dev/null +++ b/examples/llama.android/app/src/main/AndroidManifest.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + diff --git a/examples/llama.android/app/src/main/cpp/CMakeLists.txt b/examples/llama.android/app/src/main/cpp/CMakeLists.txt new file mode 100644 index 000000000..85139329a --- /dev/null +++ b/examples/llama.android/app/src/main/cpp/CMakeLists.txt @@ -0,0 +1,50 @@ + +# For more information about using CMake with Android Studio, read the +# documentation: https://d.android.com/studio/projects/add-native-code.html. 
+# For more examples on how to use CMake, see https://github.com/android/ndk-samples. + +# Sets the minimum CMake version required for this project. +cmake_minimum_required(VERSION 3.22.1) + +# Declares the project name. The project name can be accessed via ${ PROJECT_NAME}, +# Since this is the top level CMakeLists.txt, the project name is also accessible +# with ${CMAKE_PROJECT_NAME} (both CMake variables are in-sync within the top level +# build script scope). +project("llama-android") + +include(FetchContent) +FetchContent_Declare( + llama + GIT_REPOSITORY https://github.com/ggerganov/llama.cpp + GIT_TAG master +) + +# Also provides "common" +FetchContent_MakeAvailable(llama) + +# Creates and names a library, sets it as either STATIC +# or SHARED, and provides the relative paths to its source code. +# You can define multiple libraries, and CMake builds them for you. +# Gradle automatically packages shared libraries with your APK. +# +# In this top level CMakeLists.txt, ${CMAKE_PROJECT_NAME} is used to define +# the target library name; in the sub-module's CMakeLists.txt, ${PROJECT_NAME} +# is preferred for the same purpose. +# +# In order to load a library into your app from Java/Kotlin, you must call +# System.loadLibrary() and pass the name of the library defined here; +# for GameActivity/NativeActivity derived applications, the same library name must be +# used in the AndroidManifest.xml file. +add_library(${CMAKE_PROJECT_NAME} SHARED + # List C/C++ source files with relative paths to this CMakeLists.txt. + llama-android.cpp) + +# Specifies libraries CMake should link to your target library. You +# can link libraries from various origins, such as libraries defined in this +# build script, prebuilt third-party libraries, or Android system libraries. +target_link_libraries(${CMAKE_PROJECT_NAME} + # List libraries link to the target library + llama + common + android + log) diff --git a/examples/llama.android/app/src/main/cpp/llama-android.cpp b/examples/llama.android/app/src/main/cpp/llama-android.cpp new file mode 100644 index 000000000..d5e705dce --- /dev/null +++ b/examples/llama.android/app/src/main/cpp/llama-android.cpp @@ -0,0 +1,394 @@ +#include +#include +#include +#include +#include +#include +#include "llama.h" +#include "common/common.h" + +// Write C++ code here. +// +// Do not forget to dynamically load the C++ library into your application. +// +// For instance, +// +// In MainActivity.java: +// static { +// System.loadLibrary("llama-android"); +// } +// +// Or, in MainActivity.kt: +// companion object { +// init { +// System.loadLibrary("llama-android") +// } +// } + +#define TAG "llama-android.cpp" +#define LOGi(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__) +#define LOGe(...) 
__android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) + +jclass la_int_var; +jmethodID la_int_var_value; +jmethodID la_int_var_inc; + +static void log_callback(ggml_log_level level, const char * fmt, void * data) { + if (level == GGML_LOG_LEVEL_ERROR) __android_log_print(ANDROID_LOG_ERROR, TAG, fmt, data); + else if (level == GGML_LOG_LEVEL_INFO) __android_log_print(ANDROID_LOG_INFO, TAG, fmt, data); + else if (level == GGML_LOG_LEVEL_WARN) __android_log_print(ANDROID_LOG_WARN, TAG, fmt, data); + else __android_log_print(ANDROID_LOG_DEFAULT, TAG, fmt, data); +} + +extern "C" +JNIEXPORT jlong JNICALL +Java_com_example_llama_Llm_load_1model(JNIEnv *env, jobject, jstring filename) { + llama_model_params model_params = llama_model_default_params(); + + auto path_to_model = env->GetStringUTFChars(filename, 0); + LOGi("Loading model from %s", path_to_model); + + auto model = llama_load_model_from_file(path_to_model, model_params); + env->ReleaseStringUTFChars(filename, path_to_model); + + if (!model) { + LOGe("load_model() failed"); + env->ThrowNew(env->FindClass("java/lang/IllegalStateException"), "load_model() failed"); + return 0; + } + + return reinterpret_cast(model); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_free_1model(JNIEnv *, jobject, jlong model) { + llama_free_model(reinterpret_cast(model)); +} + +extern "C" +JNIEXPORT jlong JNICALL +Java_com_example_llama_Llm_new_1context(JNIEnv *env, jobject, jlong jmodel) { + auto model = reinterpret_cast(jmodel); + + if (!model) { + LOGe("new_context(): model cannot be null"); + env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"), "Model cannot be null"); + return 0; + } + + int n_threads = std::max(1, std::min(8, (int) sysconf(_SC_NPROCESSORS_ONLN) - 2)); + LOGi("Using %d threads", n_threads); + + llama_context_params ctx_params = llama_context_default_params(); + ctx_params.seed = 1234; + ctx_params.n_ctx = 2048; + ctx_params.n_threads = n_threads; + ctx_params.n_threads_batch = n_threads; + + llama_context * context = llama_new_context_with_model(model, ctx_params); + + if (!context) { + LOGe("llama_new_context_with_model() returned null)"); + env->ThrowNew(env->FindClass("java/lang/IllegalStateException"), + "llama_new_context_with_model() returned null)"); + return 0; + } + + return reinterpret_cast(context); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_free_1context(JNIEnv *, jobject, jlong context) { + llama_free(reinterpret_cast(context)); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_backend_1free(JNIEnv *, jobject) { + llama_backend_free(); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_log_1to_1android(JNIEnv *, jobject) { + llama_log_set(log_callback, NULL); +} + +extern "C" +JNIEXPORT jstring JNICALL +Java_com_example_llama_Llm_bench_1model( + JNIEnv *env, + jobject, + jlong context_pointer, + jlong model_pointer, + jlong batch_pointer, + jint pp, + jint tg, + jint pl, + jint nr + ) { + auto pp_avg = 0.0; + auto tg_avg = 0.0; + auto pp_std = 0.0; + auto tg_std = 0.0; + + const auto context = reinterpret_cast(context_pointer); + const auto model = reinterpret_cast(model_pointer); + const auto batch = reinterpret_cast(batch_pointer); + + const int n_ctx = llama_n_ctx(context); + + LOGi("n_ctx = %d", n_ctx); + + int i, j; + int nri; + for (nri = 0; nri < nr; nri++) { + LOGi("Benchmark prompt processing (pp)"); + + llama_batch_clear(*batch); + + const int n_tokens = pp; + for (i = 0; i < n_tokens; i++) { + 
llama_batch_add(*batch, 0, i, { 0 }, false); + } + + batch->logits[batch->n_tokens - 1] = true; + llama_kv_cache_clear(context); + + const auto t_pp_start = ggml_time_us(); + if (llama_decode(context, *batch) != 0) { + LOGi("llama_decode() failed during prompt processing"); + } + const auto t_pp_end = ggml_time_us(); + + // bench text generation + + LOGi("Benchmark text generation (tg)"); + + llama_kv_cache_clear(context); + const auto t_tg_start = ggml_time_us(); + for (i = 0; i < tg; i++) { + + llama_batch_clear(*batch); + for (j = 0; j < pl; j++) { + llama_batch_add(*batch, 0, i, { j }, true); + } + + LOGi("llama_decode() text generation: %d", i); + if (llama_decode(context, *batch) != 0) { + LOGi("llama_decode() failed during text generation"); + } + } + + const auto t_tg_end = ggml_time_us(); + + llama_kv_cache_clear(context); + + const auto t_pp = double(t_pp_end - t_pp_start) / 1000000.0; + const auto t_tg = double(t_tg_end - t_tg_start) / 1000000.0; + + const auto speed_pp = double(pp) / t_pp; + const auto speed_tg = double(pl * tg) / t_tg; + + pp_avg += speed_pp; + tg_avg += speed_tg; + + pp_std += speed_pp * speed_pp; + tg_std += speed_tg * speed_tg; + + LOGi("pp %f t/s, tg %f t/s", speed_pp, speed_tg); + } + + pp_avg /= double(nr); + tg_avg /= double(nr); + + if (nr > 1) { + pp_std = sqrt(pp_std / double(nr - 1) - pp_avg * pp_avg * double(nr) / double(nr - 1)); + tg_std = sqrt(tg_std / double(nr - 1) - tg_avg * tg_avg * double(nr) / double(nr - 1)); + } else { + pp_std = 0; + tg_std = 0; + } + + char model_desc[128]; + llama_model_desc(model, model_desc, sizeof(model_desc)); + + const auto model_size = double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0; + const auto model_n_params = double(llama_model_n_params(model)) / 1e9; + + const auto backend = "(Android)"; // TODO: What should this be? + + std::stringstream result; + result << std::setprecision(2); + result << "| model | size | params | backend | test | t/s |\n"; + result << "| --- | --- | --- | --- | --- | --- |\n"; + result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | pp " << pp << " | " << pp_avg << " ± " << pp_std << " |\n"; + result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | tg " << tg << " | " << tg_avg << " ± " << tg_std << " |\n"; + + return env->NewStringUTF(result.str().c_str()); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_free_1batch(JNIEnv *, jobject, jlong batch_pointer) { + llama_batch_free(*reinterpret_cast(batch_pointer)); +} + +extern "C" +JNIEXPORT jlong JNICALL +Java_com_example_llama_Llm_new_1batch(JNIEnv *, jobject, jint n_tokens, jint embd, jint n_seq_max) { + + // Source: Copy of llama.cpp:llama_batch_init but heap-allocated. 
+ + llama_batch *batch = new llama_batch { + 0, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + 0, + 0, + 0, + }; + + if (embd) { + batch->embd = (float *) malloc(sizeof(float) * n_tokens * embd); + } else { + batch->token = (llama_token *) malloc(sizeof(llama_token) * n_tokens); + } + + batch->pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens); + batch->n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens); + batch->seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * n_tokens); + for (int i = 0; i < n_tokens; ++i) { + batch->seq_id[i] = (llama_seq_id *) malloc(sizeof(llama_seq_id) * n_seq_max); + } + batch->logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens); + + return reinterpret_cast(batch); +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject, jboolean numa) { + llama_backend_init(numa); +} + +extern "C" +JNIEXPORT jstring JNICALL +Java_com_example_llama_Llm_system_1info(JNIEnv *env, jobject) { + return env->NewStringUTF(llama_print_system_info()); +} + +extern "C" +JNIEXPORT jint JNICALL +Java_com_example_llama_Llm_completion_1init( + JNIEnv *env, + jobject, + jlong context_pointer, + jlong batch_pointer, + jstring jtext, + jint n_len + ) { + + const auto text = env->GetStringUTFChars(jtext, 0); + const auto context = reinterpret_cast(context_pointer); + const auto batch = reinterpret_cast(batch_pointer); + + const auto tokens_list = llama_tokenize(context, text, 1); + + auto n_ctx = llama_n_ctx(context); + auto n_kv_req = tokens_list.size() + (n_len - tokens_list.size()); + + LOGi("n_len = %d, n_ctx = %d, n_kv_req = %d", n_len, n_ctx, n_kv_req); + + if (n_kv_req > n_ctx) { + LOGe("error: n_kv_req > n_ctx, the required KV cache size is not big enough"); + } + + for (auto id : tokens_list) { + LOGi("%s", llama_token_to_piece(context, id).c_str()); + } + + llama_batch_clear(*batch); + + // evaluate the initial prompt + for (auto i = 0; i < tokens_list.size(); i++) { + llama_batch_add(*batch, tokens_list[i], i, { 0 }, false); + } + + // llama_decode will output logits only for the last token of the prompt + batch->logits[batch->n_tokens - 1] = true; + + if (llama_decode(context, *batch) != 0) { + LOGe("llama_decode() failed"); + } + + env->ReleaseStringUTFChars(jtext, text); + + return batch->n_tokens; +} + +extern "C" +JNIEXPORT jstring JNICALL +Java_com_example_llama_Llm_completion_1loop( + JNIEnv * env, + jobject, + jlong context_pointer, + jlong batch_pointer, + jint n_len, + jobject intvar_ncur +) { + const auto context = reinterpret_cast(context_pointer); + const auto batch = reinterpret_cast(batch_pointer); + const auto model = llama_get_model(context); + + if (!la_int_var) la_int_var = env->GetObjectClass(intvar_ncur); + if (!la_int_var_value) la_int_var_value = env->GetMethodID(la_int_var, "getValue", "()I"); + if (!la_int_var_inc) la_int_var_inc = env->GetMethodID(la_int_var, "inc", "()V"); + + auto n_vocab = llama_n_vocab(model); + auto logits = llama_get_logits_ith(context, batch->n_tokens - 1); + + std::vector candidates; + candidates.reserve(n_vocab); + + for (llama_token token_id = 0; token_id < n_vocab; token_id++) { + candidates.emplace_back(llama_token_data{ token_id, logits[token_id], 0.0f }); + } + + llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; + + // sample the most likely token + const auto new_token_id = llama_sample_token_greedy(context, &candidates_p); + + const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value); + if 
(new_token_id == llama_token_eos(model) || n_cur == n_len) { + return env->NewStringUTF(""); + } + + auto new_token_chars = llama_token_to_piece(context, new_token_id); + LOGi("new_token_chars: `%s`", new_token_chars.c_str()); + auto new_token = env->NewStringUTF(new_token_chars.c_str()); + + llama_batch_clear(*batch); + llama_batch_add(*batch, new_token_id, n_cur, { 0 }, true); + + env->CallVoidMethod(intvar_ncur, la_int_var_inc); + + if (llama_decode(context, *batch) != 0) { + LOGe("llama_decode() returned null"); + } + + return new_token; +} + +extern "C" +JNIEXPORT void JNICALL +Java_com_example_llama_Llm_kv_1cache_1clear(JNIEnv *, jobject, jlong context) { + llama_kv_cache_clear(reinterpret_cast(context)); +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt b/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt new file mode 100644 index 000000000..78c231ae5 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt @@ -0,0 +1,119 @@ +package com.example.llama + +import android.app.DownloadManager +import android.net.Uri +import android.util.Log +import androidx.compose.material3.Button +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableDoubleStateOf +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.remember +import androidx.compose.runtime.rememberCoroutineScope +import androidx.compose.runtime.setValue +import androidx.core.database.getLongOrNull +import androidx.core.net.toUri +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch +import java.io.File + +data class Downloadable(val name: String, val source: Uri, val destination: File) { + companion object { + @JvmStatic + private val tag: String? 
= this::class.qualifiedName + + sealed interface State + data object Ready: State + data class Downloading(val id: Long): State + data class Downloaded(val downloadable: Downloadable): State + data class Error(val message: String): State + + @JvmStatic + @Composable + fun Button(viewModel: MainViewModel, dm: DownloadManager, item: Downloadable) { + var status: State by remember { + mutableStateOf( + if (item.destination.exists()) Downloaded(item) + else Ready + ) + } + var progress by remember { mutableDoubleStateOf(0.0) } + + val coroutineScope = rememberCoroutineScope() + + suspend fun waitForDownload(result: Downloading, item: Downloadable): State { + while (true) { + val cursor = dm.query(DownloadManager.Query().setFilterById(result.id)) + + if (cursor == null) { + Log.e(tag, "dm.query() returned null") + return Error("dm.query() returned null") + } + + if (!cursor.moveToFirst() || cursor.count < 1) { + cursor.close() + Log.i(tag, "cursor.moveToFirst() returned false or cursor.count < 1, download canceled?") + return Ready + } + + val pix = cursor.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR) + val tix = cursor.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES) + val sofar = cursor.getLongOrNull(pix) ?: 0 + val total = cursor.getLongOrNull(tix) ?: 1 + cursor.close() + + if (sofar == total) { + return Downloaded(item) + } + + progress = (sofar * 1.0) / total + + delay(1000L) + } + } + + fun onClick() { + when (val s = status) { + is Downloaded -> { + viewModel.load(item.destination.path) + } + + is Downloading -> { + coroutineScope.launch { + status = waitForDownload(s, item) + } + } + + else -> { + item.destination.delete() + + val request = DownloadManager.Request(item.source).apply { + setTitle("Downloading model") + setDescription("Downloading model: ${item.name}") + setAllowedNetworkTypes(DownloadManager.Request.NETWORK_WIFI) + setDestinationUri(item.destination.toUri()) + } + + viewModel.log("Saving ${item.name} to ${item.destination.path}") + Log.i(tag, "Saving ${item.name} to ${item.destination.path}") + + val id = dm.enqueue(request) + status = Downloading(id) + onClick() + } + } + } + + Button(onClick = { onClick() }, enabled = status !is Downloading) { + when (status) { + is Downloading -> Text(text = "Downloading ${(progress * 100).toInt()}%") + is Downloaded -> Text("Load ${item.name}") + is Ready -> Text("Download ${item.name}") + is Error -> Text("Download ${item.name}") + } + } + } + + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt b/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt new file mode 100644 index 000000000..5f3270372 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt @@ -0,0 +1,172 @@ +package com.example.llama + +import android.util.Log +import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.asCoroutineDispatcher +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.flow +import kotlinx.coroutines.flow.flowOn +import kotlinx.coroutines.withContext +import java.util.concurrent.Executors +import kotlin.concurrent.thread + +class Llm { + private val tag: String? = this::class.simpleName + + private val threadLocalState: ThreadLocal = ThreadLocal.withInitial { State.Idle } + + private val runLoop: CoroutineDispatcher = Executors.newSingleThreadExecutor { + thread(start = false, name = "Llm-RunLoop") { + Log.d(tag, "Dedicated thread for native code: ${Thread.currentThread().name}") + + // No-op if called more than once. 
+ System.loadLibrary("llama-android") + + // Set llama log handler to Android + log_to_android() + backend_init(false) + + Log.d(tag, system_info()) + + it.run() + }.apply { + uncaughtExceptionHandler = Thread.UncaughtExceptionHandler { _, exception: Throwable -> + Log.e(tag, "Unhandled exception", exception) + } + } + }.asCoroutineDispatcher() + + private val nlen: Int = 64 + + private external fun log_to_android() + private external fun load_model(filename: String): Long + private external fun free_model(model: Long) + private external fun new_context(model: Long): Long + private external fun free_context(context: Long) + private external fun backend_init(numa: Boolean) + private external fun backend_free() + private external fun free_batch(batch: Long) + private external fun new_batch(nTokens: Int, embd: Int, nSeqMax: Int): Long + private external fun bench_model( + context: Long, + model: Long, + batch: Long, + pp: Int, + tg: Int, + pl: Int, + nr: Int + ): String + + private external fun system_info(): String + + private external fun completion_init( + context: Long, + batch: Long, + text: String, + nLen: Int + ): Int + + private external fun completion_loop( + context: Long, + batch: Long, + nLen: Int, + ncur: IntVar + ): String + + private external fun kv_cache_clear(context: Long) + + suspend fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1): String { + return withContext(runLoop) { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + Log.d(tag, "bench(): $state") + bench_model(state.context, state.model, state.batch, pp, tg, pl, nr) + } + + else -> throw IllegalStateException("No model loaded") + } + } + } + + suspend fun load(pathToModel: String) { + withContext(runLoop) { + when (threadLocalState.get()) { + is State.Idle -> { + val model = load_model(pathToModel) + if (model == 0L) throw IllegalStateException("load_model() failed") + + val context = new_context(model) + if (context == 0L) throw IllegalStateException("new_context() failed") + + val batch = new_batch(512, 0, 1) + if (batch == 0L) throw IllegalStateException("new_batch() failed") + + Log.i(tag, "Loaded model $pathToModel") + threadLocalState.set(State.Loaded(model, context, batch)) + } + else -> throw IllegalStateException("Model already loaded") + } + } + } + + fun send(message: String): Flow = flow { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + val ncur = IntVar(completion_init(state.context, state.batch, message, nlen)) + while (ncur.value <= nlen) { + val str = completion_loop(state.context, state.batch, nlen, ncur) + if (str.isEmpty()) { + break + } + emit(str) + } + kv_cache_clear(state.context) + } + else -> {} + } + }.flowOn(runLoop) + + /** + * Unloads the model and frees resources. + * + * This is a no-op if there's no model loaded. + */ + suspend fun unload() { + withContext(runLoop) { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + free_context(state.context) + free_model(state.model) + free_batch(state.batch) + + threadLocalState.set(State.Idle) + } + else -> {} + } + } + } + + companion object { + private class IntVar(value: Int) { + @Volatile + var value: Int = value + private set + + fun inc() { + synchronized(this) { + value += 1 + } + } + } + + private sealed interface State { + data object Idle: State + data class Loaded(val model: Long, val context: Long, val batch: Long): State + } + + // Enforce only one instance of Llm. 
+ private val _instance: Llm = Llm() + + fun instance(): Llm = _instance + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt b/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt new file mode 100644 index 000000000..9da04f7d3 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt @@ -0,0 +1,154 @@ +package com.example.llama + +import android.app.ActivityManager +import android.app.DownloadManager +import android.content.ClipData +import android.content.ClipboardManager +import android.net.Uri +import android.os.Bundle +import android.os.StrictMode +import android.os.StrictMode.VmPolicy +import android.text.format.Formatter +import androidx.activity.ComponentActivity +import androidx.activity.compose.setContent +import androidx.activity.viewModels +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.Column +import androidx.compose.foundation.layout.Row +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.lazy.LazyColumn +import androidx.compose.foundation.lazy.items +import androidx.compose.foundation.lazy.rememberLazyListState +import androidx.compose.material3.Button +import androidx.compose.material3.LocalContentColor +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.OutlinedTextField +import androidx.compose.material3.Surface +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.ui.Modifier +import androidx.compose.ui.unit.dp +import androidx.core.content.getSystemService +import com.example.llama.ui.theme.LlamaAndroidTheme +import java.io.File + +class MainActivity( + activityManager: ActivityManager? = null, + downloadManager: DownloadManager? = null, + clipboardManager: ClipboardManager? = null, +): ComponentActivity() { + private val tag: String? = this::class.simpleName + + private val activityManager by lazy { activityManager ?: getSystemService()!! } + private val downloadManager by lazy { downloadManager ?: getSystemService()!! } + private val clipboardManager by lazy { clipboardManager ?: getSystemService()!! } + + private val viewModel: MainViewModel by viewModels() + + // Get a MemoryInfo object for the device's current memory status. + private fun availableMemory(): ActivityManager.MemoryInfo { + return ActivityManager.MemoryInfo().also { memoryInfo -> + activityManager.getMemoryInfo(memoryInfo) + } + } + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + + StrictMode.setVmPolicy( + VmPolicy.Builder(StrictMode.getVmPolicy()) + .detectLeakedClosableObjects() + .build() + ) + + val free = Formatter.formatFileSize(this, availableMemory().availMem) + val total = Formatter.formatFileSize(this, availableMemory().totalMem) + + viewModel.log("Current memory: $free / $total") + viewModel.log("Downloads directory: ${getExternalFilesDir(null)}") + + val extFilesDir = getExternalFilesDir(null) + + val models = listOf( + Downloadable( + "Phi-2 7B (Q4_0, 1.6 GiB)", + Uri.parse("https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true"), + File(extFilesDir, "phi-2-q4_0.gguf"), + ), + Downloadable( + "TinyLlama 1.1B (f16, 2.2 GiB)", + Uri.parse("https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true"), + File(extFilesDir, "tinyllama-1.1-f16.gguf"), + ), + Downloadable( + "Phi 2 DPO (Q3_K_M, 1.48 GiB)", + Uri.parse("https://huggingface.co/TheBloke/phi-2-dpo-GGUF/resolve/main/phi-2-dpo.Q3_K_M.gguf?download=true"), + File(extFilesDir, "phi-2-dpo.Q3_K_M.gguf") + ), + ) + + setContent { + LlamaAndroidTheme { + // A surface container using the 'background' color from the theme + Surface( + modifier = Modifier.fillMaxSize(), + color = MaterialTheme.colorScheme.background + ) { + MainCompose( + viewModel, + clipboardManager, + downloadManager, + models, + ) + } + + } + } + } +} + +@Composable +fun MainCompose( + viewModel: MainViewModel, + clipboard: ClipboardManager, + dm: DownloadManager, + models: List +) { + Column { + val scrollState = rememberLazyListState() + + Box(modifier = Modifier.weight(1f)) { + LazyColumn(state = scrollState) { + items(viewModel.messages) { + Text( + it, + style = MaterialTheme.typography.bodyLarge.copy(color = LocalContentColor.current), + modifier = Modifier.padding(16.dp) + ) + } + } + } + OutlinedTextField( + value = viewModel.message, + onValueChange = { viewModel.updateMessage(it) }, + label = { Text("Message") }, + ) + Row { + Button({ viewModel.send() }) { Text("Send") } + Button({ viewModel.bench(8, 4, 1) }) { Text("Bench") } + Button({ viewModel.clear() }) { Text("Clear") } + Button({ + viewModel.messages.joinToString("\n").let { + clipboard.setPrimaryClip(ClipData.newPlainText("", it)) + } + }) { Text("Copy") } + } + + Column { + for (model in models) { + Downloadable.Button(viewModel, dm, model) + } + } + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt new file mode 100644 index 000000000..be95e2221 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt @@ -0,0 +1,104 @@ +package com.example.llama + +import android.util.Log +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.setValue +import androidx.lifecycle.ViewModel +import androidx.lifecycle.viewModelScope +import kotlinx.coroutines.flow.catch +import kotlinx.coroutines.launch + +class MainViewModel(private val llm: Llm = Llm.instance()): ViewModel() { + companion object { + @JvmStatic + private val NanosPerSecond = 1_000_000_000.0 + } + + private val tag: String? 
= this::class.simpleName + + var messages by mutableStateOf(listOf("Initializing...")) + private set + + var message by mutableStateOf("") + private set + + override fun onCleared() { + super.onCleared() + + viewModelScope.launch { + try { + llm.unload() + } catch (exc: IllegalStateException) { + messages += exc.message!! + } + } + } + + fun send() { + val text = message + message = "" + + // Add to messages console. + messages += text + messages += "" + + viewModelScope.launch { + llm.send(text) + .catch { + Log.e(tag, "send() failed", it) + messages += it.message!! + } + .collect { messages = messages.dropLast(1) + (messages.last() + it) } + } + } + + fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1) { + viewModelScope.launch { + try { + val start = System.nanoTime() + val warmupResult = llm.bench(pp, tg, pl, nr) + val end = System.nanoTime() + + messages += warmupResult + + val warmup = (end - start).toDouble() / NanosPerSecond + messages += "Warm up time: $warmup seconds, please wait..." + + if (warmup > 5.0) { + messages += "Warm up took too long, aborting benchmark" + return@launch + } + + messages += llm.bench(512, 128, 1, 3) + } catch (exc: IllegalStateException) { + Log.e(tag, "bench() failed", exc) + messages += exc.message!! + } + } + } + + fun load(pathToModel: String) { + viewModelScope.launch { + try { + llm.load(pathToModel) + messages += "Loaded $pathToModel" + } catch (exc: IllegalStateException) { + Log.e(tag, "load() failed", exc) + messages += exc.message!! + } + } + } + + fun updateMessage(newMessage: String) { + message = newMessage + } + + fun clear() { + messages = listOf() + } + + fun log(message: String) { + messages += message + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt new file mode 100644 index 000000000..40c30e8d9 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt @@ -0,0 +1,11 @@ +package com.example.llama.ui.theme + +import androidx.compose.ui.graphics.Color + +val Purple80 = Color(0xFFD0BCFF) +val PurpleGrey80 = Color(0xFFCCC2DC) +val Pink80 = Color(0xFFEFB8C8) + +val Purple40 = Color(0xFF6650a4) +val PurpleGrey40 = Color(0xFF625b71) +val Pink40 = Color(0xFF7D5260) diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt new file mode 100644 index 000000000..e742220a8 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt @@ -0,0 +1,70 @@ +package com.example.llama.ui.theme + +import android.app.Activity +import android.os.Build +import androidx.compose.foundation.isSystemInDarkTheme +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.darkColorScheme +import androidx.compose.material3.dynamicDarkColorScheme +import androidx.compose.material3.dynamicLightColorScheme +import androidx.compose.material3.lightColorScheme +import androidx.compose.runtime.Composable +import androidx.compose.runtime.SideEffect +import androidx.compose.ui.graphics.toArgb +import androidx.compose.ui.platform.LocalContext +import androidx.compose.ui.platform.LocalView +import androidx.core.view.WindowCompat + +private val DarkColorScheme = darkColorScheme( + primary = Purple80, + secondary = PurpleGrey80, + tertiary = Pink80 +) + +private val LightColorScheme = lightColorScheme( + primary = Purple40, + secondary = PurpleGrey40, 
+    tertiary = Pink40
+
+    /* Other default colors to override
+    background = Color(0xFFFFFBFE),
+    surface = Color(0xFFFFFBFE),
+    onPrimary = Color.White,
+    onSecondary = Color.White,
+    onTertiary = Color.White,
+    onBackground = Color(0xFF1C1B1F),
+    onSurface = Color(0xFF1C1B1F),
+    */
+)
+
+@Composable
+fun LlamaAndroidTheme(
+    darkTheme: Boolean = isSystemInDarkTheme(),
+    // Dynamic color is available on Android 12+
+    dynamicColor: Boolean = true,
+    content: @Composable () -> Unit
+) {
+    val colorScheme = when {
+        dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> {
+            val context = LocalContext.current
+            if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
+        }
+
+        darkTheme -> DarkColorScheme
+        else -> LightColorScheme
+    }
+    val view = LocalView.current
+    if (!view.isInEditMode) {
+        SideEffect {
+            val window = (view.context as Activity).window
+            window.statusBarColor = colorScheme.primary.toArgb()
+            WindowCompat.getInsetsController(window, view).isAppearanceLightStatusBars = darkTheme
+        }
+    }
+
+    MaterialTheme(
+        colorScheme = colorScheme,
+        typography = Typography,
+        content = content
+    )
+}
diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
new file mode 100644
index 000000000..0b87946ca
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
@@ -0,0 +1,34 @@
+package com.example.llama.ui.theme
+
+import androidx.compose.material3.Typography
+import androidx.compose.ui.text.TextStyle
+import androidx.compose.ui.text.font.FontFamily
+import androidx.compose.ui.text.font.FontWeight
+import androidx.compose.ui.unit.sp
+
+// Set of Material typography styles to start with
+val Typography = Typography(
+    bodyLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 16.sp,
+        lineHeight = 24.sp,
+        letterSpacing = 0.5.sp
+    )
+    /* Other default text styles to override
+    titleLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 22.sp,
+        lineHeight = 28.sp,
+        letterSpacing = 0.sp
+    ),
+    labelSmall = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Medium,
+        fontSize = 11.sp,
+        lineHeight = 16.sp,
+        letterSpacing = 0.5.sp
+    )
+    */
+)
diff --git a/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml b/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 000000000..07d5da9cb
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml b/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
new file mode 100644
index 000000000..7706ab9e6
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
new file mode 100644
index 000000000..b3e26b4c6
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml
b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml new file mode 100644 index 000000000..b3e26b4c6 --- /dev/null +++ b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..c209e78ecd372343283f4157dcfd918ec5165bb3 GIT binary patch literal 1404 zcmV-?1%vuhNk&F=1pok7MM6+kP&il$0000G0000-002h-06|PpNX!5L00Dqw+t%{r zzW2vH!KF=w&cMnnN@{whkTw+#mAh0SV?YL=)3MimFYCWp#fpdtz~8$hD5VPuQgtcN zXl<@<#Cme5f5yr2h%@8TWh?)bSK`O z^Z@d={gn7J{iyxL_y_%J|L>ep{dUxUP8a{byupH&!UNR*OutO~0{*T4q5R6@ApLF! z5{w?Z150gC7#>(VHFJZ-^6O@PYp{t!jH(_Z*nzTK4 zkc{fLE4Q3|mA2`CWQ3{8;gxGizgM!zccbdQoOLZc8hThi-IhN90RFT|zlxh3Ty&VG z?Fe{#9RrRnxzsu|Lg2ddugg7k%>0JeD+{XZ7>Z~{=|M+sh1MF7~ zz>To~`~LVQe1nNoR-gEzkpe{Ak^7{{ZBk2i_<+`Bq<^GB!RYG+z)h;Y3+<{zlMUYd zrd*W4w&jZ0%kBuDZ1EW&KLpyR7r2=}fF2%0VwHM4pUs}ZI2egi#DRMYZPek*^H9YK zay4Iy3WXFG(F14xYsoDA|KXgGc5%2DhmQ1gFCkrgHBm!lXG8I5h*uf{rn48Z!_@ z4Bk6TJAB2CKYqPjiX&mWoW>OPFGd$wqroa($ne7EUK;#3VYkXaew%Kh^3OrMhtjYN?XEoY`tRPQsAkH-DSL^QqyN0>^ zmC>{#F14jz4GeW{pJoRpLFa_*GI{?T93^rX7SPQgT@LbLqpNA}<@2wH;q493)G=1Y z#-sCiRNX~qf3KgiFzB3I>4Z%AfS(3$`-aMIBU+6?gbgDb!)L~A)je+;fR0jWLL-Fu z4)P{c7{B4Hp91&%??2$v9iRSFnuckHUm}or9seH6 z>%NbT+5*@L5(I9j@06@(!{ZI?U0=pKn8uwIg&L{JV14+8s2hnvbRrU|hZCd}IJu7*;;ECgO%8_*W Kmw_-CKmY()leWbG literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp b/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..b2dfe3d1ba5cf3ee31b3ecc1ced89044a1f3b7a9 GIT binary patch literal 2898 zcmV-Y3$650Nk&FW3jhFDMM6+kP&il$0000G0000-002h-06|PpNWB9900E$G+qN-D z+81ABX7q?;bwx%xBg?kcwr$(C-Tex-ZCkHUw(Y9#+`E5-zuONG5fgw~E2WDng@Bc@ z24xy+R1n%~6xI#u9vJ8zREI)sb<&Il(016}Z~V1n^PU3-_H17A*Bf^o)&{_uBv}Py zulRfeE8g(g6HFhk_?o_;0@tz?1I+l+Y#Q*;RVC?(ud`_cU-~n|AX-b`JHrOIqn(-t&rOg-o`#C zh0LPxmbOAEb;zHTu!R3LDh1QO zZTf-|lJNUxi-PpcbRjw3n~n-pG;$+dIF6eqM5+L();B2O2tQ~|p{PlpNcvDbd1l%c zLtXn%lu(3!aNK!V#+HNn_D3lp z2%l+hK-nsj|Bi9;V*WIcQRTt5j90A<=am+cc`J zTYIN|PsYAhJ|=&h*4wI4ebv-C=Be#u>}%m;a{IGmJDU`0snWS&$9zdrT(z8#{OZ_Y zxwJx!ZClUi%YJjD6Xz@OP8{ieyJB=tn?>zaI-4JN;rr`JQbb%y5h2O-?_V@7pG_+y z(lqAsqYr!NyVb0C^|uclHaeecG)Sz;WV?rtoqOdAAN{j%?Uo%owya(F&qps@Id|Of zo@~Y-(YmfB+chv^%*3g4k3R0WqvuYUIA+8^SGJ{2Bl$X&X&v02>+0$4?di(34{pt* zG=f#yMs@Y|b&=HyH3k4yP&goF2LJ#tBLJNNDo6lG06r}ghC-pC4Q*=x3;|+W04zte zAl>l4kzUBQFYF(E`KJy?ZXd1tnfbH+Z~SMmA21KokJNs#eqcXWKUIC>{TuoKe^vhF z);H)o`t9j~`$h1D`#bxe@E`oE`cM9w(@)5Bp8BNukIwM>wZHfd0S;5bcXA*5KT3bj zc&_~`&{z7u{Et!Z_k78H75gXf4g8<_ul!H$eVspPeU3j&&Au=2R*Zp#M9$9s;fqwgzfiX=E_?BwVcfx3tG9Q-+<5fw z%Hs64z)@Q*%s3_Xd5>S4dg$s>@rN^ixeVj*tqu3ZV)biDcFf&l?lGwsa zWj3rvK}?43c{IruV2L`hUU0t^MemAn3U~x3$4mFDxj=Byowu^Q+#wKRPrWywLjIAp z9*n}eQ9-gZmnd9Y0WHtwi2sn6n~?i#n9VN1B*074_VbZZ=WrpkMYr{RsI ztM_8X1)J*DZejxkjOTRJ&a*lrvMKBQURNP#K)a5wIitfu(CFYV4FT?LUB$jVwJSZz zNBFTWg->Yk0j&h3e*a5>B=-xM7dE`IuOQna!u$OoxLlE;WdrNlN)1 z7**de7-hZ!(%_ZllHBLg`Ir#|t>2$*xVOZ-ADZKTN?{(NUeLU9GbuG-+Axf*AZ-P1 z0ZZ*fx+ck4{XtFsbcc%GRStht@q!m*ImssGwuK+P@%gEK!f5dHymg<9nSCXsB6 zQ*{<`%^bxB($Z@5286^-A(tR;r+p7B%^%$N5h%lb*Vlz-?DL9x;!j<5>~kmXP$E}m zQV|7uv4SwFs0jUervsxVUm>&9Y3DBIzc1XW|CUZrUdb<&{@D5yuLe%Xniw^x&{A2s z0q1+owDSfc3Gs?ht;3jw49c#mmrViUfX-yvc_B*wY|Lo7; zGh!t2R#BHx{1wFXReX*~`NS-LpSX 
z#TV*miO^~B9PF%O0huw!1Zv>^d0G3$^8dsC6VI!$oKDKiXdJt{mGkyA`+Gwd4D-^1qtNTUK)`N*=NTG-6}=5k6suNfdLt*dt8D| z%H#$k)z#ZRcf|zDWB|pn<3+7Nz>?WW9WdkO5(a^m+D4WRJ9{wc>Y}IN)2Kbgn;_O? zGqdr&9~|$Y0tP=N(k7^Eu;iO*w+f%W`20BNo)=Xa@M_)+o$4LXJyiw{F?a633SC{B zl~9FH%?^Rm*LVz`lkULs)%idDX^O)SxQol(3jDRyBVR!7d`;ar+D7do)jQ}m`g$TevUD5@?*P8)voa?kEe@_hl{_h8j&5eB-5FrYW&*FHVt$ z$kRF9Nstj%KRzpjdd_9wO=4zO8ritN*NPk_9avYrsF(!4))tm{Ga#OY z(r{0buexOzu7+rw8E08Gxd`LTOID{*AC1m*6Nw@osfB%0oBF5sf<~wH1kL;sd zo)k6^VyRFU`)dt*iX^9&QtWbo6yE8XXH?`ztvpiOLgI3R+=MOBQ9=rMVgi<*CU%+d1PQQ0a1U=&b0vkF207%xU0ssI2 literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..4f0f1d64e58ba64d180ce43ee13bf9a17835fbca GIT binary patch literal 982 zcmV;{11bDcNk&G_0{{S5MM6+kP&il$0000G0000l001ul06|PpNU8t;00Dqo+t#w^ z^1csucXz7-Qrhzl9HuHB%l>&>1tG2^vb*E&k^T3$FG1eQZ51g$uv4V+kI`0<^1Z@N zk?Jjh$olyC%l>)Xq;7!>{iBj&BjJ`P&$fsCfpve_epJOBkTF?nu-B7D!hO=2ZR}

C%4 zc_9eOXvPbC4kzU8YowIA8cW~Uv|eB&yYwAObSwL2vY~UYI7NXPvf3b+c^?wcs~_t{ ze_m66-0)^{JdOMKPwjpQ@Sna!*?$wTZ~su*tNv7o!gXT!GRgivP}ec?5>l1!7<(rT zds|8x(qGc673zrvYIz;J23FG{9nHMnAuP}NpAED^laz3mAN1sy+NXK)!6v1FxQ;lh zOBLA>$~P3r4b*NcqR;y6pwyhZ3_PiDb|%n1gGjl3ZU}ujInlP{eks-#oA6>rh&g+!f`hv#_%JrgYPu z(U^&XLW^QX7F9Z*SRPpQl{B%x)_AMp^}_v~?j7 zapvHMKxSf*Mtyx8I}-<*UGn3)oHd(nn=)BZ`d$lDBwq_GL($_TPaS{UeevT(AJ`p0 z9%+hQb6z)U9qjbuXjg|dExCLjpS8$VKQ55VsIC%@{N5t{NsW)=hNGI`J=x97_kbz@ E0Of=7!TQj4N+cqN`nQhxvX7dAV-`K|Ub$-q+H-5I?Tx0g9jWxd@A|?POE8`3b8fO$T))xP* z(X?&brZw({`)WU&rdAs1iTa0x6F@PIxJ&&L|dpySV!ID|iUhjCcKz(@mE z!x@~W#3H<)4Ae(4eQJRk`Iz3<1)6^m)0b_4_TRZ+cz#eD3f8V;2r-1fE!F}W zEi0MEkTTx}8i1{`l_6vo0(Vuh0HD$I4SjZ=?^?k82R51bC)2D_{y8mi_?X^=U?2|F{Vr7s!k(AZC$O#ZMyavHhlQ7 zUR~QXuH~#o#>(b$u4?s~HLF*3IcF7023AlwAYudn0FV~|odGH^05AYPEfR)8p`i{n zwg3zPVp{+wOsxKc>)(pMupKF!Y2HoUqQ3|Yu|8lwR=?5zZuhG6J?H`bSNk_wPoM{u zSL{c@pY7+c2kck>`^q1^^gR0QB7Y?KUD{vz-uVX~;V-rW)PDcI)$_UjgVV?S?=oLR zf4}zz{#*R_{LkiJ#0RdQLNC^2Vp%JPEUvG9ra2BVZ92(p9h7Ka@!yf9(lj#}>+|u* z;^_?KWdzkM`6gqPo9;;r6&JEa)}R3X{(CWv?NvgLeOTq$cZXqf7|sPImi-7cS8DCN zGf;DVt3Am`>hH3{4-WzH43Ftx)SofNe^-#|0HdCo<+8Qs!}TZP{HH8~z5n`ExcHuT zDL1m&|DVpIy=xsLO>8k92HcmfSKhflQ0H~9=^-{#!I1g(;+44xw~=* zxvNz35vfsQE)@)Zsp*6_GjYD};Squ83<_?^SbALb{a`j<0Gn%6JY!zhp=Fg}Ga2|8 z52e1WU%^L1}15Ex0fF$e@eCT(()_P zvV?CA%#Sy08_U6VPt4EtmVQraWJX` zh=N|WQ>LgrvF~R&qOfB$!%D3cGv?;Xh_z$z7k&s4N)$WYf*k=|*jCEkO19{h_(%W4 zPuOqbCw`SeAX*R}UUsbVsgtuG?xs(#Ikx9`JZoQFz0n*7ZG@Fv@kZk`gzO$HoA9kN z8U5{-yY zvV{`&WKU2$mZeoBmiJrEdzUZAv1sRxpePdg1)F*X^Y)zp^Y*R;;z~vOv-z&)&G)JQ{m!C9cmziu1^nHA z`#`0c>@PnQ9CJKgC5NjJD8HM3|KC(g5nnCq$n0Gsu_DXk36@ql%npEye|?%RmG)

FJ$wK}0tWNB{uH;AM~i literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..948a3070fe34c611c42c0d3ad3013a0dce358be0 GIT binary patch literal 1900 zcmV-y2b1_xNk&Fw2LJ$9MM6+kP&il$0000G0001A003VA06|PpNH75a00DqwTbm-~ zullQTcXxO9ki!OCRx^i?oR|n!<8G0=kI^!JSjFi-LL*`V;ET0H2IXfU0*i>o6o6Gy zRq6Ap5(_{XLdXcL-MzlN`ugSdZY_`jXhcENAu)N_0?GhF))9R;E`!bo9p?g?SRgw_ zEXHhFG$0{qYOqhdX<(wE4N@es3VIo$%il%6xP9gjiBri+2pI6aY4 zJbgh-Ud|V%3O!IcHKQx1FQH(_*TK;1>FQWbt^$K1zNn^cczkBs=QHCYZ8b&l!UV{K z{L0$KCf_&KR^}&2Fe|L&?1I7~pBENnCtCuH3sjcx6$c zwqkNkru);ie``q+_QI;IYLD9OV0ZxkuyBz|5<$1BH|vtey$> z5oto4=l-R-Aaq`Dk0}o9N0VrkqW_#;!u{!bJLDq%0092{Ghe=F;(kn} z+sQ@1=UlX30+2nWjkL$B^b!H2^QYO@iFc0{(-~yXj2TWz?VG{v`Jg zg}WyYnwGgn>{HFaG7E~pt=)sOO}*yd(UU-D(E&x{xKEl6OcU?pl)K%#U$dn1mDF19 zSw@l8G!GNFB3c3VVK0?uyqN&utT-D5%NM4g-3@Sii9tSXKtwce~uF zS&Jn746EW^wV~8zdQ1XC28~kXu8+Yo9p!<8h&(Q({J*4DBglPdpe4M_mD8AguZFn~ ztiuO~{6Bx?SfO~_ZV(GIboeR9~hAym{{fV|VM=77MxDrbW6`ujX z<3HF(>Zr;#*uCvC*bpoSr~C$h?_%nXps@A)=l_;({Fo#6Y1+Zv`!T5HB+)#^-Ud_; zBwftPN=d8Vx)*O1Mj+0oO=mZ+NVH*ptNDC-&zZ7Hwho6UQ#l-yNvc0Cm+2$$6YUk2D2t#vdZX-u3>-Be1u9gtTBiMB^xwWQ_rgvGpZ6(C@e23c!^K=>ai-Rqu zhqT`ZQof;9Bu!AD(i^PCbYV%yha9zuoKMp`U^z;3!+&d@Hud&_iy!O-$b9ZLcSRh? z)R|826w}TU!J#X6P%@Zh=La$I6zXa#h!B;{qfug}O%z@K{EZECu6zl)7CiNi%xti0 zB{OKfAj83~iJvmpTU|&q1^?^cIMn2RQ?jeSB95l}{DrEPTW{_gmU_pqTc)h@4T>~& zluq3)GM=xa(#^VU5}@FNqpc$?#SbVsX!~RH*5p0p@w z;~v{QMX0^bFT1!cXGM8K9FP+=9~-d~#TK#ZE{4umGT=;dfvWi?rYj;^l_Zxywze`W z^Cr{55U@*BalS}K%Czii_80e0#0#Zkhlij4-~I@}`-JFJ7$5{>LnoJSs??J8kWVl6|8A}RCGAu9^rAsfCE=2}tHwl93t0C?#+jMpvr7O3`2=tr{Hg$=HlnjVG^ewm|Js0J*kfPa6*GhtB>`fN!m#9J(sU!?(OSfzY*zS(FJ<-Vb zfAIg+`U)YaXv#sY(c--|X zEB+TVyZ%Ie4L$gi#Fc++`h6%vzsS$pjz9aLt+ZL(g;n$Dzy5=m=_TV(3H8^C{r0xd zp#a%}ht55dOq?yhwYPrtp-m1xXp;4X;)NhxxUpgP%XTLmO zcjaFva^}dP3$&sfFTIR_jC=2pHh9kpI@2(6V*GQo7Ws)`j)hd+tr@P~gR*2gO@+1? zG<`_tB+LJuF|SZ9tIec;h%}}6WClT`L>HSW?E{Hp1h^+mlbf_$9zA>!ug>NALJsO{ mU%z=YwVD?}XMya)Bp;vlyE5&E_6!fzx9pwrdz474!~g(M6R?N? 
literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp b/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..1b9a6956b3acdc11f40ce2bb3f6efbd845cc243f GIT binary patch literal 3918 zcmV-U53%r4Nk&FS4*&pHMM6+kP&il$0000G0001A003VA06|PpNSy@$00HoY|G(*G z+qV7x14$dSO^Re!iqt-AAIE9iwr$(CZQJL$blA4B`>;C3fBY6Q8_YSjb2%a=fc}4E zrSzssacq<^nmW|Rs93PJni30R<8w<(bK_$LO4L?!_OxLl$}K$MUEllnMK|rg=f3;y z*?;3j|Nh>)p0JQ3A~rf(MibH2r+)3cyV1qF&;8m{w-S*y+0mM){KTK^M5}ksc`qX3 zy>rf^b>~l>SSHds8(I@hz3&PD@LmEs4&prkT=BjsBCXTMhN$_)+kvnl0bLKW5rEsj z*d#KXGDB4P&>etx0X+`R19yC=LS)j!mgs5M0L~+o-T~Jl!p!AJxnGAhV%~rhYUL4hlWhgES3Kb5oA&X z{}?3OBSS-{!v$nCIGj->(-TAG)8LR{htr41^gxsT8yqt2@DEG6Yl`Uma3Nd4;YUoW zTbkYl3CMU5ypMF3EIkYmWL|*BknM`0+Kq6CpvO(y$#j94e+q{vI{Zp8cV_6RK!`&C zob$*5Q|$IZ09dW=L!V zw@#2wviu|<#3lgGE8GEhcx+zBt`} zOwP8j9X%^f7i_bth4PiJ$LYtFJSCN$3xwDN;8mr*B;CJwBP2G0TMq0uNt7S^DO_wE zepk!Wrn#Z#03j{`c*Rf~y3o7?J}w?tEELRUR2cgxB*Y{LzA#pxHgf}q?u5idu>077 zd^=p)`nA}6e`|@`p?u}YU66PP_MA}Zqqe!c{nK&z%Jwq1N4e_q<#4g^xaz=ao;u|6 zwpRcW2Lax=ZGbx=Q*HhlJ`Ns#Y*r0*%!T?P*TTiX;rb)$CGLz=rSUum$)3Qyv{BL2 zO*=OI2|%(Yz~`pNEOnLp>+?T@glq-DujlIp?hdJeZ7ctP4_OKx|5@EOps3rr(pWzg zK4d3&oN-X2qN(d_MkfwB4I)_)!I_6nj2iA9u^pQ{;GckGLxBGrJUM2Wdda!k)Y>lq zmjws>dVQ*vW9lvEMkiN3wE-__6OWD0txS&Qn0n22cyj4Q*8(nG4!G{6OOwNvsrPIL zCl-$W9UwkEUVuLwyD%|inbOF*xMODZ4VMEVAq_zUxZ+K#Gdqf!DW$5f)?7UNOFMz! zrB~tuu=6X2FE(p^iqgxr+?ZK;=yz`e;C$#_@D9Lj-+TDVOrva>(#*PVbaHO>A)mhl z07OJWCqYC60518$!&c`eNBcBW%GnfaQ*$eazV^2_AW?j)h;J1nUjN(I9=0+!RVx~% z3@Tf!P0TE+98jA?WceK-}A1% zW!K)lyKcGqy#M~})315-A#2NXQ`?6NR#Apo=S!oF=JfpX>iR*49ec{7AN$xxpK{D$ z2d%Fz&rdfSqourN$~Y^NFIMV1CZ?J*bMx~H3k&meGtH@q9ra2vZxmA$S(#jaaj-g4 ztJmxG+DLV<*q<|sDXPp$X>E)#S}Vm&sRaO5P&goh2><}FEdZSXDqsL$06sAkh(e+v zAsBhKSRexgwg6tIy~GFJzaTxXD(}|+0eOwFDA%rn`X;MVwDHT9=4=g%OaJ9s%3b9>9EUTnnp0t;2Zpa{*>mk~hZqItE_!dQ zOtC>8`$l|mV43Jbudf0N6&&X;{=z}Zi}d1`2qmJ}i|0*GsulD3>GgQXHN)pkR6sf1 z?5ZU%&xtL}oH;YiAA)d*^Ndw2T$+Mjuzyzz@-SM`9df7LqTxLuIwC~S0092~+=qYv z@*ja;?Wt!T!{U?c*Z0YtGe)XbI&y-?B&G2$`JDM)(dIV9G`Sc#6?sI60de6kv+)Qb zUW~2|WjvJq3TA8`0+sWA3zRhY9a~ow)O~&StBkG2{*{TGiY~S8ep{V&Vo2l<6LWsu z^#p0-v*t2?3&aA1)ozu|%efSR=XnpX$lvTeRdKlvM!@|pM5p2w3u-6 zU>}t2xiYLS+{|%C65AzX+23Mtlq?BS&YdYcYsVjoiE&rT>;Necn6l^K)T^lmE`5u{ zm1i+-a-gc;Z&v-{;8r)z6NYfBUv+=_L}ef}qa9FX01)+Aaf+;xj(mL6|JUzGJR1|fnanb%?BPPIp>SCjP|8qE5qJ{=n5ZGw?81z3(k;pzH%1CtlX50{E7h)$h{qGKfzC`e2o`*IqA#tjA z`Fz&^%$b9F*N`)U-#6>a)Z`55`$Dd0cfcs0$d13^ONrdCu9xcv_=n#WQo8stcz3jP9|2EvdI-RhJM3%Q%oM&!OlShM|0 z?gz?wHZSnm45njLtsz8PVT1S&jAlbKg5kVam$p16=EK@Sj4EP0OtH zmJDmdc^v)x>56Qg_wmYHz6h)>kl_h$>0@J!ypv%APmjZTAQVLy6Fu50RGY&JAVNhx zrF_qG6`x9MkT;1SFWo$)l{M$;3qUDn9JwE}z zRl#E_bDRJFii61kPgBybIgp8dNW!Cc1b*^YYk-#oWLJvtM_v^hQx~9?8LD4VFFxBF z3MlrsSC%f9Oupn*ctPL0U1fwfX?`tRhPD{PSLFPQOmIt$mDy0SgpNVvHS+f#Do>h1Gn?LZU9(KaN>Q_=Y*_T zvtD7%_u^^+{g`0VGzg(VZrpVQ6Ub5M=tI_p7T93R8@3Zulu3|#{iNcu!oiHxZ4Rf*( zfmiN$$ru(*_Zqn=`Gq#OuHRTSwp7uH_SokR&|)RuW5yo=Z|_4?qU-JU+tpt>!B&Is z@N(=SG;bpVc;AO@zbmMM zScqq1)b-ZQIrs={oD}|?6y{$HNB1U0^LsBh8JI&3!GBZxOXI<}&5-$lgkAaYqhOTb z?2vEnZ$-kk;*M_17(upJF3%+iH*s0-r{vttXVB2OUwI1s^+G(Ft(U8gYFXC}#P&E^ z>T@C^tS`Z7{6HT4_nF~n>JlZtk5&qDBl6r|^kzQYe`wq!C)n@$c>WOPA61NDFj<<6 zGW71NMMhwAl!U-yqrq2xrSFqRCI8acw7?}3j;ynxo*-b7Co;g5r%^j=H@9({PXXBf z@r>U>>N;E)81wx`B4f%{PB~MHka_);%kBCb(d|Jy5!MqJ%2p`t&@L)4$T2j&-WHvG zv3(uyA_gwqNu(k?jQTtv3dgPKRZoH8prxe7>pQBW5L&dpumS&5Ld2?(sCpJjvc4L5 zEnh&?91WVm)ZdTj=fjJ$pPDdgAttLXuke+?KdKxu*;kTC(r!tQk6;gxj4h%FdHAt(^M3YvYj(!tOeN)+Hvj6+< 
zzyJRG?^lZfWuR#t!tUKP&(?%3v&Zd$R2YN>lB(Lq`OInY48%4%yTv2 zYe1{G`3)(PDEio5Y@-I5tUf`c%%OCJMtSW56g3iEg%3`$7XSJJHyA z<|7&N)5Xrlgv~%BO24eFd;Hd;uiK%D`EdK|quUeRZDqbh9l)%j%J#0lfrZumvA<_w zu&=AVvdChf6}eqh(bUz`(`Ue*p01{fBAcTgKyDYLs_I+YyJEk+rM@avU~>fB$n)HS zM7pfJydu`i%gfS<{PF94kZDv$t>06sAkheDzu40NJ$5CMW%n^Lls?8^p^QGWURbKu3ZduZQZ((s2? zzE`}<{;Zt7<$C|9R8A~DJ~@%x>TfP zF>TX8)@v|t)q4GjRt<}5s6hLHwRel7>V@&r-O|Av(yh;Q1A{E>Ir>p+%dHD|=l+lT zpr(Dg&>#Nu=!)6bCLr-ZS%|;h)Ij$+e@r8_{qO19QvDe=&1tmpY*0lcA^Cc-#{9fQ z<~$*<&P$Q<_jy#<$40PMofM7aQ}C=jphI`4kLg}Z7CIN#26D{-4v-_CA-LiE@(%{y!BzsU%gG`Q?sjLUf%qFSl0y)2#ae*+EI>s|i`d^V$Dn)qmzqRq6VJRY|{4ujsIU%#bnqU6MR&-1I_43=|5(6Jr;Jvert) zE?S|Tmn}Tv<-??sxV5@9t}3D=>YZ0JrQe$CO~|EY=Lj9RM&4svQHPQL6%pV5fPFiH zfXDx;l@~et{*{U*#c#Dvzu)|znDO7$#CRx)Z&yp-}SrD{&|(MQtfUz~n35@RLfUy=aqrhCX0M}J_r5QsK~NmRCR|Nm&L z41UdsLjWxSUlL41r^0K&nCCK>fdR-!MYjFg(z9_mF^C|#ZQw?`)f6uVzF^`bRnVY& zo}@M06J&_+>w9@jpaO4snmU;0t-(zYW1qVBHtuD!d?%?AtN7Plp><-1Y8Rqb20ZaP zTCgn*-Sri4Q8Xn>=gNaWQ57%!D35UkA@ksOlPB*Dvw}t02ENAqw|kFhn%ZyyW%+t{ zNdM!uqEM^;2}f+tECHbwLmH*!nZVrb$-az%t50Y2pg(HqhvY-^-lb}>^6l{$jOI6} zo_kBzj%8aX|6H5M0Y<)7pzz_wLkIpRm!;PzY)9+24wk2&TT{w--phDGDCOz{cN_ca zpnm7`$oDy=HX%0i-`769*0M6(e5j-?(?24%)<)&46y0e&6@HCDZAm9W6Ib#Y#BF6- z=30crHGg+RRTe%VBC>T00OV6F+gQDAK38Ne3N9bm|62tPccBJi)5{B z4zc^Db72XiBd}v$CF|yU{Z=M|DZ%-(XarYNclODlb1Kz1_EKLy(NSLCN`eUl(rBCL zT*jx@wNvze0|TSqgE(QArOZU)_?qH(sj#TwzElLs9q)(0u!_P|R%Cy_0JFQxgGV>1 zz4?_uq<8_gM0`c*Hh|;UMz~vrg1gQXp{ufg`hM_qU;U>+zmvc5blCLSq@PrEBSGR# z&8=2Z4uXN`F3p73ueD1l{s{k$WipAvSh5W7ABe?4)t;r@V?y`bNB5FvBuE|0VRTb< zM1Hn^?DSsJY+sX@T5xW=#>T9VEV|?<(=6|ge$X6Sb05!LFdjDcoq*gM(Zq=t;_)Le&jyt(&9jzR73noru`a# zN*<`KwGa^gZU3-)MSLF0aFag#f0<>E(bYTeHmtdbns#|I)-$)mJ`q9ctQ8g0=ET?| zdO}eZ*b_p>ygRTtR^5Ggdam=Zb5wmd{}np+Jn1d_=M`~P=M67jj})fH4ztb5yQqQW z^C|C&^LHAK-u+ooIK)yM)QM?t;|<{P;;{`p=BclzAN#JzL4jCwXkQB1Dy{=^KR`=~ zTrr)y7eiYBzSNs_DvO=4A6#EgGS-zY%Vi)N*Yb`U;6o}KR}dq{r9pT5wqZ@3NOE8- z9-(}D|Nc5732CSYQbL)!gPQ#RbD8BhK3dl{sUuPvei0tkvnJBxDEAYTesU8H$)g(Plra{VH(v3u^CO1~(+ zU0O7#)jaS4{NcwA+LuSm&VBcX2#Im3xg)W}ySNw%->orn1taZ&+d)}8gJTqA!u|5P z{yv?zol_3|(1(%M(EVU=cp?L`{Pi|ixk{U)*guFML3P!OSlz;zGA#T+E@8@cgQ_mv1o7RSU=Zo_82F?&&2r;WE z@wk}JHYEZ9nYUc(Vv~iTCa3u8e4q(yq<29VoNbKk|`mq%I6u)My=gPIDuUb&lzf4`MEA9^g8u z)vp8|$$HE9m_BTV?lOosIGa4jud=jIbw)O2eCMfyw2*S8?hjWw^nqws$O*M$3I1)x zR0PWFb3$ySOcGTe1dz%N0l;RPc`x%05FtT^f^j{YCP}*Q=lvp4$ZXrTZQHhO+w%wJn3c8j%+5C3UAFD&%8dBl_qi9D5g8fry}6Ev z2_Q~)5^N$!IU`BPh1O|=BxQ#*C5*}`lluC515$lxc-vNC)IgW=K|=z7o%cWFpndn= zX}f{`!VK02_kU+Q5a3m37J;c} zTzbxteE{GNf?yLt5X=Bzc-mio^Up0nunMCgp*ZJ;%MJvPM3QK)BryP(_v@ei4UvHr z6+sbCifQaOkL6-;5fL8$W($zZ_;CZp305C;~$hhRquZr-r)jjd1z z31%ZK{-(`P#|Um_Sivn@p$-vz46uqT>QG0B1w9znfS9A8PB2LaHdzA|_)yjXVR*l{ zkcu3@vEf7bxH0nkh`q?8FmoO_Ucui*>_a~P?qQrlZ9@+D7%MTpSnztpylXrt5!-k8_QPB?YL8Kx_On8WD zgT+111d(Op$^$&KLAN5+@?>f7F4~wFi(8TL8+szgVmcMDTp5l&k6~=rA{Dt}!gb^r zSWY<)M7D|Z2P0cEodj6E42PV>&>DFmQpgt)E-|#sSUU@uKed+F680H@<;-x{p|nuH4!_mn85rx>wz;0mPi2ZkL#k6;sznu?cXh!T0S>{w6 zL^gvR05NY64l*<+_L>On$rjx9!US;l;LX6@z}yi#2XHh)F@Oo+l)h%fq$v}DNmF2> zfs^_t0)3N-W<9-N?uedVv{)-J0W5mh#29QM5R5h&KuiRM=0Zvnf#lF=K#WlCgc#9c zS;qvh(P$!_a8JwyhI^ZJV2k+B6Z^64?w|1?5gyo6y{}923CRZfYVe1#?F% z7h2SUiNO3;T#JUOyovSs@@C1GtwipycA=*x5{BpIZ_#GCMuV8XK=x;qCNy{d7?wA~ zC+=vjls;ci&zW=6$H~4^K%v{p}Ab?U%C6Z4p%eC<3ExqU$XR<}LLF67A$Sr20DR_pJ3yeBa~ z^sw{V0FI5;UpwXsScYuhbqGQ`YQ25;6p6W^+tgL&;Ml;>S3CGpSZ>VrTn0m1$y$HU z&65)I!c?oREz};c=nLCliriqQX->4uivHTgd${GqeAlf*!P^B|jkU|*IdNP(&6C>4 zqOW$)Nw9nvjy^&`?E|gotDV{JmJ9Q~vuhy<`^C4XIUDt|j4o6rK^e8_(=YqC zuaR6TRVf@tUFHB079o4MBIh{M~4>WwnGgesQH*3?w(RA%hCZ*7)b!aNV=yOQ%o_Y=Lt0Sl*(9^jfRnC210Om$=y>*o|3z} 
zAR&vAdrB#mWoaB0fJSw9xw|Am$fzK>rx-~R#7IFSAwdu_EI|SRfB*yl0w8oX09H^q zAjl2?0I)v*odGJ40FVGaF&2qJq9Gv`>V>2r0|c`GX8h>CX8eHcOy>S0@<;M3<_6UM z7yCEpug5NZL!H_0>Hg_HasQGxR`rY&Z{geOy?N92Z z{lER^um|$*?*G63*njwc(R?NT)Bei*3jVzR>FWUDb^gKhtL4A=kE_1p-%Fo2`!8M} z(0AjuCiS;G{?*^1tB-uY%=)SRx&D)pK4u@>f6@KPe3}2j_har$>HqzH;UCR^ssFD0 z7h+VLO4o@_Yt>>AeaZKUxqyvxWCAjKB>qjQ30UA)#w z&=RmdwlT`7a8J8Yae=7*c8XL|{@%wA8uvCqfsNX^?UZsS>wX}QD{K}ad4y~iO*p%4 z_cS{u7Ek%?WV6em2(U9#d8(&JDirb^u~7wK4+xP$iiI6IlD|a&S)6o=kG;59N|>K1 zn(0mUqbG3YIY7dQd+*4~)`!S9m7H6HP6YcKHhBc#b%1L}VIisp%;TckEkcu0>lo@u995$<*Em;XNodjTiCdC%R+TX|_ZR#|1`RR|`^@Teh zl#w@8fI1FTx2Dy+{blUT{`^kY*V-AZUd?ZZqCS4gW(kY5?retkLbF=>p=59Nl|=sf zo1Pc|{{N4>5nt#627ylGF`3n>X%`w%bw-Y~zWM_{Si$dc82|=YhISal{N7OY?O`C4 zD|qb}6nLWJ`hUyL+E>-;ricg9J@ZNYP(x(Sct&OI$Y!QWr*=^VN;G3#i>^1n4e#Je zOVhbFbLpXVu*16enDM+ic;97@R~u&kh__kgP#!R`*rQEnA+_dLkNP~L`0alC|J;c; zeiK=s8;BsLE)KbG3BD&Br@(Ha@SBT&$?xX`=$;eeel=|R_dIr6-Ro?=HEjnsJ_b`1 zK6Yg^-6;^2aW!xeTK)A~3Rm|L^FCHB_I>jIju7ZGo&N_1*QHkxH2!!%@o4iZ?vntS;&zJdPe1dH#04YD93A44o-MpfD zP{rn_aq>U%RDvC2+bp;xPlsOzauIi3*Lf42`jVKKZCRuKdYhi>FDuL2l=v{$BCN#Q6796s%r-AG$Q^t(3c@ zD?w0UhYr11@feiyl9kY_@H8~|xlmO<8PfQmj1!$@WieW@VxR@Psxfe-v9WCi1+f>F4VL?0O~K7T?m4-u|pSkBpUJZZe*16_wAp zSYZ@;k`3;W3UHKUWc8QeI}0jH5Ly=cGWQPw(Kr2fm=-5L(d`lcXofy8tJY3@Tuadz zYWXR{mW7XT!RF#RVCe%}=tM*O6!AD3^(!8un~opNI%Uko7$5t@<8+?; zTxDys(MyyGsUjtSu9$+|_-t!U3fVb1dkK?l`17<+jfl=hrBHnDSV>^R1=TnQeyqbW z>ov#l%!1|S!1>8UUxIdhQq`_klcHVx0{?#>K3#$4GlXncwldt!g17TcvKq-jo_996 z>oA=tH9CqRl6Yw?Uc`am!V?lHJbizOJaVaScf1UP5e7Dbgabq=b!B~T&_F6?ooU>w%x0A zH~&MHJ=q`fCH{U<7MDXE4SD32cDZA)WJeWkllJ`UspWaS#eDe^kg^oU_A14UE9zG-a^g{xaXf$})Wik>gT zl#dkzGr(;h0JZDuFn(+k8wNq?PZ5grQ<+sM?wBGt@JnH6v0#or-5wBQWKU~(S_> zkE!tc*ZJ1Y&*p(xX84POb3cClRMd!^qJ#CAZfIepEj-<`VURS_yCz0(?*Ixcj4 z-!zV1_QZhpm=0<;*(nm+F>T=)o?ep@CK5I%g^VAA+RB25ab?7)A~z~egru=I1S|@v zH7tXV!0wmGS^qj#e+MY;C5eUjEAp$Y?LDkS^QPZ}8WN85?r$u<-Epi;yZ1|J2J`se z$D6DpH~2F=eI0B&=UFAUnJvZAmClJlK)sutJ?M>xpZiWV&0=G4MZP+x+p>EX=HbCz zxls%Mw?*u^;LbHWIWCyq+yi)`GmFn9J112CZda_u@YIP%i;srFg_paU02Ifij*7}l z&CF-(3|>*a|+vbNR`^RP=9G?ymEJ0Z~)d&c*UE$UMepZ zcITr{0WqhxkjUnM15js_gW=e3Uh|y6ZReaXHIz-=p`x5VvB&rH9y>Amv@^WmXFEw) zQXYrk3feir=a{jMQ+wDIkkFnZ$k{sJakHn*?u za%4b!00ev8NVLM1TY=cl?KB&55BY_MU-sg?c>=Dbz_W{(Z~c?HJi*XpYL)C6Bd8WH zt+v-#0&o~@t4qESi*)+eW%@VD0|o^yF)n0hME$UtXF$*Lvh}7sso{`|pn*JDIy5^Fm3s$5*zEE=?u5<=l8FJc3r%+H} zdfoNl2J0^~!-*mOL5o-x32|e0Im*E!yY7F7E5N)W3>+v_LBydlEx?4$RL5f2oYRD# zaR0wv(-p~wO0eLDl3K=%`{5+0Gd$ktO=W)gWlGZJ0`K z$_RNA=ckrfa;H0KA~dR^p�(p-{x$&=IACIfoAR!za)F-^da-t3#0Dycnp zwO~NVXwXCl;jE<}>%@xz|=8fIJAB?>+E{7)|4l${4ngA3G|=r z2Dyv;VVWSgZx9Wj>qUjleGl3Ei9K4>h!(lPS%8VOG>Xu0%6VDz^O=bjJmuP7>DeUv zrbI}MlHB^^d?{zv6d=@_ZD2lg1&G7UjnVN{1}9WkaM3H~btX0GtSzB+tZ^qRgWo4m z!GmimlG$=wgXCnr6j@m<1gAL46#T~5Bnm=2{^@>|t&`9mkEPddj zAvG~@Tv~TAm2i%VW}R-g(Z0)z-Y|szHr@rk>4MAyG*Ma*7Yh#H7(!-5>DZ@8r;_dx z{prSe<>~099F8vsYd2xff7uAS%7{S)f(|@me3t2$iy&NEc7OUEchp@9A|X;;IA>8!oX+y(BKJ$EzV* znR$z;!L$s7uy@{OT~nG#B!NRraT8(X##Ho!0r_o@gg0CA-9H^;-uE&?$2$nHv_00o z%cbuUc-tCx$Uh&EZ4Nf4Zgqv)Y6>usG3>GeQnxx_Z6+PcbX-+ysbt1hQ`K1LDpOE? 
zrAhIZhSN9yVIAOa22gn577tbc&i3|3V8NWy&!tw##`}9*x}gtI^h1DzZRA>UuaJG) zaZ7j)dq!O}{?#8Y7~7i6fHh4{`pL?>-18|p!S75Y#^DM>-S3)vuZG+Q7l@ek zQP~#cBpWgg#mApc_sPYjpw8odQuRokmTkzcNl`^CcKB7e&;zViV;{Y{o^Y$%7i0m# z62%#1Lq!RC?}lK>%mp}T!3Xv;L*0v*>USLm``N%>w>@fwC+#T&Tx2bN4w(20JB}oU zuSa6v^kXi0xPs?pbaOHnyiqq6By1EZY9OZ^^QA>{q-Hsd&m`pbQ%8121aWG-F5xf zlZ%;B{;C>X19|`^_?dVyCq>n+41w7|!tUS!{9rHlbhX=SZO5CQ^;!Du_E7*`GiR^Q w)2!4MKjfSAeNo!9>IaV6aUZ*?W>} zs4%E?srLW`CJh0GCIK@hTkrW7A15Iu%N&?Q^$0+!{Tv&|t^Y@u%!L zglTg&?Q5q#ijZ;&HBQ?FNPp;k3J5!&{^+SGq?AX~SiOM9jJMRpyP?RCr@z38AQyy&WRMaC;n4una$~nJKSp?q|s8F00c9?Q! zY_ovvjTFm+DeQM^LXJ#v0}6HRt3R1%5PT*}W!k8BEM;Jrj8dIceFo2fhzTqaB3KKk zGlCLI)gU25(#u6ch6GeB1k@eHq7l{EHXv0n6xE#ws#ri}08kkCf8hUt{|Ejb`2YW* zvg}0nSSX1m=76s?sZhRY$K=3dpJ+y*eDULGnL2}4>4nvW^7_<~wIM_5fjvwt4h1|g z)g0Z6ZFq9j<~9~b8((~TN{Z?ZQfw|is&Xp~AC61sj;xItKyCHdI|tCMC_LbXF>~vR z=w6V3^H=W4CbAgR4#xw}ETTwu2guW~=Crl@SMXv85jQ=%y!s^?m4PI0My7MWICO;- z175jm%&PcPWh8QdOU(#8bp4!N7ET-+)N}N2zk2)8ch|4Q&lPFNQgT-thu053`r*h3 z_8dI@G;`zn;lH$zX3RzIk`E8~`J=BBdR}qD%n@vVG1834)!pS1Y?zVkJGtsa(sB~y zNfMYKsOJb%5J(0ivK8d+l2D2y&5X!cg3BG!AJ}910|_${nF}sC1QF^nLIhzXk-Y#x z0)&1iK!O;Og0Ky!;`b~v%b$`S4E&fB)1NB4v@8wr( z&+NX4e^&o)ecb=)dd~C!{(1e6t?&9j{l8%U*k4)?`(L3;Qjw z#w7FS+U(94MaJKS!J9O8^$)36_J8;thW#2$y9i{bB{?M{QS_inZIJ!jwqAbfXYVd$ zQ5fC$6Nc9hFi8m^;oI-%C#BS|c8vy+@{jx6hFcf^_;2VRgkoN(0h!_VSGmgNPRsxI z8$rTo0LaYq-H5i&gtj81=&xU?H-Y2==G@uQV7E`@+2E9XQW@{&j`?EOktk|Ho{HU>ZqDzvgjwBmdex z&uZNd2C1h{{}2k6Ys9$*nFP3;K%u!MhW`uZy7Sn`1M1zs@Es&;z*Z>Gsh@-3Fe6pE zQD2@cqF((NrRevgvLsvM_8;;iNyJ5nyPyy?e!kvKjGj`6diRFBEe49Oa7wwkJFV7Z z$YT&DWloYu-H?3<0BKn9L&JYDT-SK~*6c5pi18P26$JESKRYj{T7Zk6KiRJcbvOO*{P56Q6s8msbeI3>|j>K9}Q9UBeq*inXKemCm`-<5|-$ZyN4u$(3 z&HcvqehFD%5Yrmykg-^d`=BSa8(i=>ZoC77^mWY{evp(km@aHqhUECBz76YiR+VYK zY_avFC~V3$=`6C4JhfHAQ@DZtUOwH`L;oYX6zK0-uI^?hS$ALfq}A7evR;ohJHij} zHSZdW?EKv9U1s4oD*<(0oQ*;MaQ6@cvGL zuHCPgm_NhVsgp^sfr*ia^Db}swo1?O(_Q2)y+S$CBm+g=9wCOUPbz(x)_GbaKa@A7 zuI&!ynLiZRT#V%_y_-D`0Z5lT*auoe{(U5NylTzFSJW()W-#F6*&A`LNO1bV#Y;QJ zSbLBnp|B^dtK|KIWC|No>JjWBWE@n7O)x{&^E(WMeMvp57#qA8m* zeTow*U@_86B#Fm*rxyYu5PRWaWHx8y> z*qmHEp(AMDl0v)ij(AY8fnH=~ZwwjVAbu*m5;xPfidh@ov6d8g zfJsi&!QyK53Es%sC39ts;54V68koALD4b|%tNHW0bIkZAJKa=W&FomJSEDT>W1xIX z1x%Z>AvNIsSPLcn3RTcHXb@KB?cuM)=x6fcIx>&(GxqZ8w3p#jJ(GVgc*`c0HG}dv zIop&Qim!K1NFwic%07KcjWgHBPUkq7f~lj;TPqVGTiT#cUeim>;nY`>h@a*S{qQex zQ`z62WK|Mj)Y{tfF{;T4P;c8$Q|KU?Joh zIkA^z%X7z|r>4aTh@|StTi!-r1D!g=zb#3d#{{&K3CqE$Iz-UH<%37c zRfkO`&uM%#AD3PHv`g5t0e^O%nVL0d{Xlx^EjEC3#skF@`zl-7PF^0oxW)1!C!JxR zWvuAHH?)61FKA1QeT*_sY7;_Id#!GmV4n`MO{~sv}VLSK` zXRw=Y=Clz*00B(5y^K;gCZMAzjT5+c3IC=)l(9VIDdatpxj3y89WwI|bH&$!ZEvp` zPR!T@#!(|KfI-w?!&+7$N3F6>tD{YO4Qg$d_`nNEdfVCha9vaPn0jI0`)`@*72hq! 
zpU5ND^P*RoEkbD5o#az(-g=Y)L>HH>Oc%}$ zT3Rs_ih0;4+Lv4Y;@Iv(;fUbQ=i-G(#>vghec~*j(I#r|5mqFiJBpzi&hzEcD{u$< zRsm0BVYn=pT;0>R(itW|*D&;O%bOc7et9ACaH#J>z3A1A~6fdP>pmbM%xzm4>|;c_?B+%sl;Qs2{t!60$^u zH1t@9^6>;?!FuusnISi$f5CL&;z?EqJN$FBuWDA#D5`cy_UvCFIVvf{c?4N0teh;d zET$7aVbj08KTQS!x?Nd1Is8q8qFzs}a=!@nJ;7FSfCY^T@D-gpw`w<6e#X3+;O}1h z$%I!M)0bg|EKUA04Qjn@+x{Rj8vt6Wn!R|3A92z}^$KfF5(#CWr4y#~re1CN4i4w0 z#GsypBR{xA3Er7sgAi(|}1-W?s~n$7?K|9WL8kpVfw-;#b9 z+mn;=ep!162U5R>_t}fOt~tE?s#m( zO-S$7>Ay6*hHdZ)7_oU915WYYCIX;hFI-U2EWYX!pllONr@Q--2o~`!isi6vTPLJ4@(|o=%NHYjo0_S&q*UQIROw@*N-By@PaQ&;YxFZ0aR zX&}LeOEz);#m~Hwm^VAY8DK}b$F4bo{jMN?d!lxKPhNklzr^Cd`0f4oJr^z=I|l`* zm8AHm*fPV`0=lF3Pnnp}&J0N1X@}-D94YvmUabFrLGSnTz7Mu^21F#O5tN#CuY9Vh zUZBH=ez%h*wkf0hBtXJh1SN3d+IF{gzT7lp)j}n?03lt;XSQRAh7qd&v;RwTYDuQ# zbI2*r<>?x-G0@hM{;%{VBD7nLKt~D`T~-HAt5;h%i0_=Ifs=yHma5dhJ+QMG?Ux(a z|E?1CMy1!~oA`FP!k~iG=t&5#>bVdz=peT8HMB6Y)#7PpETtNryT^+Rv3vpJaF^zP z{H}0-LyV9Fu21ID%wO9f1IKlFr1p4c{o-?03vyB-tr5duk^&L$;m_|f$vs`^Sl{j2 z95}oY{LlY+=ZS%J+tZoXCd0*sSU7w^gjovXn+g7uyra5{cU49@yHf#Z^Jl-$9cIfo z+AJuxH$VLb=#+uBbVmUjnx zxb1pZ@-O9=AIk4@S)m6fJ2?{HrNYwwnL3a45muuNjr;6$O`bGEM0T4A2_S$t=86*- zcO+0mywg*j#A4mU}enR_!cGmIYQ;qwfchWtFEXL)AK%*;=j znYne+hS4EMy3S)C*mZ1KI>!+)0V@9!N6H$Y}~MJ{rYuf zz^KljIWvFi-?#?V@LPR&c6Nn{!=XM z>}-h$S76;$H{E{Y%@^zlmOl^efBwa%UU+jJD9UVukQ3ti_kH-?H*RC0?M1W%FCvMB zM_+v6fk$6X2sx)-p~B3&Kl{nscK}pNLM*qjtpaf9>AU{-iPKQZR8yCg!TY}Qg*(;) z)gdvCcB%kppZc$VdvsK@)3l1{&DG!d_6OHOS`y=ITLEVu`unSKA2E%JD*DVX{LJ}K z9l>hMRDqxQh0lnpGHpVYneX}eA3Pt|2v%=q;rt)``R|#bDyB)OXY&vI_@|*}h}G?^ z@aZ4_!7cQPX`!fW_?{oT1NTwHs#l5L-0`E|y@48<3Q^HFf8=Idi zpJYD%1MkII!~|7I^WGo)IF=?{>ACnjJ_WUi39C}!Q{QnheVJqeKKqq5^o5CBde(g9 zvw$X6^jz_^E2$wSw4!q5*RG(C2_^XO$HBn_55vbl44OnTTRwRaePP0vo{K)U1#99& z<>rq7V&V(<&@I%MFoN5zrY}sz=(*-L&}1QQ*a%`u25h{cFj===17eB_uGuzG&byQ< zrm8BJZl4r_E$3k|Wo6FW0-6M7>qac5uFQsQcmkLWGfeH74S3Z_rJ!jgN++!@i=HW8 zkyjI(oPH-+-N#Qc^-mpNO`bc6r=2-<%&Wy5K1vfFJB(L_IkpS6fY^NmuL8qsgj>MD zn~BHH9WM~32_3vd=W&B)k7F9q%stJx+b_L_X-4zr^LVUMCmyCTA3sWtkvsmME?Xiy z?xOSfB=_$oY06~J-HcCq&)qcW{j;uP;?Dm}=hkq?zh&n!;m((-G-u_t|6x399Q;>A zgNpxoJNj{u|MFDH7Rhq@FCAl0dE|ddnl!oh9{Lq?@JDoR6L;C941IK`ISfdE$4S zE0AUQ8+2|Ncl_q5QkSp#AODp~(^mfP&%Au@@|TBQwoP`UU+V{6u8|)6ZA{~uKmQ*M zmrMTDU8S~8Eqi{^v0Ug&5Upcm#y7Z1(RbgZAG8jB$eRwCspQ)>5;U)oGZ&E5aeR*K z8Yt`Y0$G))Yd(Y3KH}tA4`-_QmNke5hU_|nq=xtyjwW(_o?itz>B>WM&^63bNdQ)k@-IgDHW*RW$Xo9#RzrTrCn7L2H{9Amq|qNg@#eZY=|P zCoI?2s+L)zsM%WX(NbVEY^`C>lFjIBYmJ6@DKJ0ZT4&F&WHW!dwa%QzOG!?jY_2(S zDcEzZbz*2Q!43|z))9yOP9X1Xt%DXzwY(3tl-TR=Qb_MbZYRrooh;dYYmS!U_as1(=YVB?Q_A|tNu5Ut&_q3jbfDM zoFxT^uEuH`nX3*sB%K?GuHUkweYReBwnHqh3P)~`+s3+Tj!rDA1e)8vuBv5J*IsxC zkd^~b(aGzArj08{>cnzOuy04C+C`}gb|Yz-1avxeWzev3NzcHbz_&4W@QCr$z3~w=8Ua- z`;vfG1~BP8CyLb=F7t1am~ph_#|O%$khSJ9%Vtcn)YmpgQxF?xM^_Vb+5fnpB^W0I`f%X8gb9#X{Q-yJG0{Z56aWeI&zPxnf5pdJA38bM`cYnS#x)% z`n1tFf$i)W-hGm(f9mde^=X@NcV_lFb=P`4&CI&H=IArijGwdCk&X@uQ$5xmj!~^? 
z#$ROCI)V-~t%L%GS#wo@U27ddR`4`3)WoB{R-4snfNrfee|kI8^bu#yDgYqOwas9# zmcb`3!kRJ`Cr=_tq)8aMt{aGtUZsqwVlj6DgCGre>AEt&x8H_in!x@uwgExIh|-mA zjdaC(29~CTVSaaF7HPbql&*9Uo8P@f)>LqCXclr}peS7_1BQ28u9PO8Eq1@`l3q9o zkfKCaO2?T?ZyA6loW<#9_c^O=m<&h}CA!ineAD@=(gbq`vyT|tiJ6#^B1$P;;qax` z55k&Q?wEh#87niLo*+n4L@65J(Nz~=Ya%7^(miLb(E>A3B@|Jjl;FU&D>o|9#7PJH z?|ago!o;WC^h=|T7PVBg(DAB}72cyUS zb(f>Bwbr!F1eTCO5fpj<{PqhY5>143p?~5ZA5H40);=@M#MYvrB6gqHbU_!GSY??i z%s=>-ciA4*zOOZHds0a(kWewZ4h(k8h(ua7HX)Au&mY~H8KY6(_cb$_&fA@QjIW-*heP3%$d!m5^AdnT}`12qA^c@!g3DOwZ5WwE2?)-yU z!)Vx#Mtxt?FzFTwK!77sy7)sMzUd->w4^bxtpM2j!b1pjgyk zGKwWGeb4)^zjy{9Es&PU1}gwg?|J#L$KJB7ett9@4M%-nGtIQr0>Fl@8-yh`-+1ed zS6r}(MeSvgSoFmH*_WPu@i?}!AB~2?;i&IxrkNg~cQ9Som98tcq)k^|eeER|Zl77t za-TVUc;DNvzVXJ%w52+#weN?+;i#{f#!Oc&z?81*N>^e~ltRS%ZI@lR{rs()HmqG! zx*}ZrI-EZ}ckJMiy>A^oofwDfC~IH)z8{VHKGT@#E5I(Ll&+MnMCl>~AV7+>Gi%mF zkU1QlKASdR0B80!YhP<$Ywi0?W2Ux45oPfxv9QolWzJPD^weBfvo4SONxP35106sAmh(e+vAs0GboFD@PvNs)jNPvarhW}0YliZEg{Gazv z+JDIpoojRVPr<*C|BTq<`6ga{5q^8^!|0cxe=rZ!zxH3%f5ZO0cQ*Z<^$Yt2{|Ek0 zyT|*F+CO@K;(owBKtGg!S^xj-Z~rga2m6nxKl9J=fBSuNKW_dLKWhJKeg^-Xe`^1? z`TyJj)8E!#>_3Y?uKrwqq3LJ#SGU>AzUO|6`nR^u&3FNN_jGOc zw)Nw`wr3yIKhgcee6IaN=ws>M{6677%)hPwx&HzC(f&u~&)6@b2kNRzBDQAP0*H73 zq%McOmRk{B3i47qRe=DA*$&odrbEJZ*pV9XXa&p@wlW~@Yfs>V{yiTtplMhgM*-Bz zsSnlq&pG;z0OUN%$~$3=g1UF+G*>+17eRbBf3=y79J}KR8owon@$1Z7MIrvvWWH)34nK2SD)GsrJ{l z1Cl#oVo3A8qY3e=aF)qzms~FG#2$LzT=gs&aVMOj>(%{y<&O0cG!nCiESl~x=^dF{ zKvj8F1K8Ng171wwM5Fh4KoQw`_c6#y$(5cAm7e}~nJ#A*fx+c9;y#&W!#VukR)ugk zKp3=+;Ut+IYn%m+r4d*<`L2h%aDnX5}^!5R|H;(34AoVWjRx(msBZvk;rCI*|~ zdOijqI@9Z{Vu!~jvHW{lBa$rnl4+!s_5sfK3bCGk-B%iDe&@-}+%fOKU|(9?V1 zHE8&@4z)Kx!RAvAs z!Wic9=o#(bg?kc-G68-m(jZ`^=XGUXb)}t(%&~sjFnV^sEX%hSy6UKC4iOhgV=BHV z2w`4g7Y=s#Vu2B_?#VQ|hP39@eArgfX>-0S+dd&^mx0*wp}>)x;c4RUgxz%;oNe?& z-7-lJ@Y^2^C;=qJsxx5|xF)*pTGhch2B&kxtn;f!7=gznk}I3}Dh}(CoMXgA5-p&kS202!l?!fT3t|HG*rIP~mS* z$Wjo}jq3}z$Qq!9yrtd3fM0N629ZM?LU$nv@Tv9b7I;D|;0H2dsA~g7Z7zp1| zB)XmrkMgF6OQr|R)HHD^TE{Y#j!~SR?b`Xt3Qs`B+x<hxexYeAjMUWdZ-*n9%(1)Wb(n2U<><7&9dwGJmrob)4%H? 
zlQ%z+L-^$dFhhH|@u$%97Qz?*Ynh2VG@q|?8vY&L74&fs&_b&3$x&Oyjl~LQDRRap zJU4U*R+(2Dd!G+lh8!V{pT_UJn+^1Qg6$` zqkNm(a#hWyc6SP+p5=C4HL8-m`pO`5o~`-LI?_h5CsH?F_%?nDodmz&pWR20WTpJE z?N|wSzLjMUK8E)a2tI}Lf;+;*M|h3Y(U#>)g1>zk9|Hd}oZAa2 zLYBWBoSW!Ts!RwXr^8h+U*@{9{zqS^iH)Op<;r`Uw~nc}<^$V~_i%$GFjaG?X1@E|M`h)nekvFKt`Dh-f>@|0-`Xoq)o` zx;JmzDfOV9qCx|EVpogEe0LK~tGS?5$$L_i6P$P6wIsCQaP_;d{{N=iV@+8LI}o#( zvo*Ejy=IIn{rdIQh1&q-{EuohpVOjJ^Q3lD*YTp37$^RRgn8ihpdu5{Ct%5-KO!VL zcNB6dUajXI9jkm-P|i3~GB-A(X`P1Oqqb$tcku)UJw0w3GeUijb__#QT4j%64z%EeB7S?jlWwx_7&+EEvB|6N=kV}DwnyAlX=?j`) zmU#!$*^@NIu#n_d7;WoJV@*Fbv9|yJO4;n|BNF2xy(54RyB>t~8lUOUW$&2%Nwi1y zx6JxW88>U2$#qhl^6KUbtmg9}D0o5vYDT7kWJthLGkpGnN4T>{St^_EU>4;DmLF9o zr|LqsA8_MoNLQ=}w?8u!ziSZ@PC#Y<#9uJFo-ozVo6D;<8j^1$c|qAE3ZTE5i~zmE z$BU5lw6l=EWsg^y^;8>r9qH{xfL|~PZYK#md$zZ0?o11gV<*WSW~cgy2GYGQir%wf zt4iW8D+;s*;RGrmd(-T<@2&j(Cb9xhV*l-x`TpK`xq|7p?5R%5*s!69?2c!cC*VY* z2DE^9pvOPLU!1e}wA8S8opcTJ3`NB>hY=JQnL~QFXR4K8A$BqJnoEB$wn-%u@E6Mh zCfMF4kusv3N!(aHC}4)Xs^xoOwXd%e^6pi5|DZo=Q25j+6HlJ^7FodH6y1bMROR^q zGu6)fopS`h%Sw<;ZH%TEPf+#81-#_v+@8nlR0jLcIDKQtLleOC)6yLZgC!D9X3GgS zohwU{v$jl=quD#Go^hB{`@Qw*a%`(^jyT~=q^bWgGzRj;|12J55HWdCWV}EB|K=%N z3Nq-qxJJ`>^|1MNN+q}zTB&ooE3j==AgK@^UW<^oSbeALa2peF)Th6{@sj0KyMNHZ zksk1+MXN2tv+22A%cQOGpS9)77(uP9mh+!5T5ERLvF@b}$+WvXM45Z?-kCa)fb~f1 znVbTD$Gx-0Zxc`0D@YgHakge6SL0H`-vN_x?AP0>iGH0_EE&=v83hMJgaKAI0jJXm zVxVz;X<$v6WW7}fxROO7vr#YLP;;lij5VrX{;>7kK6TtOH&6|Ar^xo>00%+u$C4@# z>!jOt6*3><171+WxoZnKDTzJtDRw+T030;yI}~uV@9fCnei^I*j>Bp&mzP2d=FPb_ zCM*l_+$LDR3B*a!A$g#>xsrZvw0lckxmMg>0aQd7tPyN=t{dgXb;Ie+T8{fZH=gdu zM7Rg9c(kg(Jg0?ARRRl=AONFKrvFj)lTY$KfT%6^6s`mk*ABGhsce*LsoD>K{z_M2 ziPpnu+lw22PfF!CoId^6n*G4H(Ix+#+N{C(da7t1BYMGEaE#PdpOLxsVD5riQXHp@OX;`S`8VnpM~)I920w~<3|mo0 zf8~Az`*?2?H&gZ&*K&bRkV@qzvMlRHXys8*Ze2+1c?5o!^+$&MHxB@4Ee5cke52R! zmn7AZtY6ST%ixgU5)%$%QcwHj7Es-Qu^kLAPwy%7pGBw_4Q9#da^W2$}axNHr03)_nw z5?yuNmXrI5HgS46)c5&}B)Tts49oU92>3xBLLy}FMUW=84DQbVq^;7_e7|(Sdz|&J z73N+M`rc2rt*oSWu#7S{*s~nH6HRHJS1SmzeXk|;CA)FI4bat3<%}nkB%;;?=F>B7ms9QSxv#@+69;@>QaR?REYX4&)=itG>rM{<{A79Rmk)`5ON#GL`*KX%}Ihk3w(RtM-WLt z?f&FLF}4N^yE!(pZ&Yj&Bc`~K0@4_}*0Om?wN|}4WJ>WL;G^H2*QpgEkGA~OET-Km zkwz|5{6dnz1U<2Pe9DNL>3g5FEIvp1jzP&2K#z~j%g6!7B;^zF+o95?fV{3mnB8*RMhCDNp>Am-3e@jNfMj?jHV$MWjk!DDKP zkAz$Y?Sr)!GUOX}qTQ5aMh|wq1uq}~joWyKl=b_LboM#wi{CMuz5x6BKlA-qy++cM01D3b7`uD z#l6M4pI;JCypO8JZ6?U&wNxR!{4oB_ zlV!x9+-&Qy6{%MQ{~yoZGkKiTSC`YS_j22~G;xUV855g2&C(zm^V!(wpcm@zn{%!g z4}JGo(sGZ1O~to-}le

UmY2RIYtNPVDpE$%vda+HD#3m z&VuXJ{BK&Qe+rBa7eq}Q(bq|tn(RrJAk|ztj2(i{d>nmQnM?;HF2k&9sA6up5tmjl z7lySlzMbifH17-m-Lwa_F&e7nOH?ESi3#ckR3tsM+jsck3`oG!uMS}|eAwVXv>}qxwq?QY%QJ0}r@^;fhuUA9W z*BVl>TGo&N004@xSiwDUXUvp51sVmqO3m)=B55aPwf@0=e}cN+$-BdKxY`YrT_4)0 z_d10#i44Q*rFr8MC>*)v$EJvz``(pb{e&*6k+b zsMz%($|1+8hn8c2?P(l@;Rb&CsZeYoCI3?2!LqjbwPXW3z4G$Qfj=cT5Yb%vY0(AX oeb?AaKtwrnc|$|zzw9vfvn^aJJ!zd)XFXqqy0000001=f@-~a#s literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/values/colors.xml b/examples/llama.android/app/src/main/res/values/colors.xml new file mode 100644 index 000000000..ca1931bca --- /dev/null +++ b/examples/llama.android/app/src/main/res/values/colors.xml @@ -0,0 +1,10 @@ + + + #FFBB86FC + #FF6200EE + #FF3700B3 + #FF03DAC5 + #FF018786 + #FF000000 + #FFFFFFFF + diff --git a/examples/llama.android/app/src/main/res/values/strings.xml b/examples/llama.android/app/src/main/res/values/strings.xml new file mode 100644 index 000000000..7a9d314e2 --- /dev/null +++ b/examples/llama.android/app/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ + + LlamaAndroid + diff --git a/examples/llama.android/app/src/main/res/values/themes.xml b/examples/llama.android/app/src/main/res/values/themes.xml new file mode 100644 index 000000000..8a24fda56 --- /dev/null +++ b/examples/llama.android/app/src/main/res/values/themes.xml @@ -0,0 +1,5 @@ + + + + + + + + + +

+
+ + + + +)LITERAL"; +unsigned int index_html_len = sizeof(index_html); diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index e09b3c8c5..647abe116 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -1,1903 +1,4 @@ -unsigned char index_js[] = { - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, - 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, - 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, - 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, - 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, - 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, - 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, - 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, - 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, - 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, - 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, - 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, - 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, - 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, - 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, - 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, - 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, - 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 
0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, - 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, - 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, - 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, - 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, - 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, - 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, - 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, - 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, - 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, - 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, - 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, - 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, - 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 
0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, - 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, - 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, - 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, - 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, - 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, - 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, - 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, - 0x74, 0x28, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, - 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, - 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, - 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, - 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, - 
0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, - 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 0x2b, - 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, - 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, - 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, - 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, - 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, - 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, - 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, - 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, - 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, - 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, - 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, - 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, - 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, - 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, - 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, - 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, - 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, - 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, - 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, - 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, - 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 0x68, - 
0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, - 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, - 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, - 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, - 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, - 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, - 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, - 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, - 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, - 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, - 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, - 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, - 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 
-  [auto-generated hunk: hex-encoded C byte array of a minified JavaScript web UI bundle, removed by this patch]
0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x3d, 0x6a, 0x2c, 0x48, 0x3d, 0x5b, 0x5d, 0x2c, 0x4e, 0x3d, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, - 0x65, 0x3f, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x68, 0x65, 0x6e, - 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, - 0x65, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x29, 0x29, - 0x3a, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x2c, - 0x24, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x2d, 0x6e, 0x2e, 0x5f, - 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x7d, 0x2c, 0x7a, 0x2e, 0x5f, 0x5f, - 0x72, 0x3d, 0x30, 0x2c, 0x44, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, - 0x61, 0x74, 0x2c, 0x70, 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, 0x74, 0x2c, - 0x79, 0x74, 0x3d, 0x30, 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x67, - 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x62, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, - 0x62, 0x2c, 0x6b, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, - 0x74, 0x3d, 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x2c, 0x77, - 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x2c, 0x78, 0x74, 0x3d, 0x43, - 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x43, 0x2e, 0x5f, - 0x5f, 0x68, 0x28, 0x70, 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, - 0x6e, 0x29, 0x2c, 0x79, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, - 0x65, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, - 0x2c, 0x5f, 0x5f, 0x68, 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, - 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, - 0x67, 0x74, 0x7d, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, - 0x74, 0x3d, 0x31, 0x2c, 0x55, 0x74, 0x28, 0x71, 0x74, 0x2c, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, - 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x5f, - 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, 0x5f, 0x3d, - 0x5b, 0x65, 0x3f, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x71, 0x74, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x6e, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x5f, 0x2e, 0x5f, - 0x5f, 0x4e, 0x5b, 0x30, 0x5d, 0x3a, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, - 0x5d, 0x2c, 0x65, 0x3d, 0x5f, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, - 0x3b, 0x6e, 0x21, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, - 
0x5f, 0x4e, 0x3d, 0x5b, 0x65, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, - 0x5d, 0x5d, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, - 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, - 0x74, 0x2e, 0x75, 0x29, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x5f, 0x2e, 0x5f, 0x5f, - 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x5f, 0x2e, 0x5f, - 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x69, 0x2e, 0x65, 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, - 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, - 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, - 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, - 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, - 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, - 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, - 0x21, 0x72, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, - 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, - 0x70, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x2c, 0x72, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x3b, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x69, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, - 0x5f, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, - 0x2c, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, - 
0x65, 0x3d, 0x69, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, - 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x5f, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, - 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x43, 0x2e, - 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, - 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, - 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, - 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x50, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, - 0x29, 0x3b, 0x21, 0x43, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, - 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, - 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, - 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x50, - 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, - 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, - 0x28, 0x74, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x44, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, - 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x49, 0x74, - 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, - 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, - 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, - 0x74, 0x3d, 0x38, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 
0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, - 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, - 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, - 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, - 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, - 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x43, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x75, 0x73, 0x65, - 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x28, 0x6e, - 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, - 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, - 0x3d, 0x74, 0x2c, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, - 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x5f, 0x29, - 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, - 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, - 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, - 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, - 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, - 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, - 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, - 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, - 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, - 0x5d, 0x29, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, - 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, - 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x57, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, - 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, - 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, - 
0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, - 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, - 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x43, 0x2e, 0x5f, - 0x5f, 0x65, 0x28, 0x75, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, - 0x7d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x62, 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, - 0x7d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, - 0x6b, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, - 0x64, 0x74, 0x3d, 0x3d, 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, - 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, - 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, - 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, - 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, - 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, - 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, - 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, - 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, 0x7d, 0x2c, 0x43, 0x2e, 0x64, 0x69, - 0x66, 0x66, 0x65, 0x64, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, - 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, - 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, - 0x3d, 0x6d, 0x74, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, - 0x26, 0x76, 0x74, 0x3d, 0x3d, 0x3d, 0x43, 0x2e, 0x72, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x46, 0x72, 0x61, 0x6d, 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, - 0x43, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, - 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, - 0x7c, 0x7c, 0x4f, 0x74, 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, - 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, - 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, - 
0x5f, 0x5f, 0x56, 0x21, 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, - 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, - 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, - 0x74, 0x3d, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x43, - 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, - 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, - 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, - 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x6a, 0x74, - 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x6c, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, - 0x68, 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, - 0x5d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6c, 0x2c, 0x74, 0x2e, - 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x77, 0x74, 0x26, - 0x26, 0x77, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x43, 0x2e, - 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x78, 0x74, 0x26, 0x26, - 0x78, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, - 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, - 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, - 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, - 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, - 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, - 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4c, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, - 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, - 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, - 0x5f, 0x29, 0x2c, 0x4c, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, - 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, - 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, - 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x5f, 0x3d, - 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, - 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4c, 0x74, 0x26, 0x26, 0x28, 0x6e, - 
0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, - 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x52, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, - 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, - 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, - 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x49, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, - 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x43, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, - 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x43, 0x5b, 0x74, - 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, - 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, - 0x3b, 0x7a, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, - 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, - 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, - 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, - 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, - 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, - 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, - 0x69, 0x66, 0x28, 0x21, 0x55, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, - 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 
0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, - 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, - 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, - 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, - 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, - 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, - 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, - 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, - 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, - 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, - 0x65, 0x73, 0x28, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, - 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, - 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, - 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, - 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, - 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, - 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, - 0x7d, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, - 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, - 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, - 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, - 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, - 0x6f, 0x66, 0x20, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, - 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, - 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, 0x5d, 0x3d, - 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, - 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, - 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, - 
0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x5f, 0x3d, - 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x7b, - 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, - 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x5f, 0x2e, - 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, - 0x3b, 0x77, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, - 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x5f, 0x2e, - 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, 0x73, 0x65, - 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, - 0x47, 0x74, 0x3d, 0x5f, 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, - 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, - 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, - 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, - 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, - 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, - 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, - 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, - 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, - 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, - 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, - 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, - 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, - 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, - 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, - 0x29, 0x7b, 0x6f, 0x3d, 0x51, 0x74, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x72, - 0x2c, 0x5f, 0x29, 0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x5f, 0x29, - 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, - 
0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x2c, 0x6f, 0x3d, 0x61, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, - 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, - 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x77, 0x28, 0x28, 0x29, 0x3d, - 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, - 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, - 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, - 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, - 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, - 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, - 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x42, 0x74, 0x28, 0x22, - 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 0x28, - 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, - 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, - 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, - 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, - 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, - 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, - 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x69, - 0x66, 0x28, 0x5f, 0x3c, 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x5f, - 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, - 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, - 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, - 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, - 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 
0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, - 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, - 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x5b, - 0x5f, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x5b, 0x5f, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, - 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x5f, 0x20, 0x69, - 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, - 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, - 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x61, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, - 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6d, 0x28, 0x28, 0x29, - 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, - 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, - 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, - 0x48, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x77, 0x28, 0x28, 0x29, 0x3d, - 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, - 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, - 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, - 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, - 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, - 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, - 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x30, - 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, - 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, - 0x73, 0x69, 0x67, 0x6e, 0x28, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, - 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, - 0x5f, 0x5b, 0x31, 0x5d, 0x3d, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, - 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, - 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x31, 0x5d, 0x5b, - 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, - 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, - 
0x6c, 0x79, 0x28, 0x75, 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, - 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, - 0x29, 0x2c, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x29, 0x2c, - 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, - 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, - 0x5b, 0x6f, 0x5d, 0x3d, 0x69, 0x29, 0x29, 0x3a, 0x5f, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x5f, 0x7d, 0x2c, 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, - 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x65, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, - 0x6e, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, - 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, - 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, - 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, - 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, - 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x3d, - 0x31, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, - 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, - 0x5f, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x69, 0x3d, 0x69, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, - 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, - 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, - 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, - 0x69, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x74, - 0x7c, 0x7c, 0x69, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, - 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, - 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, - 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x69, 0x26, 0x26, 0x21, 0x74, 0x3f, - 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, - 0x30, 0x2c, 0x69, 0x29, 0x3a, 0x5f, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, - 0x28, 0x69, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, - 0x5f, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x5f, 0x2c, 0x30, 0x2c, 0x69, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, 0x36, - 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x5f, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, - 0x36, 0x29, 0x29, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, - 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, - 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, - 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x73, - 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, - 
0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x5f, - 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, - 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x5f, 0x3d, 0x33, 0x29, - 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x5f, 0x3f, - 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x3e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x5f, 0x3d, 0x31, 0x2c, 0x69, - 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x69, 0x3d, 0x6e, 0x2b, 0x69, 0x5b, 0x30, - 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, - 0x22, 0x22, 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, - 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, - 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, - 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x31, 0x29, 0x3a, 0x5f, - 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, - 0x5f, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x69, 0x2c, 0x69, 0x3d, 0x22, 0x22, - 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, - 0x5f, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, - 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, - 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x72, - 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x5f, 0x3d, 0x72, 0x2c, 0x28, - 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x5f, 0x29, 0x2c, 0x5f, 0x3d, 0x30, 0x29, - 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, - 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, - 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, - 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, - 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, - 0x26, 0x28, 0x5f, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, - 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, - 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, - 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, - 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, - 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, - 0x3d, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x4c, 0x29, 0x3b, - 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, - 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x6a, 0x20, - 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, - 0x68, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, - 0x5f, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, - 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, - 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6d, 0x20, 0x61, 0x73, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, - 0x65, 0x78, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x52, - 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x66, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, - 
0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x55, 0x20, - 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, - 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x43, 0x20, 0x61, 0x73, 0x20, 0x6f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x61, 0x20, 0x61, 0x73, - 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x20, 0x61, 0x73, - 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2c, 0x75, 0x20, 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, - 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, - 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x75, 0x74, 0x65, 0x64, 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, - 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, - 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, - 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x50, - 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, - 0x75, 0x74, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, - 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, - 0x63, 0x65, 0x72, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, - 0x65, 0x52, 0x65, 0x66, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a -}; -unsigned int index_js_len = 22800; +const char index_js[] = R"LITERAL( +function t(){throw new Error("Cycle detected")}const n=Symbol.for("preact-signals");function e(){if(f>1){f--;return}let t,n=!1;while(void 0!==o){let _=o;o=void 0;s++;while(void 0!==_){const i=_.o;_.o=void 0;_.f&=-3;if(!(8&_.f)&&p(_))try{_.c()}catch(e){if(!n){t=e;n=!0}}_=i}}s=0;f--;if(n)throw t}function _(t){if(f>0)return t();f++;try{return t()}finally{e()}}let i,o,r=0;function u(t){if(r>0)return t();const n=i;i=void 0;r++;try{return t()}finally{r--;i=n}}let f=0,s=0,l=0;function c(t){if(void 0===i)return;let n=t.n;if(void 0===n||n.t!==i){n={i:0,S:t,p:i.s,n:void 0,t:i,e:void 0,x:void 0,r:n};if(void 0!==i.s)i.s.n=n;i.s=n;t.n=n;if(32&i.f)t.S(n);return n}else if(-1===n.i){n.i=0;if(void 0!==n.n){n.n.p=n.p;if(void 0!==n.p)n.p.n=n.n;n.p=i.s;n.n=void 0;i.s.n=n;i.s=n}return n}}function h(t){this.v=t;this.i=0;this.n=void 0;this.t=void 0}h.prototype.brand=n;h.prototype.h=function(){return!0};h.prototype.S=function(t){if(this.t!==t&&void 0===t.e){t.x=this.t;if(void 0!==this.t)this.t.e=t;this.t=t}};h.prototype.U=function(t){if(void 0!==this.t){const n=t.e,e=t.x;if(void 0!==n){n.x=e;t.e=void 0}if(void 
0!==e){e.e=n;t.x=void 0}if(t===this.t)this.t=e}};h.prototype.subscribe=function(t){const n=this;return w((function(){const e=n.value,_=32&this.f;this.f&=-33;try{t(e)}finally{this.f|=_}}))};h.prototype.valueOf=function(){return this.value};h.prototype.toString=function(){return this.value+""};h.prototype.toJSON=function(){return this.value};h.prototype.peek=function(){return this.v};Object.defineProperty(h.prototype,"value",{get(){const t=c(this);if(void 0!==t)t.i=this.i;return this.v},set(n){if(i instanceof y)!function(){throw new Error("Computed cannot have side-effects")}();if(n!==this.v){if(s>100)t();this.v=n;this.i++;l++;f++;try{for(let t=this.t;void 0!==t;t=t.x)t.t.N()}finally{e()}}}});function a(t){return new h(t)}function p(t){for(let n=t.s;void 0!==n;n=n.n)if(n.S.i!==n.i||!n.S.h()||n.S.i!==n.i)return!0;return!1}function d(t){for(let n=t.s;void 0!==n;n=n.n){const e=n.S.n;if(void 0!==e)n.r=e;n.S.n=n;n.i=-1;if(void 0===n.n){t.s=n;break}}}function v(t){let n,e=t.s;while(void 0!==e){const t=e.p;if(-1===e.i){e.S.U(e);if(void 0!==t)t.n=e.n;if(void 0!==e.n)e.n.p=t}else n=e;e.S.n=e.r;if(void 0!==e.r)e.r=void 0;e=t}t.s=n}function y(t){h.call(this,void 0);this.x=t;this.s=void 0;this.g=l-1;this.f=4}(y.prototype=new h).h=function(){this.f&=-3;if(1&this.f)return!1;if(32==(36&this.f))return!0;this.f&=-5;if(this.g===l)return!0;this.g=l;this.f|=1;if(this.i>0&&!p(this)){this.f&=-2;return!0}const t=i;try{d(this);i=this;const t=this.x();if(16&this.f||this.v!==t||0===this.i){this.v=t;this.f&=-17;this.i++}}catch(t){this.v=t;this.f|=16;this.i++}i=t;v(this);this.f&=-2;return!0};y.prototype.S=function(t){if(void 0===this.t){this.f|=36;for(let t=this.s;void 0!==t;t=t.n)t.S.S(t)}h.prototype.S.call(this,t)};y.prototype.U=function(t){if(void 0!==this.t){h.prototype.U.call(this,t);if(void 0===this.t){this.f&=-33;for(let t=this.s;void 0!==t;t=t.n)t.S.U(t)}}};y.prototype.N=function(){if(!(2&this.f)){this.f|=6;for(let t=this.t;void 0!==t;t=t.x)t.t.N()}};y.prototype.peek=function(){if(!this.h())t();if(16&this.f)throw this.v;return this.v};Object.defineProperty(y.prototype,"value",{get(){if(1&this.f)t();const n=c(this);this.h();if(void 0!==n)n.i=this.i;if(16&this.f)throw this.v;return this.v}});function m(t){return new y(t)}function g(t){const n=t.u;t.u=void 0;if("function"==typeof n){f++;const _=i;i=void 0;try{n()}catch(n){t.f&=-2;t.f|=8;b(t);throw n}finally{i=_;e()}}}function b(t){for(let n=t.s;void 0!==n;n=n.n)n.S.U(n);t.x=void 0;t.s=void 0;g(t)}function k(t){if(i!==this)throw new Error("Out-of-order effect");v(this);i=t;this.f&=-2;if(8&this.f)b(this);e()}function S(t){this.x=t;this.u=void 0;this.s=void 0;this.o=void 0;this.f=32}S.prototype.c=function(){const t=this.S();try{if(8&this.f)return;if(void 0===this.x)return;const n=this.x();if("function"==typeof n)this.u=n}finally{t()}};S.prototype.S=function(){if(1&this.f)t();this.f|=1;this.f&=-9;g(this);d(this);f++;const n=i;i=this;return k.bind(this,n)};S.prototype.N=function(){if(!(2&this.f)){this.f|=2;this.o=o;o=this}};S.prototype.d=function(){this.f|=8;if(!(1&this.f))b(this)};function w(t){const n=new S(t);try{n.c()}catch(t){n.d();throw t}return n.d.bind(n)}var x,C,E,U,H,P,N,$,D,T={},V=[],A=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i,F=Array.isArray;function M(t,n){for(var e in n)t[e]=n[e];return t}function W(t){var n=t.parentNode;n&&n.removeChild(t)}function L(t,n,e){var _,i,o,r={};for(o in n)"key"==o?_=n[o]:"ref"==o?i=n[o]:r[o]=n[o];if(arguments.length>2&&(r.children=arguments.length>3?x.call(arguments,2):e),"function"==typeof 
t&&null!=t.defaultProps)for(o in t.defaultProps)void 0===r[o]&&(r[o]=t.defaultProps[o]);return O(t,r,_,i,null)}function O(t,n,e,_,i){var o={type:t,props:n,key:e,ref:_,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,constructor:void 0,__v:null==i?++E:i,__i:-1,__u:0};return null==i&&null!=C.vnode&&C.vnode(o),o}function R(){return{current:null}}function j(t){return t.children}function I(t,n){this.props=t,this.context=n}function q(t,n){if(null==n)return t.__?q(t.__,t.__i+1):null;for(var e;nn&&H.sort($));z.__r=0}function J(t,n,e,_,i,o,r,u,f,s,l){var c,h,a,p,d,v=_&&_.__k||V,y=n.length;for(e.__d=f,K(e,n,v),f=e.__d,c=0;c0?O(i.type,i.props,i.key,i.ref?i.ref:null,i.__v):i)?(i.__=t,i.__b=t.__b+1,u=Y(i,e,r=_+c,l),i.__i=u,o=null,-1!==u&&(l--,(o=e[u])&&(o.__u|=131072)),null==o||null===o.__v?(-1==u&&c--,"function"!=typeof i.type&&(i.__u|=65536)):u!==r&&(u===r+1?c++:u>r?l>f-r?c+=u-r:c--:c=u(null!=f&&0==(131072&f.__u)?1:0))for(;r>=0||u=0){if((f=n[r])&&0==(131072&f.__u)&&i==f.key&&o===f.type)return r;r--}if(u2&&(u.children=arguments.length>3?x.call(arguments,2):e),O(t.type,u,_||t.key,i||t.ref,null)}function ht(t,n){var e={__c:n="__cC"+D++,__:t,Consumer:function(t,n){return t.children(n)},Provider:function(t){var e,_;return this.getChildContext||(e=[],(_={})[n]=this,this.getChildContext=function(){return _},this.shouldComponentUpdate=function(t){this.props.value!==t.value&&e.some((function(t){t.__e=!0,G(t)}))},this.sub=function(t){e.push(t);var n=t.componentWillUnmount;t.componentWillUnmount=function(){e.splice(e.indexOf(t),1),n&&n.call(t)}}),t.children}};return e.Provider.__=e.Consumer.contextType=e}x=V.slice,C={__e:function(t,n,e,_){for(var i,o,r;n=n.__;)if((i=n.__c)&&!i.__)try{if((o=i.constructor)&&null!=o.getDerivedStateFromError&&(i.setState(o.getDerivedStateFromError(t)),r=i.__d),null!=i.componentDidCatch&&(i.componentDidCatch(t,_||{}),r=i.__d),r)return i.__E=i}catch(n){t=n}throw t}},E=0,U=function(t){return null!=t&&null==t.constructor},I.prototype.setState=function(t,n){var e;e=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=M({},this.state),"function"==typeof t&&(t=t(M({},e),this.props)),t&&M(e,t),null!=t&&this.__v&&(n&&this._sb.push(n),G(this))},I.prototype.forceUpdate=function(t){this.__v&&(this.__e=!0,t&&this.__h.push(t),G(this))},I.prototype.render=j,H=[],N="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,$=function(t,n){return t.__v.__b-n.__v.__b},z.__r=0,D=0;var at,pt,dt,vt,yt=0,mt=[],gt=[],bt=C.__b,kt=C.__r,St=C.diffed,wt=C.__c,xt=C.unmount;function Ct(t,n){C.__h&&C.__h(pt,t,yt||n),yt=0;var e=pt.__H||(pt.__H={__:[],__h:[]});return t>=e.__.length&&e.__.push({__V:gt}),e.__[t]}function Et(t){return yt=1,Ut(qt,t)}function Ut(t,n,e){var _=Ct(at++,2);if(_.t=t,!_.__c&&(_.__=[e?e(n):qt(void 0,n),function(t){var n=_.__N?_.__N[0]:_.__[0],e=_.t(n,t);n!==e&&(_.__N=[e,_.__[1]],_.__c.setState({}))}],_.__c=pt,!pt.u)){var i=function(t,n,e){if(!_.__c.__H)return!0;var i=_.__c.__H.__.filter((function(t){return t.__c}));if(i.every((function(t){return!t.__N})))return!o||o.call(this,t,n,e);var r=!1;return i.forEach((function(t){if(t.__N){var n=t.__[0];t.__=t.__N,t.__N=void 0,n!==t.__[0]&&(r=!0)}})),!(!r&&_.__c.props===t)&&(!o||o.call(this,t,n,e))};pt.u=!0;var o=pt.shouldComponentUpdate,r=pt.componentWillUpdate;pt.componentWillUpdate=function(t,n,e){if(this.__e){var _=o;o=void 0,i(t,n,e),o=_}r&&r.call(this,t,n,e)},pt.shouldComponentUpdate=i}return _.__N||_.__}function Ht(t,n){var e=Ct(at++,3);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__H.__h.push(e))}function Pt(t,n){var 
e=Ct(at++,4);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__h.push(e))}function Nt(t){return yt=5,Dt((function(){return{current:t}}),[])}function $t(t,n,e){yt=6,Pt((function(){return"function"==typeof t?(t(n()),function(){return t(null)}):t?(t.current=n(),function(){return t.current=null}):void 0}),null==e?e:e.concat(t))}function Dt(t,n){var e=Ct(at++,7);return It(e.__H,n)?(e.__V=t(),e.i=n,e.__h=t,e.__V):e.__}function Tt(t,n){return yt=8,Dt((function(){return t}),n)}function Vt(t){var n=pt.context[t.__c],e=Ct(at++,9);return e.c=t,n?(null==e.__&&(e.__=!0,n.sub(pt)),n.props.value):t.__}function At(t,n){C.useDebugValue&&C.useDebugValue(n?n(t):t)}function Ft(t){var n=Ct(at++,10),e=Et();return n.__=t,pt.componentDidCatch||(pt.componentDidCatch=function(t,_){n.__&&n.__(t,_),e[1](t)}),[e[0],function(){e[1](void 0)}]}function Mt(){var t=Ct(at++,11);if(!t.__){for(var n=pt.__v;null!==n&&!n.__m&&null!==n.__;)n=n.__;var e=n.__m||(n.__m=[0,0]);t.__="P"+e[0]+"-"+e[1]++}return t.__}function Wt(){for(var t;t=mt.shift();)if(t.__P&&t.__H)try{t.__H.__h.forEach(Rt),t.__H.__h.forEach(jt),t.__H.__h=[]}catch(u){t.__H.__h=[],C.__e(u,t.__v)}}C.__b=function(t){pt=null,bt&&bt(t)},C.__r=function(t){kt&&kt(t),at=0;var n=(pt=t.__c).__H;n&&(dt===pt?(n.__h=[],pt.__h=[],n.__.forEach((function(t){t.__N&&(t.__=t.__N),t.__V=gt,t.__N=t.i=void 0}))):(n.__h.forEach(Rt),n.__h.forEach(jt),n.__h=[],at=0)),dt=pt},C.diffed=function(t){St&&St(t);var n=t.__c;n&&n.__H&&(n.__H.__h.length&&(1!==mt.push(n)&&vt===C.requestAnimationFrame||((vt=C.requestAnimationFrame)||Ot)(Wt)),n.__H.__.forEach((function(t){t.i&&(t.__H=t.i),t.__V!==gt&&(t.__=t.__V),t.i=void 0,t.__V=gt}))),dt=pt=null},C.__c=function(t,n){n.some((function(t){try{t.__h.forEach(Rt),t.__h=t.__h.filter((function(t){return!t.__||jt(t)}))}catch(l){n.some((function(t){t.__h&&(t.__h=[])})),n=[],C.__e(l,t.__v)}})),wt&&wt(t,n)},C.unmount=function(t){xt&&xt(t);var n,e=t.__c;e&&e.__H&&(e.__H.__.forEach((function(t){try{Rt(t)}catch(t){n=t}})),e.__H=void 0,n&&C.__e(n,e.__v))};var Lt="function"==typeof requestAnimationFrame;function Ot(t){var n,e=function(){clearTimeout(_),Lt&&cancelAnimationFrame(n),setTimeout(t)},_=setTimeout(e,100);Lt&&(n=requestAnimationFrame(e))}function Rt(t){var n=pt,e=t.__c;"function"==typeof e&&(t.__c=void 0,e()),pt=n}function jt(t){var n=pt;t.__c=t.__(),pt=n}function It(t,n){return!t||t.length!==n.length||n.some((function(n,e){return n!==t[e]}))}function qt(t,n){return"function"==typeof n?n(t):n}function Bt(t,n){C[t]=n.bind(null,C[t]||(()=>{}))}let Gt,zt;function Jt(t){if(zt)zt();zt=t&&t.S()}function Kt({data:t}){const n=Xt(t);n.value=t;const e=Dt(()=>{let t=this.__v;while(t=t.__)if(t.__c){t.__c.__$f|=4;break}this.__$u.c=()=>{var t;if(!U(e.peek())&&3===(null==(t=this.base)?void 0:t.nodeType))this.base.data=e.peek();else{this.__$f|=1;this.setState({})}};return m(()=>{let t=n.value.value;return 0===t?0:!0===t?"":t||""})},[]);return e.value}Kt.displayName="_st";Object.defineProperties(h.prototype,{constructor:{configurable:!0,value:void 0},type:{configurable:!0,value:Kt},props:{configurable:!0,get(){return{data:this}}},__b:{configurable:!0,value:1}});Bt("__b",(t,n)=>{if("string"==typeof n.type){let t,e=n.props;for(let _ in e){if("children"===_)continue;let i=e[_];if(i instanceof h){if(!t)n.__np=t={};t[_]=i;e[_]=i.peek()}}}t(n)});Bt("__r",(t,n)=>{Jt();let e,_=n.__c;if(_){_.__$f&=-2;e=_.__$u;if(void 0===e)_.__$u=e=function(t){let n;w((function(){n=this}));n.c=()=>{_.__$f|=1;_.setState({})};return n}()}Gt=_;Jt(e);t(n)});Bt("__e",(t,n,e,_)=>{Jt();Gt=void 
0;t(n,e,_)});Bt("diffed",(t,n)=>{Jt();Gt=void 0;let e;if("string"==typeof n.type&&(e=n.__e)){let t=n.__np,_=n.props;if(t){let n=e.U;if(n)for(let e in n){let _=n[e];if(void 0!==_&&!(e in t)){_.d();n[e]=void 0}}else{n={};e.U=n}for(let i in t){let o=n[i],r=t[i];if(void 0===o){o=Qt(e,i,r,_);n[i]=o}else o.o(r,_)}}}t(n)});function Qt(t,n,e,_){const i=n in t&&void 0===t.ownerSVGElement,o=a(e);return{o:(t,n)=>{o.value=t;_=n},d:w(()=>{const e=o.value.value;if(_[n]!==e){_[n]=e;if(i)t[n]=e;else if(e)t.setAttribute(n,e);else t.removeAttribute(n)}})}}Bt("unmount",(t,n)=>{if("string"==typeof n.type){let t=n.__e;if(t){const n=t.U;if(n){t.U=void 0;for(let t in n){let e=n[t];if(e)e.d()}}}}else{let t=n.__c;if(t){const n=t.__$u;if(n){t.__$u=void 0;n.d()}}}t(n)});Bt("__h",(t,n,e,_)=>{if(_<3||9===_)n.__$f|=2;t(n,e,_)});I.prototype.shouldComponentUpdate=function(t,n){const e=this.__$u;if(!(e&&void 0!==e.s||4&this.__$f))return!0;if(3&this.__$f)return!0;for(let _ in n)return!0;for(let _ in t)if("__source"!==_&&t[_]!==this.props[_])return!0;for(let _ in this.props)if(!(_ in t))return!0;return!1};function Xt(t){return Dt(()=>a(t),[])}function Yt(t){const n=Nt(t);n.current=t;Gt.__$f|=4;return Dt(()=>m(()=>n.current()),[])}function Zt(t){const n=Nt(t);n.current=t;Ht(()=>w(()=>n.current()),[])}var tn=function(t,n,e,_){var i;n[0]=0;for(var o=1;o=5&&((i||!t&&5===_)&&(r.push(_,0,i,e),_=6),t&&(r.push(_,t,0,e),_=6)),i=""},f=0;f"===n?(_=1,i=""):i=n+i[0]:o?n===o?o="":i+=n:'"'===n||"'"===n?o=n:">"===n?(u(),_=1):_&&("="===n?(_=5,e=i,i=""):"/"===n&&(_<5||">"===t[f][s+1])?(u(),3===_&&(r=r[0]),_=r,(r=r[0]).push(2,0,_),_=0):" "===n||"\t"===n||"\n"===n||"\r"===n?(u(),_=2):i+=n),3===_&&"!--"===i&&(_=4,r=r[0])}return u(),r}(t)),n),arguments,[])).length>1?n:n[0]}var _n=en.bind(L);export{I as Component,j as Fragment,h as Signal,_ as batch,ct as cloneElement,m as computed,ht as createContext,L as createElement,R as createRef,w as effect,L as h,_n as html,lt as hydrate,U as isValidElement,C as options,st as render,a as signal,X as toChildArray,u as untracked,Tt as useCallback,Yt as useComputed,Vt as useContext,At as useDebugValue,Ht as useEffect,Ft as useErrorBoundary,Mt as useId,$t as useImperativeHandle,Pt as useLayoutEffect,Dt as useMemo,Ut as useReducer,Nt as useRef,Xt as useSignal,Zt as useSignalEffect,Et as useState}; +)LITERAL"; +unsigned int index_js_len = sizeof(index_js); diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp index 0a05c369d..83b22d670 100644 --- a/examples/server/json-schema-to-grammar.mjs.hpp +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -1,311 +1,115 @@ -unsigned char json_schema_to_grammar_mjs[] = { - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, - 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, - 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, - 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, - 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, - 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, - 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, - 0x75, 0x6d, 0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, - 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, - 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, - 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 
0x5b, 0x30, 0x2d, 0x39, 0x5d, - 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, - 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, - 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, - 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, - 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, - 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, - 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, - 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, - 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, - 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, - 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, - 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, - 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, - 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, - 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, - 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, - 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, - 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, - 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, - 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, - 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, - 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, - 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, - 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x72, 0x27, - 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, - 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, - 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, - 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, - 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, - 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, - 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 
0x27, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, - 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x4a, 0x53, - 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, - 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, - 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, - 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, - 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, - 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, - 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, - 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, - 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, - 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, - 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, - 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, - 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, - 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x5f, 0x72, 0x75, 0x6c, 0x65, - 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, - 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, - 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, - 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, - 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, - 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, - 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, - 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, - 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, - 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, - 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, - 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, - 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, - 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, - 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 
0x6d, 0x61, 0x74, 0x4c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, - 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, - 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, - 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, - 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, - 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, - 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, - 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, - 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, - 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, - 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, - 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, - 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, - 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, - 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, - 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, - 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, - 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, - 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, - 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, - 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, - 0x72, 0x27, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, - 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, - 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 
0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, - 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, - 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, - 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, - 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, - 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, - 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, - 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, - 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, - 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, - 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, - 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, - 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, - 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, - 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, - 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, - 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, - 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, - 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, - 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, - 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, - 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, - 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 
0x70, 0x79, 0x74, 0x68, 0x6f, - 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, - 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, - 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, - 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, - 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, - 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, - 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, - 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, - 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, - 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, - 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, - 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, - 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, - 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, - 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, - 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 
0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, - 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, - 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a +const char json_schema_to_grammar_mjs[] = R"LITERAL( +const SPACE_RULE = '" "?'; + +const PRIMITIVE_RULES = { + boolean: '("true" | "false") space', + number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space', + integer: '("-"? ([0-9] | [1-9] [0-9]*)) space', + string: ` "\\"" ( + [^"\\\\] | + "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) + )* "\\"" space`, + null: '"null" space', }; -unsigned int json_schema_to_grammar_mjs_len = 3695; + +const INVALID_RULE_CHARS_RE = /[^\dA-Za-z-]+/g; +const GRAMMAR_LITERAL_ESCAPE_RE = /[\n\r"]/g; +const GRAMMAR_LITERAL_ESCAPES = {'\r': '\\r', '\n': '\\n', '"': '\\"'}; + +export class SchemaConverter { + constructor(propOrder) { + this._propOrder = propOrder || {}; + this._rules = new Map(); + this._rules.set('space', SPACE_RULE); + } + + _formatLiteral(literal) { + const escaped = JSON.stringify(literal).replace( + GRAMMAR_LITERAL_ESCAPE_RE, + m => GRAMMAR_LITERAL_ESCAPES[m] + ); + return `"${escaped}"`; + } + + _addRule(name, rule) { + let escName = name.replace(INVALID_RULE_CHARS_RE, '-'); + let key = escName; + + if (this._rules.has(escName)) { + if (this._rules.get(escName) === rule) { + return key; + } + + let i = 0; + while (this._rules.has(`${escName}${i}`)) { + i += 1; + } + key = `${escName}${i}`; + } + + this._rules.set(key, rule); + return key; + } + + visit(schema, name) { + const schemaType = schema.type; + const ruleName = name || 'root'; + + if (schema.oneOf || schema.anyOf) { + const rule = (schema.oneOf || schema.anyOf).map((altSchema, i) => + this.visit(altSchema, `${name}${name ? "-" : ""}${i}`) + ).join(' | '); + + return this._addRule(ruleName, rule); + } else if ('const' in schema) { + return this._addRule(ruleName, this._formatLiteral(schema.const)); + } else if ('enum' in schema) { + const rule = schema.enum.map(v => this._formatLiteral(v)).join(' | '); + return this._addRule(ruleName, rule); + } else if (schemaType === 'object' && 'properties' in schema) { + // TODO: `required` keyword (from python implementation) + const propOrder = this._propOrder; + const propPairs = Object.entries(schema.properties).sort((a, b) => { + // sort by position in prop_order (if specified) then by key + const orderA = typeof propOrder[a[0]] === 'number' ? propOrder[a[0]] : Infinity; + const orderB = typeof propOrder[b[0]] === 'number' ? propOrder[b[0]] : Infinity; + return orderA - orderB || a[0].localeCompare(b[0]); + }); + + let rule = '"{" space'; + propPairs.forEach(([propName, propSchema], i) => { + const propRuleName = this.visit(propSchema, `${name}${name ? 
"-" : ""}${propName}`); + if (i > 0) { + rule += ' "," space'; + } + rule += ` ${this._formatLiteral(propName)} space ":" space ${propRuleName}`; + }); + rule += ' "}" space'; + + return this._addRule(ruleName, rule); + } else if (schemaType === 'array' && 'items' in schema) { + // TODO `prefixItems` keyword (from python implementation) + const itemRuleName = this.visit(schema.items, `${name}${name ? "-" : ""}item`); + const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`; + return this._addRule(ruleName, rule); + } else { + if (!PRIMITIVE_RULES[schemaType]) { + throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`); + } + return this._addRule( + ruleName === 'root' ? 'root' : schemaType, + PRIMITIVE_RULES[schemaType] + ); + } + } + + formatGrammar() { + let grammar = ''; + this._rules.forEach((rule, name) => { + grammar += `${name} ::= ${rule}\n`; + }); + return grammar; + } +} +)LITERAL"; +unsigned int json_schema_to_grammar_mjs_len = sizeof(json_schema_to_grammar_mjs); From e6f291d15844398f8326940fe5ad7f2e02b5aa56 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Jan 2024 20:17:30 +0200 Subject: [PATCH 470/811] server : fix context shift (#5195) * server : fix context shift + simplify self-extend * server : take system_tokens into account * server : more n_past fixes * server : rever n_past_se changes --- examples/server/chat.sh | 1 + examples/server/server.cpp | 109 ++++++++++++++++++++----------------- 2 files changed, 60 insertions(+), 50 deletions(-) diff --git a/examples/server/chat.sh b/examples/server/chat.sh index 014360121..da0a6ca68 100755 --- a/examples/server/chat.sh +++ b/examples/server/chat.sh @@ -48,6 +48,7 @@ chat_completion() { top_p: 0.9, n_keep: $n_keep, n_predict: 256, + cache_prompt: true, stop: ["\n### Human:"], stream: true }')" diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 11dd82c33..21bdce8ed 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -185,7 +185,7 @@ struct llama_client_slot llama_sampling_context *ctx_sampling = nullptr; int32_t ga_i = 0; // group-attention state - int32_t ga_n = 1;// group-attention factor + int32_t ga_n = 1; // group-attention factor int32_t ga_w = 512; // group-attention width int32_t n_past_se = 0; // self-extend @@ -219,7 +219,8 @@ struct llama_client_slot sent_token_probs_index = 0; infill = false; ga_i = 0; - n_past_se = 0; + n_past_se = 0; + generated_token_probs.clear(); for (slot_image & img : images) @@ -1227,7 +1228,7 @@ struct llama_server_context std::vector append_tokens = tokenize(json_prompt, false); // has next image for (int i = 0; i < (int) append_tokens.size(); ++i) { - llama_batch_add(batch, append_tokens[i], slot.n_past, { slot.id }, true); + llama_batch_add(batch, append_tokens[i], system_tokens.size() + slot.n_past, { slot.id }, true); slot.n_past += 1; } } @@ -1295,6 +1296,8 @@ struct llama_server_context for (llama_client_slot &slot : slots) { slot.cache_tokens.clear(); + slot.n_past = 0; + slot.n_past_se = 0; } } @@ -1364,26 +1367,26 @@ struct llama_server_context kv_cache_clear(); } return true; - } else { - task_server task; - task.type = TASK_TYPE_NEXT_RESPONSE; - task.target_id = -1; - queue_tasks.post(task); } + task_server task; + task.type = TASK_TYPE_NEXT_RESPONSE; + task.target_id = -1; + queue_tasks.post(task); + for (llama_client_slot &slot : slots) { if (slot.ga_n == 1) { - if (slot.is_processing() && slot.cache_tokens.size() >= (size_t) slot.n_ctx) + if (slot.is_processing() && 
system_tokens.size() + slot.cache_tokens.size() >= (size_t) slot.n_ctx) { // Shift context - const int n_left = slot.n_past - slot.params.n_keep - 1; + const int n_left = system_tokens.size() + slot.n_past - slot.params.n_keep - 1; const int n_discard = n_left / 2; LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, slot.params.n_keep, n_left, n_discard); llama_kv_cache_seq_rm (ctx, slot.id, slot.params.n_keep + 1 , slot.params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, slot.n_past, -n_discard); + llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, system_tokens.size() + slot.n_past, -n_discard); for (size_t i = slot.params.n_keep + 1 + n_discard; i < slot.cache_tokens.size(); i++) { @@ -1429,8 +1432,10 @@ struct llama_server_context slot.i_batch = batch.n_tokens; const int32_t slot_npast = slot.n_past_se > 0 ? slot.n_past_se : slot.n_past; - llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); + // TODO: we always have to take into account the "system_tokens" + // this is not great and needs to be improved somehow + llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); slot.n_past += 1; } @@ -1481,8 +1486,8 @@ struct llama_server_context prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(model)); prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(model)); // always add BOS - prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); - prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); + prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); + prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); prefix_tokens.push_back(llama_token_middle(model)); prompt_tokens = prefix_tokens; } @@ -1582,8 +1587,8 @@ struct llama_server_context } LOG_VERBOSE("prompt ingested", { - {"n_past", slot.n_past}, - {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, + {"n_past", slot.n_past}, + {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, {"to_eval", tokens_to_str(ctx, slot.cache_tokens.cbegin() + slot.n_past, slot.cache_tokens.cend())}, }); @@ -1591,10 +1596,13 @@ struct llama_server_context // process the prefix of first image std::vector prefix_tokens = has_images ? tokenize(slot.images[0].prefix_prompt, add_bos_token) : prompt_tokens; + int32_t slot_npast = slot.n_past_se > 0 ? 
slot.n_past_se : slot.n_past; - int ga_i = slot.ga_i; + + int32_t ga_i = slot.ga_i; int32_t ga_n = slot.ga_n; int32_t ga_w = slot.ga_w; + for (; slot.n_past < (int) prefix_tokens.size(); ++slot.n_past) { if (slot.ga_n != 1) @@ -1606,7 +1614,7 @@ struct llama_server_context } } llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot_npast, {slot.id }, false); - slot_npast += 1; + slot_npast++; } if (has_images && !ingest_images(slot, n_batch)) @@ -1666,6 +1674,7 @@ struct llama_server_context slot.n_past_se += n_tokens; } } + llama_batch batch_view = { n_tokens, @@ -1782,51 +1791,51 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); if (llama_mlock_supported()) { - printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); + printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } if (llama_mmap_supported()) { - printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); + printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa attempt optimizations that help on some NUMA systems\n"); #ifdef LLAMA_SUPPORTS_GPU_OFFLOAD printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); + printf(" number of layers to store in VRAM\n"); printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row)\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); #endif printf(" -m FNAME, --model FNAME\n"); - printf(" model path (default: %s)\n", params.model.c_str()); + printf(" model path (default: %s)\n", params.model.c_str()); printf(" -a ALIAS, --alias ALIAS\n"); - printf(" set an alias for the model, will be added as `model` field in completion response\n"); - printf(" --lora FNAME apply LoRA adapter (implies --no-mmap)\n"); - printf(" --lora-base FNAME optional model to use as a base for the layers modified by the LoRA adapter\n"); - printf(" --host ip address to listen (default (default: %s)\n", sparams.hostname.c_str()); - printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); - printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); - printf(" --api-key API_KEY optional api key to enhance server security. 
If set, requests must include this key for access.\n"); - printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); - printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); - printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); - printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); - printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); - printf(" -spf FNAME, --system-prompt-file FNAME\n"); - printf(" Set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); - printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); - printf(" --log-disable disables logging to a file.\n"); + printf(" set an alias for the model, will be added as `model` field in completion response\n"); + printf(" --lora FNAME apply LoRA adapter (implies --no-mmap)\n"); + printf(" --lora-base FNAME optional model to use as a base for the layers modified by the LoRA adapter\n"); + printf(" --host ip address to listen (default (default: %s)\n", sparams.hostname.c_str()); + printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); + printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); + printf(" --api-key API_KEY optional api key to enhance server security. If set, requests must include this key for access.\n"); + printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); + printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); + printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); + printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); + printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); + printf(" -spf FNAME, --system-prompt-file FNAME\n"); + printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); + printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); + printf(" --log-disable disables logging to a file.\n"); printf("\n"); printf(" --override-kv KEY=TYPE:VALUE\n"); - printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); - printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -gan N, --grp-attn-n N Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); - printf(" -gaw N, --grp-attn-w N Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); + printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); + printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); + printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); printf("\n"); } From e0085fdf7c758f0bc2746fc106fb29dd9df959de Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Jan 2024 21:19:26 +0200 Subject: [PATCH 471/811] Revert "server : change deps.sh xxd files to string literals (#5221)" This reverts commit 4003be0e5feef320f3707786f22722b73cff9356. --- examples/server/completion.js.hpp | 651 ++- examples/server/deps.sh | 11 +- examples/server/index.html.hpp | 3829 ++++++++++++----- examples/server/index.js.hpp | 1907 +++++++- .../server/json-schema-to-grammar.mjs.hpp | 424 +- 5 files changed, 5454 insertions(+), 1368 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index 5609ee3bf..fe5f81228 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -1,204 +1,449 @@ -const char completion_js[] = R"LITERAL( -const paramDefaults = { - stream: true, - n_predict: 500, - temperature: 0.2, - stop: ["
"] +unsigned char completion_js[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x3a, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, + 0x69, 0x63, 0x74, 0x3a, 0x20, 0x35, 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, + 0x20, 0x30, 0x2e, 0x32, 0x2c, 0x0a, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x5d, 0x0a, 0x7d, + 0x3b, 0x0a, 0x0a, 0x6c, 0x65, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x0a, + 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x2e, 0x20, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, + 0x6e, 0x64, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x6f, 0x73, + 0x74, 0x20, 0x75, 0x73, 0x65, 0x20, 0x63, 0x61, 0x73, 0x65, 0x73, 0x2e, + 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x22, + 0x54, 0x65, 0x6c, 0x6c, 0x20, 0x6d, 0x65, 0x20, 0x61, 0x20, 0x6a, 0x6f, + 0x6b, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, + 0x69, 0x63, 0x74, 0x3a, 0x20, 0x38, 0x30, 0x30, 0x7d, 0x29, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, + 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x2f, 0x2f, 0x0a, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, + 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, + 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 
0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, + 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x2c, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2c, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x3a, 0x20, 0x27, + 0x50, 0x4f, 0x53, 0x54, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x62, + 0x6f, 0x64, 0x79, 0x3a, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x73, 0x3a, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x27, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x27, + 0x3a, 0x20, 0x27, 0x6b, 0x65, 0x65, 0x70, 0x2d, 0x61, 0x6c, 0x69, 0x76, + 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2d, 0x54, 0x79, 0x70, 0x65, 0x27, + 0x3a, 0x20, 0x27, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x27, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x27, 0x41, 0x63, 0x63, 0x65, 0x70, 0x74, 0x27, + 0x3a, 0x20, 0x27, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x20, + 0x3f, 0x20, 0x7b, 0x27, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x60, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x7d, 0x60, 0x7d, 0x20, + 0x3a, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x3a, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, 0x20, 0x20, 0x7d, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, 0x79, 0x2e, 0x67, 0x65, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x54, 0x65, 0x78, + 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 
0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x42, 0x75, 0x66, + 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x61, 0x72, 0x74, + 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, 0x61, 0x64, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, 0x6e, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x41, + 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x65, 0x66, + 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, 0x64, 0x65, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x72, 0x61, 0x63, + 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x73, + 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, + 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x65, 0x6e, 0x64, + 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, + 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x49, + 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x64, + 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x20, 0x77, + 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, + 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, 0x65, 0x6e, 0x20, 0x74, + 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, + 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x20, + 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x65, 0x6e, + 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, + 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, + 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, 0x65, 0x20, 0x61, 0x20, + 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x61, + 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, 0x65, 0x20, 0x61, 0x6c, + 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, 0x20, 0x74, 0x68, 0x65, + 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, 0x2f, 0x5e, 0x28, 0x5c, + 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, + 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x28, 0x6c, + 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x20, + 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x79, 0x69, + 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x66, + 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, 0x61, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x66, 0x72, 0x6f, + 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2c, 0x20, 0x77, 0x65, + 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, + 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, + 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x3d, + 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, + 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 
0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, + 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, + 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, + 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, + 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x20, + 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, + 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, + 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, + 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 
0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, + 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 
0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, + 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, 0x74, 0x68, 0x61, 0x74, + 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, + 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, + 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 
0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, + 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, + 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, + 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, + 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, + 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, + 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, + 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 
0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, + 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, + 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, + 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; - -let generation_settings = null; - - -// Completes the prompt as a generator. Recommended for most use cases. -// -// Example: -// -// import { llama } from '/completion.js' -// -// const request = llama("Tell me a joke", {n_predict: 800}) -// for await (const chunk of request) { -// document.write(chunk.data.content) -// } -// -export async function* llama(prompt, params = {}, config = {}) { - let controller = config.controller; - - if (!controller) { - controller = new AbortController(); - } - - const completionParams = { ...paramDefaults, ...params, prompt }; - - const response = await fetch("/completion", { - method: 'POST', - body: JSON.stringify(completionParams), - headers: { - 'Connection': 'keep-alive', - 'Content-Type': 'application/json', - 'Accept': 'text/event-stream', - ...(params.api_key ? 
{'Authorization': `Bearer ${params.api_key}`} : {}) - }, - signal: controller.signal, - }); - - const reader = response.body.getReader(); - const decoder = new TextDecoder(); - - let content = ""; - let leftover = ""; // Buffer for partially read lines - - try { - let cont = true; - - while (cont) { - const result = await reader.read(); - if (result.done) { - break; - } - - // Add any leftover data to the current chunk of data - const text = leftover + decoder.decode(result.value); - - // Check if the last character is a line break - const endsWithLineBreak = text.endsWith('\n'); - - // Split the text into lines - let lines = text.split('\n'); - - // If the text doesn't end with a line break, then the last line is incomplete - // Store it in leftover to be added to the next chunk of data - if (!endsWithLineBreak) { - leftover = lines.pop(); - } else { - leftover = ""; // Reset leftover if we have a line break at the end - } - - // Parse all sse events and add them to result - const regex = /^(\S+):\s(.*)$/gm; - for (const line of lines) { - const match = regex.exec(line); - if (match) { - result[match[1]] = match[2] - // since we know this is llama.cpp, let's just decode the json in data - if (result.data) { - result.data = JSON.parse(result.data); - content += result.data.content; - - // yield - yield result; - - // if we got a stop token from server, we will break here - if (result.data.stop) { - if (result.data.generation_settings) { - generation_settings = result.data.generation_settings; - } - cont = false; - break; - } - } - if (result.error) { - result.error = JSON.parse(result.error); - if (result.error.content.includes('slot unavailable')) { - // Throw an error to be caught by upstream callers - throw new Error('slot unavailable'); - } else { - console.error(`llama.cpp error: ${result.error.content}`); - } - } - if (result.error) { - result.error = JSON.parse(result.error); - console.error(`llama.cpp error: ${result.error.content}`); - } - } - } - } - } catch (e) { - if (e.name !== 'AbortError') { - console.error("llama error: ", e); - } - throw e; - } - finally { - controller.abort(); - } - - return content; -} - -// Call llama, return an event target that you can subscribe to -// -// Example: -// -// import { llamaEventTarget } from '/completion.js' -// -// const conn = llamaEventTarget(prompt) -// conn.addEventListener("message", (chunk) => { -// document.write(chunk.detail.content) -// }) -// -export const llamaEventTarget = (prompt, params = {}, config = {}) => { - const eventTarget = new EventTarget(); - (async () => { - let content = ""; - for await (const chunk of llama(prompt, params, config)) { - if (chunk.data) { - content += chunk.data.content; - eventTarget.dispatchEvent(new CustomEvent("message", { detail: chunk.data })); - } - if (chunk.data.generation_settings) { - eventTarget.dispatchEvent(new CustomEvent("generation_settings", { detail: chunk.data.generation_settings })); - } - if (chunk.data.timings) { - eventTarget.dispatchEvent(new CustomEvent("timings", { detail: chunk.data.timings })); - } - } - eventTarget.dispatchEvent(new CustomEvent("done", { detail: { content } })); - })(); - return eventTarget; -} - -// Call llama, return a promise that resolves to the completed text. 
This does not support streaming -// -// Example: -// -// llamaPromise(prompt).then((content) => { -// document.write(content) -// }) -// -// or -// -// const content = await llamaPromise(prompt) -// document.write(content) -// -export const llamaPromise = (prompt, params = {}, config = {}) => { - return new Promise(async (resolve, reject) => { - let content = ""; - try { - for await (const chunk of llama(prompt, params, config)) { - content += chunk.data.content; - } - resolve(content); - } catch (error) { - reject(error); - } - }); -}; - -/** - * (deprecated) - */ -export const llamaComplete = async (params, controller, callback) => { - for await (const chunk of llama(params.prompt, params, { controller })) { - callback(chunk); - } -} - -// Get the model info from the server. This is useful for getting the context window and so on. -export const llamaModelInfo = async () => { - if (!generation_settings) { - generation_settings = await fetch("/model.json").then(r => r.json()); - } - return generation_settings; -} -)LITERAL"; -unsigned int completion_js_len = sizeof(completion_js); +unsigned int completion_js_len = 5346; diff --git a/examples/server/deps.sh b/examples/server/deps.sh index c0a9de9f9..ea23e6450 100755 --- a/examples/server/deps.sh +++ b/examples/server/deps.sh @@ -15,13 +15,6 @@ cd $PUBLIC for FILE in $FILES; do echo "generate $FILE.hpp" - # Use C++11 string literals instead of ugly xxd. - f=$(echo $FILE | sed 's/\./_/g' -e 's/-/_/g') - echo "const char $f[] = R\"LITERAL(" > $DIR/$FILE.hpp - cat $FILE >> $DIR/$FILE.hpp - echo ")LITERAL\";" >> $DIR/$FILE.hpp - echo "unsigned int ${f}_len = sizeof($f);" >> $DIR/$FILE.hpp - - #Deprecated old xxd - #xxd -i $FILE > $DIR/$FILE.hpp + # use simple flag for old version of xxd + xxd -i $FILE > $DIR/$FILE.hpp done diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index 603d12068..20551520e 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -1,1038 +1,2791 @@ -const char index_html[] = R"LITERAL( - - - - - - - llama.cpp - chat - - - - - - - -
- - - - -)LITERAL"; -unsigned int index_html_len = sizeof(index_html); +unsigned char index_html[] = { + 0x3c, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a, 0x3c, 0x68, 0x65, 0x61, + 0x64, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, 0x63, + 0x68, 0x61, 0x72, 0x73, 0x65, 0x74, 0x3d, 0x22, 0x55, 0x54, 0x46, 0x2d, + 0x38, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x76, 0x69, 0x65, 0x77, 0x70, 0x6f, + 0x72, 0x74, 0x22, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3d, + 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x3d, 0x64, 0x65, 0x76, 0x69, 0x63, + 0x65, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, 0x2c, 0x20, 0x69, 0x6e, 0x69, + 0x74, 0x69, 0x61, 0x6c, 0x2d, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x3d, 0x31, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x2d, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x3d, 0x31, 0x22, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, + 0x22, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3d, 0x22, 0x6c, + 0x69, 0x67, 0x68, 0x74, 0x20, 0x64, 0x61, 0x72, 0x6b, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x3c, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3e, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x2d, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3c, 0x2f, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x3c, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x6f, 0x64, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, + 0x79, 0x3a, 0x20, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x2d, 0x75, 0x69, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, + 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x39, 0x30, 0x25, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x23, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x65, 0x6d, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, + 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, 0x72, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x3a, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x2d, 0x62, 0x65, 0x74, 0x77, + 0x65, 0x65, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x33, 0x70, 0x78, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, + 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6a, + 0x75, 0x73, 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x3a, 0x20, 0x73, 0x70, 0x61, 
0x63, 0x65, 0x2d, 0x62, 0x65, + 0x74, 0x77, 0x65, 0x65, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, 0x65, 0x6d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x67, + 0x72, 0x6f, 0x77, 0x3a, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2d, 0x79, + 0x3a, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x31, + 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, 0x23, 0x63, 0x63, + 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, + 0x35, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x62, 0x6f, 0x64, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x78, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x3a, 0x20, 0x36, 0x30, 0x30, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x3a, 0x20, 0x33, 0x30, 0x30, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x2d, 0x68, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x3a, 0x20, 0x31, 0x2e, 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x20, + 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x70, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x66, 0x6c, 0x6f, 0x77, + 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3a, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x2d, 0x77, 0x6f, 0x72, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x6f, 0x72, 0x64, 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3a, 0x20, + 0x62, 0x72, 0x65, 0x61, 0x6b, 0x2d, 0x77, 0x6f, 0x72, 0x64, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x79, 0x70, 0x68, 0x65, 0x6e, + 0x73, 0x3a, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x2d, 0x74, 0x6f, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x2d, 0x62, + 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x23, 0x77, 0x72, 0x69, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x72, 0x6d, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, + 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x20, 0x30, + 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, + 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, + 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x69, 0x67, 0x6e, + 0x2d, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x3a, 
0x20, 0x73, 0x74, 0x72, 0x65, + 0x74, 0x63, 0x68, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x69, 0x67, 0x68, 0x74, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, 0x72, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x72, 0x6f, 0x77, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, + 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x65, + 0x6e, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, + 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, + 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x2e, 0x74, 0x77, 0x6f, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x3a, 0x20, 0x67, 0x72, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x72, 0x69, 0x64, 0x2d, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x61, 0x20, 0x61, 0x22, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, + 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x2e, + 0x74, 0x68, 0x72, 0x65, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x67, + 0x72, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, + 0x72, 0x69, 0x64, 0x2d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x3a, 0x20, 0x22, 0x61, 0x20, 0x61, 0x20, 0x61, 0x22, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x3a, 0x20, 0x31, 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, + 0x23, 0x61, 0x61, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, + 0x73, 0x3a, 0x20, 0x34, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, + 0x69, 0x6e, 0x2d, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x77, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x62, 0x6f, 0x6c, 0x64, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x6d, 0x61, 0x72, 0x67, 0x69, + 0x6e, 0x3a, 0x20, 0x2d, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x2d, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x5b, + 0x6f, 0x70, 0x65, 0x6e, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x2d, 0x73, + 0x65, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, 0x33, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x2d, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x3a, 0x20, + 0x31, 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, 0x23, 0x63, + 0x63, 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x3a, 0x20, 0x61, 0x62, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, + 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, + 0x77, 0x68, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, + 0x32, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, + 0x6f, 0x78, 0x2d, 0x73, 0x68, 0x61, 0x64, 0x6f, 0x77, 0x3a, 0x20, 0x30, + 0x20, 0x30, 0x20, 0x31, 0x30, 0x70, 0x78, 0x20, 0x72, 0x67, 0x62, 0x61, + 0x28, 0x30, 0x2c, 0x20, 0x30, 0x2c, 0x20, 0x30, 0x2c, 0x20, 0x30, 0x2e, + 0x31, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, + 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x35, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x67, 0x72, 0x6f, + 0x77, 0x3a, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x77, 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x72, 0x65, 0x20, 0x63, 0x6f, 0x64, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x3a, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, + 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x32, 0x32, + 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x64, 0x64, 0x64, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x64, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x6e, 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x3a, 0x20, 0x6d, + 0x6f, 0x6e, 0x6f, 0x73, 0x70, 0x61, 0x63, 
0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, + 0x20, 0x30, 0x2e, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x33, 0x65, 0x6d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, 0x33, + 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, + 0x35, 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2e, 0x73, 0x6c, 0x69, 0x6d, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2d, 0x61, 0x6c, 0x69, 0x67, 0x6e, + 0x3a, 0x20, 0x63, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6f, + 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x38, + 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x38, 0x38, 0x38, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, + 0x6f, 0x64, 0x65, 0x2d, 0x63, 0x68, 0x61, 0x74, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x34, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, + 0x31, 0x30, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x5b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x65, 0x64, 0x69, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x2d, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x77, 0x68, 0x69, 0x74, 0x65, 0x2d, 0x73, 0x70, 0x61, 0x63, 0x65, 0x3a, + 0x20, 0x70, 0x72, 0x65, 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x75, 0x74, 0x6c, 0x69, 0x6e, 0x65, + 0x3a, 0x20, 0x30, 0x70, 0x78, 0x20, 0x73, 
0x6f, 0x6c, 0x69, 0x64, 0x20, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x20, 0x6c, + 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, + 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x30, + 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, + 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, + 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, + 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, + 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, + 0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, + 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, + 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, + 0x0a, 0x20, 0x20, 0x3c, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x2c, 0x20, 0x68, 0x2c, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x2c, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x2c, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x76, 0x61, 0x72, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, + 0x3d, 0x20, 0x2d, 0x31, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, + 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x55, 0x73, + 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, 0x74, 0x2e, 0x20, 0x4c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, + 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, 0x6e, 
0x64, 0x2c, 0x20, 0x68, 0x6f, + 0x6e, 0x65, 0x73, 0x74, 0x2c, 0x20, 0x67, 0x6f, 0x6f, 0x64, 0x20, 0x61, + 0x74, 0x20, 0x77, 0x72, 0x69, 0x74, 0x69, 0x6e, 0x67, 0x2c, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x6e, 0x65, 0x76, 0x65, 0x72, 0x20, 0x66, 0x61, 0x69, + 0x6c, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, + 0x20, 0x61, 0x6e, 0x79, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x73, 0x20, 0x69, 0x6d, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x74, 0x65, 0x6c, + 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x70, + 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, + 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, + 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x2c, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x20, 0x7c, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x3a, 0x20, 0x22, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, + 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, + 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, + 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x73, + 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, + 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, + 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, + 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, + 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x3c, 0x3d, + 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, + 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 
0x65, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, + 0x39, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, + 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, + 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, + 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, + 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, + 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, + 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x74, 0x72, 0x75, 0x65, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, + 0x79, 0x3a, 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, + 0x52, 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, + 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 
0x20, 0x4c, 0x6f, 0x63, 0x61, + 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, + 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, + 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, + 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, + 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, + 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, + 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, + 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, + 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, + 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, + 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, + 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, + 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, + 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, + 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, + 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, + 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, + 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, + 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, + 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, 0x65, 
0x20, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, + 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x6c, 0x79, 0x20, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, + 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x6f, 0x76, 0x65, + 0x72, 0x72, 0x69, 0x64, 0x65, 0x20, 0x64, 
0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, + 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, + 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x6e, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, 0x74, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, 0x76, 0x69, + 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x22, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, + 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 
0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, + 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 
0x6c, 0x79, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, + 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, + 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, + 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, 
0x74, 0x61, 0x20, 0x66, 0x72, + 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, + 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, + 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, + 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, + 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, + 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, + 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, + 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, + 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, + 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, + 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, + 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, + 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, + 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, + 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, + 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, + 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, + 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, + 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 
0x74, 0x6f, 0x72, 0x69, 0x6e, + 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, + 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, + 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x6c, 0x65, 0x74, 0x20, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, + 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, + 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 
0x20, 0x7d, 0x29, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, + 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, + 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, + 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, + 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, + 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, + 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 
0x20, 0x26, 0x26, 0x20, 0x21, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, + 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, + 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, + 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, + 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, + 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, + 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, + 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, + 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, 
0x7b, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, + 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, + 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, + 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, + 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, + 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, + 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, + 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, 
0x66, 0x69, 0x63, 0x69, 0x61, + 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, + 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, + 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, + 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, + 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, + 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, + 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, + 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, + 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, + 0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, + 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, + 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 0x3c, + 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, + 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, + 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x22, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, + 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x5d, 
0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x6c, 0x79, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, + 0x28, 0x28, 0x5b, 0x5f, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, + 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, + 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, + 0x29, 0x20, 0x3a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, + 0x27, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, + 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, + 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 
0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, + 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, + 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, + 0x6e, 0x65, 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, + 0x2c, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, + 0x3d, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, + 0x6c, 0x65, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, + 0x3d, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 
0x20, 0x5b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, + 0x62, 0x61, 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, + 0x29, 0x2c, 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, + 0x44, 0x61, 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, + 0x20, 0x26, 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x68, 0x69, 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x6e, 0x73, 
0x75, 0x62, 0x6d, 0x69, 0x74, + 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, + 0x22, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6f, 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, + 0x24, 0x7b, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, + 0x73, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, + 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, + 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, + 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, + 0x65, 0x7d, 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, + 0x61, 0x67, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 
0x6e, 0x20, 0x6f, 0x6e, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, + 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x3d, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, + 0x73, 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, + 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, + 0x3e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, + 0x7d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, + 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, + 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3d, 
0x20, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, + 0x74, 0x6f, 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, + 0x65, 0x64, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, + 0x70, 0x20, 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, + 0x66, 0x66, 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, + 0x2b, 0x20, 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, + 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 
0x79, 0x28, 0x64, 0x61, 0x74, + 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, + 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, + 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, + 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, + 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, + 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, + 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, + 0x60, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, + 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, + 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, + 0x2f, 0x70, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, + 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, + 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, + 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, + 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, + 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, + 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, + 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x65, 0x64, 0x69, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x69, 0x73, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, + 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, + 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, + 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, + 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, + 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x4d, 0x61, + 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 
0x61, 0x6c, 0x75, 0x65, 0x29, + 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, + 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, + 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, + 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, + 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, + 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, + 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, + 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x5b, 0x63, 0x75, 0x72, + 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, 0x3a, 0x20, 0x69, 0x20, + 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 
0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, + 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, 0x43, 0x6f, 0x6e, + 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x3a, + 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, + 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, + 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, + 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, + 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, + 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, + 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, + 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, + 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 
0x20, 0x6d, 0x69, 0x6e, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, + 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, + 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, + 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, + 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, + 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, + 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, + 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, + 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, + 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 
0x61, 0x72, 0x7d, 0x22, 0x20, + 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, + 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, + 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, + 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, + 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 
0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, + 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, + 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, + 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 
0x3d, 0x20, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, 0x61, 0x74, 0x3c, 0x2f, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, + 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 
0x20, 0x2f, 0x3e, 0x20, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, + 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, + 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, + 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, + 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, + 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, + 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, + 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, 0x6e, 0x61, 0x6c, 0x69, + 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x20, 0x73, 0x65, + 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, + 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, + 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 
0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, + 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, + 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, + 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x4d, 0x69, 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, + 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x6e, 0x5f, + 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, + 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 
0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, + 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x20, 0x7d, + 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, + 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, + 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 
0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, + 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, + 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, + 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, + 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, + 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, + 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, + 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, + 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, + 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, + 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, + 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 
0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, + 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, + 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x74, 0x61, 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, + 0x74, 0x61, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, 0x77, 0x20, 0x50, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x61, 0x70, 0x69, + 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x3e, 0x41, 0x50, 0x49, 0x20, 0x4b, 0x65, + 0x79, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x5f, + 0x6b, 0x65, 0x79, 0x7d, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x45, 0x6e, 0x74, 0x65, 0x72, + 0x20, 0x41, 0x50, 0x49, 0x20, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x6f, 0x6e, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, + 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, + 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, + 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, + 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, + 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, + 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, + 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, + 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, + 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, + 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, + 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, + 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, + 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 
0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, + 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, + 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, + 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, + 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, + 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, + 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, + 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, + 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, + 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, + 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, + 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, + 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, + 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, + 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, + 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, + 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, + 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, + 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, + 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, + 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, + 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, + 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, + 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, + 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, + 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, + 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x68, + 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, + 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, + 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, + 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, + 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, + 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, + 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, + 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, + 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 
0x28, 0x21, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, + 0x64, 0x7d, 0x20, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x7d, 0x20, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, + 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, 0x70, + 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, 0x7b, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, + 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, + 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, + 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, + 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, + 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x63, 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, + 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, + 0x6f, 0x6d, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, + 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, + 0x63, 0x74, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, + 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, + 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 
0x3d, 0x20, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, + 0x6c, 0x65, 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, + 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, + 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x7d, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, + 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, + 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 
0x74, 0x6f, 0x70, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, + 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, + 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, + 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, + 0x6c, 0x6f, 0x70, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, + 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, + 0x2f, 0x6d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, + 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, + 0x6c, 0x2e, 0x6a, 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x2a, 0x20, 0x52, 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, + 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, + 0x65, 0x6e, 0x20, 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, + 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, + 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, + 0x61, 0x79, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, + 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x29, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, + 0x3a, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, + 0x68, 0x6f, 0x77, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, + 0x6f, 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x64, 
0x65, 0x20, 0x69, 0x66, 0x20, + 0x6d, 0x6f, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, + 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, + 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, + 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, + 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, + 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, + 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, + 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, + 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, + 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, + 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, + 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, + 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, + 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x2f, 0x6d, 0x61, + 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, + 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, + 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, + 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, + 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, + 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, + 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, + 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, + 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, + 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, + 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, + 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, + 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, + 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 
0x61, 0x79, 0x3a, 0x20, 0x6e, + 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, + 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a +}; +unsigned int index_html_len = 33456; diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index 647abe116..e09b3c8c5 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -1,4 +1,1903 @@ -const char index_js[] = R"LITERAL( -function t(){throw new Error("Cycle detected")}const n=Symbol.for("preact-signals");function e(){if(f>1){f--;return}let t,n=!1;while(void 0!==o){let _=o;o=void 0;s++;while(void 0!==_){const i=_.o;_.o=void 0;_.f&=-3;if(!(8&_.f)&&p(_))try{_.c()}catch(e){if(!n){t=e;n=!0}}_=i}}s=0;f--;if(n)throw t}function _(t){if(f>0)return t();f++;try{return t()}finally{e()}}let i,o,r=0;function u(t){if(r>0)return t();const n=i;i=void 0;r++;try{return t()}finally{r--;i=n}}let f=0,s=0,l=0;function c(t){if(void 0===i)return;let n=t.n;if(void 0===n||n.t!==i){n={i:0,S:t,p:i.s,n:void 0,t:i,e:void 0,x:void 0,r:n};if(void 0!==i.s)i.s.n=n;i.s=n;t.n=n;if(32&i.f)t.S(n);return n}else if(-1===n.i){n.i=0;if(void 0!==n.n){n.n.p=n.p;if(void 0!==n.p)n.p.n=n.n;n.p=i.s;n.n=void 0;i.s.n=n;i.s=n}return n}}function h(t){this.v=t;this.i=0;this.n=void 0;this.t=void 0}h.prototype.brand=n;h.prototype.h=function(){return!0};h.prototype.S=function(t){if(this.t!==t&&void 0===t.e){t.x=this.t;if(void 0!==this.t)this.t.e=t;this.t=t}};h.prototype.U=function(t){if(void 0!==this.t){const n=t.e,e=t.x;if(void 0!==n){n.x=e;t.e=void 0}if(void 0!==e){e.e=n;t.x=void 0}if(t===this.t)this.t=e}};h.prototype.subscribe=function(t){const n=this;return w((function(){const e=n.value,_=32&this.f;this.f&=-33;try{t(e)}finally{this.f|=_}}))};h.prototype.valueOf=function(){return this.value};h.prototype.toString=function(){return this.value+""};h.prototype.toJSON=function(){return this.value};h.prototype.peek=function(){return this.v};Object.defineProperty(h.prototype,"value",{get(){const t=c(this);if(void 0!==t)t.i=this.i;return this.v},set(n){if(i instanceof y)!function(){throw new Error("Computed cannot have side-effects")}();if(n!==this.v){if(s>100)t();this.v=n;this.i++;l++;f++;try{for(let t=this.t;void 0!==t;t=t.x)t.t.N()}finally{e()}}}});function a(t){return new h(t)}function p(t){for(let n=t.s;void 0!==n;n=n.n)if(n.S.i!==n.i||!n.S.h()||n.S.i!==n.i)return!0;return!1}function d(t){for(let n=t.s;void 0!==n;n=n.n){const e=n.S.n;if(void 0!==e)n.r=e;n.S.n=n;n.i=-1;if(void 0===n.n){t.s=n;break}}}function v(t){let n,e=t.s;while(void 0!==e){const t=e.p;if(-1===e.i){e.S.U(e);if(void 0!==t)t.n=e.n;if(void 0!==e.n)e.n.p=t}else n=e;e.S.n=e.r;if(void 0!==e.r)e.r=void 0;e=t}t.s=n}function y(t){h.call(this,void 0);this.x=t;this.s=void 0;this.g=l-1;this.f=4}(y.prototype=new h).h=function(){this.f&=-3;if(1&this.f)return!1;if(32==(36&this.f))return!0;this.f&=-5;if(this.g===l)return!0;this.g=l;this.f|=1;if(this.i>0&&!p(this)){this.f&=-2;return!0}const t=i;try{d(this);i=this;const t=this.x();if(16&this.f||this.v!==t||0===this.i){this.v=t;this.f&=-17;this.i++}}catch(t){this.v=t;this.f|=16;this.i++}i=t;v(this);this.f&=-2;return!0};y.prototype.S=function(t){if(void 0===this.t){this.f|=36;for(let t=this.s;void 0!==t;t=t.n)t.S.S(t)}h.prototype.S.call(this,t)};y.prototype.U=function(t){if(void 
0!==this.t){h.prototype.U.call(this,t);if(void 0===this.t){this.f&=-33;for(let t=this.s;void 0!==t;t=t.n)t.S.U(t)}}};y.prototype.N=function(){if(!(2&this.f)){this.f|=6;for(let t=this.t;void 0!==t;t=t.x)t.t.N()}};y.prototype.peek=function(){if(!this.h())t();if(16&this.f)throw this.v;return this.v};Object.defineProperty(y.prototype,"value",{get(){if(1&this.f)t();const n=c(this);this.h();if(void 0!==n)n.i=this.i;if(16&this.f)throw this.v;return this.v}});function m(t){return new y(t)}function g(t){const n=t.u;t.u=void 0;if("function"==typeof n){f++;const _=i;i=void 0;try{n()}catch(n){t.f&=-2;t.f|=8;b(t);throw n}finally{i=_;e()}}}function b(t){for(let n=t.s;void 0!==n;n=n.n)n.S.U(n);t.x=void 0;t.s=void 0;g(t)}function k(t){if(i!==this)throw new Error("Out-of-order effect");v(this);i=t;this.f&=-2;if(8&this.f)b(this);e()}function S(t){this.x=t;this.u=void 0;this.s=void 0;this.o=void 0;this.f=32}S.prototype.c=function(){const t=this.S();try{if(8&this.f)return;if(void 0===this.x)return;const n=this.x();if("function"==typeof n)this.u=n}finally{t()}};S.prototype.S=function(){if(1&this.f)t();this.f|=1;this.f&=-9;g(this);d(this);f++;const n=i;i=this;return k.bind(this,n)};S.prototype.N=function(){if(!(2&this.f)){this.f|=2;this.o=o;o=this}};S.prototype.d=function(){this.f|=8;if(!(1&this.f))b(this)};function w(t){const n=new S(t);try{n.c()}catch(t){n.d();throw t}return n.d.bind(n)}var x,C,E,U,H,P,N,$,D,T={},V=[],A=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i,F=Array.isArray;function M(t,n){for(var e in n)t[e]=n[e];return t}function W(t){var n=t.parentNode;n&&n.removeChild(t)}function L(t,n,e){var _,i,o,r={};for(o in n)"key"==o?_=n[o]:"ref"==o?i=n[o]:r[o]=n[o];if(arguments.length>2&&(r.children=arguments.length>3?x.call(arguments,2):e),"function"==typeof t&&null!=t.defaultProps)for(o in t.defaultProps)void 0===r[o]&&(r[o]=t.defaultProps[o]);return O(t,r,_,i,null)}function O(t,n,e,_,i){var o={type:t,props:n,key:e,ref:_,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,constructor:void 0,__v:null==i?++E:i,__i:-1,__u:0};return null==i&&null!=C.vnode&&C.vnode(o),o}function R(){return{current:null}}function j(t){return t.children}function I(t,n){this.props=t,this.context=n}function q(t,n){if(null==n)return t.__?q(t.__,t.__i+1):null;for(var e;nn&&H.sort($));z.__r=0}function J(t,n,e,_,i,o,r,u,f,s,l){var c,h,a,p,d,v=_&&_.__k||V,y=n.length;for(e.__d=f,K(e,n,v),f=e.__d,c=0;c0?O(i.type,i.props,i.key,i.ref?i.ref:null,i.__v):i)?(i.__=t,i.__b=t.__b+1,u=Y(i,e,r=_+c,l),i.__i=u,o=null,-1!==u&&(l--,(o=e[u])&&(o.__u|=131072)),null==o||null===o.__v?(-1==u&&c--,"function"!=typeof i.type&&(i.__u|=65536)):u!==r&&(u===r+1?c++:u>r?l>f-r?c+=u-r:c--:c=u(null!=f&&0==(131072&f.__u)?1:0))for(;r>=0||u=0){if((f=n[r])&&0==(131072&f.__u)&&i==f.key&&o===f.type)return r;r--}if(u2&&(u.children=arguments.length>3?x.call(arguments,2):e),O(t.type,u,_||t.key,i||t.ref,null)}function ht(t,n){var e={__c:n="__cC"+D++,__:t,Consumer:function(t,n){return t.children(n)},Provider:function(t){var e,_;return this.getChildContext||(e=[],(_={})[n]=this,this.getChildContext=function(){return _},this.shouldComponentUpdate=function(t){this.props.value!==t.value&&e.some((function(t){t.__e=!0,G(t)}))},this.sub=function(t){e.push(t);var n=t.componentWillUnmount;t.componentWillUnmount=function(){e.splice(e.indexOf(t),1),n&&n.call(t)}}),t.children}};return e.Provider.__=e.Consumer.contextType=e}x=V.slice,C={__e:function(t,n,e,_){for(var 
i,o,r;n=n.__;)if((i=n.__c)&&!i.__)try{if((o=i.constructor)&&null!=o.getDerivedStateFromError&&(i.setState(o.getDerivedStateFromError(t)),r=i.__d),null!=i.componentDidCatch&&(i.componentDidCatch(t,_||{}),r=i.__d),r)return i.__E=i}catch(n){t=n}throw t}},E=0,U=function(t){return null!=t&&null==t.constructor},I.prototype.setState=function(t,n){var e;e=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=M({},this.state),"function"==typeof t&&(t=t(M({},e),this.props)),t&&M(e,t),null!=t&&this.__v&&(n&&this._sb.push(n),G(this))},I.prototype.forceUpdate=function(t){this.__v&&(this.__e=!0,t&&this.__h.push(t),G(this))},I.prototype.render=j,H=[],N="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,$=function(t,n){return t.__v.__b-n.__v.__b},z.__r=0,D=0;var at,pt,dt,vt,yt=0,mt=[],gt=[],bt=C.__b,kt=C.__r,St=C.diffed,wt=C.__c,xt=C.unmount;function Ct(t,n){C.__h&&C.__h(pt,t,yt||n),yt=0;var e=pt.__H||(pt.__H={__:[],__h:[]});return t>=e.__.length&&e.__.push({__V:gt}),e.__[t]}function Et(t){return yt=1,Ut(qt,t)}function Ut(t,n,e){var _=Ct(at++,2);if(_.t=t,!_.__c&&(_.__=[e?e(n):qt(void 0,n),function(t){var n=_.__N?_.__N[0]:_.__[0],e=_.t(n,t);n!==e&&(_.__N=[e,_.__[1]],_.__c.setState({}))}],_.__c=pt,!pt.u)){var i=function(t,n,e){if(!_.__c.__H)return!0;var i=_.__c.__H.__.filter((function(t){return t.__c}));if(i.every((function(t){return!t.__N})))return!o||o.call(this,t,n,e);var r=!1;return i.forEach((function(t){if(t.__N){var n=t.__[0];t.__=t.__N,t.__N=void 0,n!==t.__[0]&&(r=!0)}})),!(!r&&_.__c.props===t)&&(!o||o.call(this,t,n,e))};pt.u=!0;var o=pt.shouldComponentUpdate,r=pt.componentWillUpdate;pt.componentWillUpdate=function(t,n,e){if(this.__e){var _=o;o=void 0,i(t,n,e),o=_}r&&r.call(this,t,n,e)},pt.shouldComponentUpdate=i}return _.__N||_.__}function Ht(t,n){var e=Ct(at++,3);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__H.__h.push(e))}function Pt(t,n){var e=Ct(at++,4);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__h.push(e))}function Nt(t){return yt=5,Dt((function(){return{current:t}}),[])}function $t(t,n,e){yt=6,Pt((function(){return"function"==typeof t?(t(n()),function(){return t(null)}):t?(t.current=n(),function(){return t.current=null}):void 0}),null==e?e:e.concat(t))}function Dt(t,n){var e=Ct(at++,7);return It(e.__H,n)?(e.__V=t(),e.i=n,e.__h=t,e.__V):e.__}function Tt(t,n){return yt=8,Dt((function(){return t}),n)}function Vt(t){var n=pt.context[t.__c],e=Ct(at++,9);return e.c=t,n?(null==e.__&&(e.__=!0,n.sub(pt)),n.props.value):t.__}function At(t,n){C.useDebugValue&&C.useDebugValue(n?n(t):t)}function Ft(t){var n=Ct(at++,10),e=Et();return n.__=t,pt.componentDidCatch||(pt.componentDidCatch=function(t,_){n.__&&n.__(t,_),e[1](t)}),[e[0],function(){e[1](void 0)}]}function Mt(){var t=Ct(at++,11);if(!t.__){for(var n=pt.__v;null!==n&&!n.__m&&null!==n.__;)n=n.__;var e=n.__m||(n.__m=[0,0]);t.__="P"+e[0]+"-"+e[1]++}return t.__}function Wt(){for(var t;t=mt.shift();)if(t.__P&&t.__H)try{t.__H.__h.forEach(Rt),t.__H.__h.forEach(jt),t.__H.__h=[]}catch(u){t.__H.__h=[],C.__e(u,t.__v)}}C.__b=function(t){pt=null,bt&&bt(t)},C.__r=function(t){kt&&kt(t),at=0;var n=(pt=t.__c).__H;n&&(dt===pt?(n.__h=[],pt.__h=[],n.__.forEach((function(t){t.__N&&(t.__=t.__N),t.__V=gt,t.__N=t.i=void 0}))):(n.__h.forEach(Rt),n.__h.forEach(jt),n.__h=[],at=0)),dt=pt},C.diffed=function(t){St&&St(t);var n=t.__c;n&&n.__H&&(n.__H.__h.length&&(1!==mt.push(n)&&vt===C.requestAnimationFrame||((vt=C.requestAnimationFrame)||Ot)(Wt)),n.__H.__.forEach((function(t){t.i&&(t.__H=t.i),t.__V!==gt&&(t.__=t.__V),t.i=void 
0,t.__V=gt}))),dt=pt=null},C.__c=function(t,n){n.some((function(t){try{t.__h.forEach(Rt),t.__h=t.__h.filter((function(t){return!t.__||jt(t)}))}catch(l){n.some((function(t){t.__h&&(t.__h=[])})),n=[],C.__e(l,t.__v)}})),wt&&wt(t,n)},C.unmount=function(t){xt&&xt(t);var n,e=t.__c;e&&e.__H&&(e.__H.__.forEach((function(t){try{Rt(t)}catch(t){n=t}})),e.__H=void 0,n&&C.__e(n,e.__v))};var Lt="function"==typeof requestAnimationFrame;function Ot(t){var n,e=function(){clearTimeout(_),Lt&&cancelAnimationFrame(n),setTimeout(t)},_=setTimeout(e,100);Lt&&(n=requestAnimationFrame(e))}function Rt(t){var n=pt,e=t.__c;"function"==typeof e&&(t.__c=void 0,e()),pt=n}function jt(t){var n=pt;t.__c=t.__(),pt=n}function It(t,n){return!t||t.length!==n.length||n.some((function(n,e){return n!==t[e]}))}function qt(t,n){return"function"==typeof n?n(t):n}function Bt(t,n){C[t]=n.bind(null,C[t]||(()=>{}))}let Gt,zt;function Jt(t){if(zt)zt();zt=t&&t.S()}function Kt({data:t}){const n=Xt(t);n.value=t;const e=Dt(()=>{let t=this.__v;while(t=t.__)if(t.__c){t.__c.__$f|=4;break}this.__$u.c=()=>{var t;if(!U(e.peek())&&3===(null==(t=this.base)?void 0:t.nodeType))this.base.data=e.peek();else{this.__$f|=1;this.setState({})}};return m(()=>{let t=n.value.value;return 0===t?0:!0===t?"":t||""})},[]);return e.value}Kt.displayName="_st";Object.defineProperties(h.prototype,{constructor:{configurable:!0,value:void 0},type:{configurable:!0,value:Kt},props:{configurable:!0,get(){return{data:this}}},__b:{configurable:!0,value:1}});Bt("__b",(t,n)=>{if("string"==typeof n.type){let t,e=n.props;for(let _ in e){if("children"===_)continue;let i=e[_];if(i instanceof h){if(!t)n.__np=t={};t[_]=i;e[_]=i.peek()}}}t(n)});Bt("__r",(t,n)=>{Jt();let e,_=n.__c;if(_){_.__$f&=-2;e=_.__$u;if(void 0===e)_.__$u=e=function(t){let n;w((function(){n=this}));n.c=()=>{_.__$f|=1;_.setState({})};return n}()}Gt=_;Jt(e);t(n)});Bt("__e",(t,n,e,_)=>{Jt();Gt=void 0;t(n,e,_)});Bt("diffed",(t,n)=>{Jt();Gt=void 0;let e;if("string"==typeof n.type&&(e=n.__e)){let t=n.__np,_=n.props;if(t){let n=e.U;if(n)for(let e in n){let _=n[e];if(void 0!==_&&!(e in t)){_.d();n[e]=void 0}}else{n={};e.U=n}for(let i in t){let o=n[i],r=t[i];if(void 0===o){o=Qt(e,i,r,_);n[i]=o}else o.o(r,_)}}}t(n)});function Qt(t,n,e,_){const i=n in t&&void 0===t.ownerSVGElement,o=a(e);return{o:(t,n)=>{o.value=t;_=n},d:w(()=>{const e=o.value.value;if(_[n]!==e){_[n]=e;if(i)t[n]=e;else if(e)t.setAttribute(n,e);else t.removeAttribute(n)}})}}Bt("unmount",(t,n)=>{if("string"==typeof n.type){let t=n.__e;if(t){const n=t.U;if(n){t.U=void 0;for(let t in n){let e=n[t];if(e)e.d()}}}}else{let t=n.__c;if(t){const n=t.__$u;if(n){t.__$u=void 0;n.d()}}}t(n)});Bt("__h",(t,n,e,_)=>{if(_<3||9===_)n.__$f|=2;t(n,e,_)});I.prototype.shouldComponentUpdate=function(t,n){const e=this.__$u;if(!(e&&void 0!==e.s||4&this.__$f))return!0;if(3&this.__$f)return!0;for(let _ in n)return!0;for(let _ in t)if("__source"!==_&&t[_]!==this.props[_])return!0;for(let _ in this.props)if(!(_ in t))return!0;return!1};function Xt(t){return Dt(()=>a(t),[])}function Yt(t){const n=Nt(t);n.current=t;Gt.__$f|=4;return Dt(()=>m(()=>n.current()),[])}function Zt(t){const n=Nt(t);n.current=t;Ht(()=>w(()=>n.current()),[])}var tn=function(t,n,e,_){var i;n[0]=0;for(var o=1;o=5&&((i||!t&&5===_)&&(r.push(_,0,i,e),_=6),t&&(r.push(_,t,0,e),_=6)),i=""},f=0;f"===n?(_=1,i=""):i=n+i[0]:o?n===o?o="":i+=n:'"'===n||"'"===n?o=n:">"===n?(u(),_=1):_&&("="===n?(_=5,e=i,i=""):"/"===n&&(_<5||">"===t[f][s+1])?(u(),3===_&&(r=r[0]),_=r,(r=r[0]).push(2,0,_),_=0):" 
"===n||"\t"===n||"\n"===n||"\r"===n?(u(),_=2):i+=n),3===_&&"!--"===i&&(_=4,r=r[0])}return u(),r}(t)),n),arguments,[])).length>1?n:n[0]}var _n=en.bind(L);export{I as Component,j as Fragment,h as Signal,_ as batch,ct as cloneElement,m as computed,ht as createContext,L as createElement,R as createRef,w as effect,L as h,_n as html,lt as hydrate,U as isValidElement,C as options,st as render,a as signal,X as toChildArray,u as untracked,Tt as useCallback,Yt as useComputed,Vt as useContext,At as useDebugValue,Ht as useEffect,Ft as useErrorBoundary,Mt as useId,$t as useImperativeHandle,Pt as useLayoutEffect,Dt as useMemo,Ut as useReducer,Nt as useRef,Xt as useSignal,Zt as useSignalEffect,Et as useState}; -)LITERAL"; -unsigned int index_js_len = sizeof(index_js); +unsigned char index_js[] = { + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, + 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, + 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, + 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, + 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, + 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, + 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, + 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, + 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, + 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, + 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x6c, 
0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, + 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, + 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, + 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, + 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, + 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, + 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, + 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, + 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, + 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, + 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, + 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, + 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, + 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, + 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 
0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, + 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, + 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, + 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, + 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, + 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, + 0x74, 0x28, 0x6e, 
0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, + 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, + 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 0x2b, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, + 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, + 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, + 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, + 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, + 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, + 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, + 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, + 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, + 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x65, 0x2e, 
0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, + 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, + 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, + 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, + 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, + 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, + 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, + 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, + 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, + 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, + 0x36, 0x3b, 0x66, 
0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, + 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, + 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x66, 0x6f, + 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, + 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, + 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x36, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, + 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, + 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, 0x28, 0x29, 0x29, + 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, + 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, + 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, + 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, 0x29, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, + 0x7b, 0x72, 0x65, 
0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x79, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x67, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, 0x75, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x29, 0x7b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x28, + 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, 0x66, 0x7c, 0x3d, + 0x38, 0x3b, 0x62, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, + 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x69, + 0x3d, 0x5f, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, + 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, + 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x67, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, 0x29, 0x3b, 0x76, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x69, 0x66, + 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x62, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, 0x33, 0x32, + 0x7d, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x69, + 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 
0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, 0x7d, 0x7d, 0x3b, + 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, + 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x39, 0x3b, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x64, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x7d, 0x3b, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, + 0x6f, 0x3b, 0x6f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x3b, 0x53, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x69, 0x66, + 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, + 0x29, 0x62, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x77, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, + 0x53, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x64, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, 0x76, 0x61, 0x72, + 0x20, 0x78, 0x2c, 0x43, 0x2c, 0x45, 0x2c, 0x55, 0x2c, 0x48, 0x2c, 0x50, + 0x2c, 0x4e, 0x2c, 0x24, 0x2c, 0x44, 0x2c, 0x54, 0x3d, 0x7b, 0x7d, 0x2c, + 0x56, 0x3d, 0x5b, 0x5d, 0x2c, 0x41, 0x3d, 0x2f, 0x61, 0x63, 0x69, 0x74, + 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, 0x7c, 0x6e, 0x7c, + 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, 0x67, 0x72, 0x69, + 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, 0x7c, 0x6e, 0x74, + 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, 0x7c, 0x7a, 0x6f, + 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, 0x65, 0x72, 0x61, + 0x2f, 0x69, 0x2c, 0x46, 0x3d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, + 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, + 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, + 0x28, 0x74, 0x2c, 
0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, 0x7d, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x22, 0x6b, + 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, + 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, + 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, 0x5d, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, + 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, + 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x5b, 0x6f, 0x5d, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x7b, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, 0x72, 0x65, 0x66, + 0x3a, 0x5f, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, + 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, + 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x5f, 0x5f, 0x76, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x69, 0x3f, 0x2b, 0x2b, 0x45, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x69, 0x3a, + 0x2d, 0x31, 0x2c, 0x5f, 0x5f, 0x75, 0x3a, 0x30, 0x7d, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, 0x76, 0x6e, + 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, + 0x28, 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x3d, 
0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x71, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x69, 0x2b, + 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, + 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, + 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x71, 0x28, 0x74, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, + 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, + 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, + 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x29, 0x7b, 0x28, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x48, 0x2e, 0x70, 0x75, 0x73, + 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, + 0x2b, 0x2b, 0x7c, 0x7c, 0x50, 0x21, 0x3d, 0x3d, 0x43, 0x2e, 0x64, 0x65, + 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x50, 0x3d, 0x43, 0x2e, + 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, 0x7a, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, + 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, + 0x24, 0x29, 0x3b, 0x74, 0x3d, 0x48, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, + 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, + 0x6e, 0x3d, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x5f, + 0x3d, 0x76, 0x6f, 
0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x28, 0x69, + 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x75, 0x3d, 0x5b, 0x5d, 0x2c, 0x66, 0x3d, 0x5b, + 0x5d, 0x2c, 0x28, 0x72, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, + 0x26, 0x28, 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, + 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, + 0x31, 0x2c, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, + 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, 0x5f, 0x29, 0x2c, 0x5f, 0x74, + 0x28, 0x72, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, + 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x2e, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x2c, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x75, + 0x3f, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x3f, 0x71, 0x28, 0x69, 0x29, + 0x3a, 0x6f, 0x2c, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x66, 0x29, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x2e, + 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x3d, 0x5f, + 0x2c, 0x69, 0x74, 0x28, 0x75, 0x2c, 0x5f, 0x2c, 0x66, 0x29, 0x2c, 0x5f, + 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x6f, 0x26, 0x26, 0x42, 0x28, 0x5f, + 0x29, 0x29, 0x2c, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x6e, 0x26, 0x26, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x24, 0x29, + 0x29, 0x3b, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, + 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x3d, + 0x5f, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x56, 0x2c, + 0x79, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x4b, + 0x28, 0x65, 0x2c, 0x6e, 0x2c, 0x76, 0x29, 0x2c, 0x66, 0x3d, 0x65, 0x2e, + 0x5f, 0x5f, 0x64, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x79, 0x3b, + 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x61, + 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x29, 0x26, 0x26, + 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x28, 0x68, 0x3d, 0x2d, 0x31, + 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x3f, 0x54, 0x3a, 0x76, + 0x5b, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x7c, 0x7c, 0x54, 0x2c, 0x61, + 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x63, 0x2c, 0x5f, 0x74, 0x28, 0x74, 0x2c, + 0x61, 0x2c, 0x68, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, + 0x5f, 0x65, 0x2c, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x68, 0x2e, + 0x72, 0x65, 0x66, 0x21, 0x3d, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, + 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x68, + 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, + 0x2c, 0x6c, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x61, 0x2e, 0x72, 0x65, + 0x66, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x70, 0x2c, 0x61, + 0x29, 0x29, 0x2c, 
0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x64, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x64, 0x3d, + 0x70, 0x29, 0x2c, 0x36, 0x35, 0x35, 0x33, 0x36, 0x26, 0x61, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x7c, 0x68, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x3d, 0x3d, + 0x61, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x66, 0x3d, 0x51, 0x28, 0x61, 0x2c, + 0x66, 0x2c, 0x74, 0x29, 0x3a, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x66, + 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3a, 0x70, 0x26, 0x26, 0x28, 0x66, + 0x3d, 0x70, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, + 0x6e, 0x67, 0x29, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, + 0x2d, 0x31, 0x39, 0x36, 0x36, 0x30, 0x39, 0x29, 0x3b, 0x65, 0x2e, 0x5f, + 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x64, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, + 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x3d, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x73, 0x3d, 0x65, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x6c, 0x3d, 0x73, 0x2c, 0x63, + 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, + 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x66, 0x3b, + 0x5f, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x69, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x69, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x29, + 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, + 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, + 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, 0x6e, 0x74, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, + 0x7c, 0x69, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, + 0x6f, 0x72, 0x3d, 0x3d, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3f, 0x4f, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x29, 0x3a, 0x46, 0x28, 0x69, + 0x29, 0x3f, 0x4f, 0x28, 0x6a, 0x2c, 0x7b, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x3a, 0x69, 0x7d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x26, 0x26, 0x69, + 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, 0x69, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2c, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, + 0x69, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3f, + 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, + 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x69, 0x29, 0x3f, 0x28, 0x69, 0x2e, + 0x5f, 0x5f, 0x3d, 
0x74, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x75, 0x3d, 0x59, 0x28, 0x69, + 0x2c, 0x65, 0x2c, 0x72, 0x3d, 0x5f, 0x2b, 0x63, 0x2c, 0x6c, 0x29, 0x2c, + 0x69, 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x75, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x2d, 0x31, 0x21, 0x3d, 0x3d, 0x75, 0x26, 0x26, 0x28, + 0x6c, 0x2d, 0x2d, 0x2c, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x75, 0x5d, 0x29, + 0x26, 0x26, 0x28, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x31, 0x33, + 0x31, 0x30, 0x37, 0x32, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x6f, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6f, + 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x75, 0x26, + 0x26, 0x63, 0x2d, 0x2d, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, 0x29, 0x3a, + 0x75, 0x21, 0x3d, 0x3d, 0x72, 0x26, 0x26, 0x28, 0x75, 0x3d, 0x3d, 0x3d, + 0x72, 0x2b, 0x31, 0x3f, 0x63, 0x2b, 0x2b, 0x3a, 0x75, 0x3e, 0x72, 0x3f, + 0x6c, 0x3e, 0x66, 0x2d, 0x72, 0x3f, 0x63, 0x2b, 0x3d, 0x75, 0x2d, 0x72, + 0x3a, 0x63, 0x2d, 0x2d, 0x3a, 0x63, 0x3d, 0x75, 0x3c, 0x72, 0x26, 0x26, + 0x75, 0x3d, 0x3d, 0x72, 0x2d, 0x31, 0x3f, 0x75, 0x2d, 0x72, 0x3a, 0x30, + 0x2c, 0x75, 0x21, 0x3d, 0x3d, 0x5f, 0x2b, 0x63, 0x26, 0x26, 0x28, 0x69, + 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, + 0x29, 0x29, 0x3a, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x2e, 0x6b, 0x65, 0x79, + 0x26, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6f, 0x2e, + 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, 0x29, 0x29, + 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x2c, 0x21, 0x31, 0x29, 0x2c, + 0x65, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6c, 0x2d, + 0x2d, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6c, 0x29, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x73, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, + 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x28, + 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, + 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, + 0x29, 0x29, 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x29, 0x29, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, + 0x69, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x69, 0x3d, 0x30, 0x3b, + 0x5f, 0x26, 0x26, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, + 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, + 0x3d, 0x51, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x29, + 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x21, + 0x3d, 0x6e, 0x26, 
0x26, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, + 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x65, 0x2c, 0x6e, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x6e, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, + 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x3d, 0x6e, 0x7c, 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, + 0x7c, 0x7c, 0x28, 0x46, 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x58, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, + 0x29, 0x3a, 0x6e, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, + 0x2c, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x59, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x69, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, + 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, + 0x31, 0x2c, 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, + 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x3d, 0x66, 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, + 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, + 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x66, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x3f, 0x31, 0x3a, + 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, 0x3e, 0x3d, 0x30, + 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, 0x30, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, 0x5d, 0x29, 0x26, + 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, + 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, + 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x72, 0x3b, 0x72, 0x2d, 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, + 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, + 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, 0x75, + 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, 0x31, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, + 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x3f, 0x22, 0x22, 
0x3a, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, + 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, + 0x41, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, + 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, + 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, + 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x5f, 0x26, + 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, + 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x2c, + 0x5f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x5f, + 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x5f, 0x26, 0x26, 0x65, + 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, 0x5f, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, + 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, + 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, + 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x28, 0x50, 0x6f, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, + 0x29, 0x24, 0x7c, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x24, 0x31, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, + 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, + 0x29, 0x69, 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, + 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, + 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, + 0x63, 0x65, 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, + 0x74, 0x2e, 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, + 0x6e, 0x2b, 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x5f, 0x3f, 0x65, + 0x2e, 0x75, 0x3d, 0x5f, 0x2e, 0x75, 0x3a, 0x28, 0x65, 0x2e, 0x75, 0x3d, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2c, 0x74, + 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x69, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, + 0x48, 0x7c, 0x3a, 0x68, 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, + 0x72, 0x65, 0x70, 
0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, + 0x65, 0x66, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, + 0x73, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, + 0x72, 0x6d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, + 0x62, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, + 0x26, 0x22, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, + 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, + 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, + 0x72, 0x6f, 0x6c, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, + 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, + 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, + 0x22, 0x3a, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, + 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, + 0x26, 0x26, 0x22, 0x2d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, + 0x3f, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, + 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, + 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x74, 0x2e, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x74, 0x3c, 0x3d, 0x6e, 0x2e, 0x75, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x74, 0x3d, 0x44, + 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x28, 0x43, 0x2e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x74, + 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x43, 0x2e, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x63, 0x2c, + 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, 0x2c, + 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x77, 0x2c, + 0x78, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x2e, 0x63, 0x6f, 
0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x31, 0x32, 0x38, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x26, + 0x28, 0x66, 0x3d, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x65, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x3d, 0x6e, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x5d, 0x29, 0x2c, 0x28, + 0x6c, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x6c, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x45, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, + 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, + 0x28, 0x6c, 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x54, 0x79, 0x70, 0x65, 0x29, 0x26, 0x26, 0x5f, 0x5b, 0x6c, 0x2e, 0x5f, + 0x5f, 0x63, 0x5d, 0x2c, 0x67, 0x3d, 0x6c, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x6c, 0x2e, 0x5f, 0x5f, 0x3a, 0x5f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, + 0x3f, 0x76, 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, + 0x5f, 0x5f, 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, + 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, + 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, + 0x20, 0x49, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, + 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, + 0x2c, 0x6d, 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, + 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x6e, 0x3d, 0x5f, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, + 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, + 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, + 0x73, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x29, 0x29, 0x2c, 0x4d, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, + 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, + 0x73, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, + 0x2c, 0x61, 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, + 0x3d, 0x63, 0x2e, 
0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x76, 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, + 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, + 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, + 0x26, 0x79, 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, + 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, + 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, + 0x2c, 0x67, 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, + 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, + 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, + 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, + 0x5f, 0x76, 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, + 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, + 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, + 0x62, 0x2b, 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, + 0x3b, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, + 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, + 0x65, 0x61, 0x6b, 
0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, + 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, + 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, + 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, + 0x64, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, + 0x3d, 0x74, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, + 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, + 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, + 0x6c, 0x3d, 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x29, 0x2c, 0x77, 0x3d, 0x30, 0x3b, 0x77, 0x3c, 0x63, 0x2e, 0x5f, 0x73, + 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x77, 0x2b, 0x2b, + 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x77, 0x5d, 0x29, 0x3b, 0x63, 0x2e, + 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x64, 0x6f, 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x6c, 0x3d, 0x63, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, + 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, + 0x26, 0x2b, 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, + 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, + 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x5f, 0x29, + 0x2c, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, + 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, + 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, + 0x64, 0x3d, 0x63, 
0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, + 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x4a, 0x28, + 0x74, 0x2c, 0x46, 0x28, 0x78, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x6c, 0x26, 0x26, 0x6c, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, + 0x6a, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6c, 0x2e, 0x6b, + 0x65, 0x79, 0x3f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x6c, 0x29, 0x3f, 0x78, + 0x3a, 0x5b, 0x78, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x2c, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, + 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, 0x2d, 0x31, 0x36, 0x31, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x26, 0x26, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, + 0x2c, 0x76, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, + 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x6f, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, + 0x75, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x66, 0x3f, 0x31, + 0x36, 0x30, 0x3a, 0x33, 0x32, 0x2c, 0x6f, 0x5b, 0x6f, 0x2e, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, + 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x6f, 0x74, 0x28, 0x65, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x3b, 0x28, 0x6c, 0x3d, + 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x6c, + 0x28, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x6e, + 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x30, 0x3b, + 0x5f, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, + 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, 0x5f, 0x5d, 0x2c, 0x65, + 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, + 0x29, 0x3b, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x43, 0x2e, 0x5f, + 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3d, 0x6e, 0x2e, + 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 
0x28, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, + 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x73, 0x2c, 0x6c, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, + 0x2c, 0x64, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2c, 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, + 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x28, 0x69, + 0x3d, 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, + 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x6f, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, + 0x69, 0x66, 0x28, 0x28, 0x61, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x26, + 0x26, 0x22, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x3d, 0x3d, 0x21, 0x21, 0x6d, + 0x26, 0x26, 0x28, 0x6d, 0x3f, 0x61, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x3d, 0x3d, 0x6d, 0x3a, 0x33, 0x3d, 0x3d, + 0x3d, 0x61, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x7b, 0x74, 0x3d, 0x61, 0x2c, 0x6f, 0x5b, 0x73, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x79, 0x29, 0x3b, 0x74, 0x3d, 0x69, 0x3f, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, + 0x28, 0x22, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, + 0x2e, 0x77, 0x33, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, + 0x2f, 0x73, 0x76, 0x67, 0x22, 0x2c, 0x6d, 0x29, 0x3a, 0x64, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x6d, 0x2c, 0x79, 0x2e, + 0x69, 0x73, 0x26, 0x26, 0x79, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, + 0x6c, 0x2c, 0x75, 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x76, 0x3d, 0x3d, 0x3d, 0x79, + 0x7c, 0x7c, 0x75, 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, + 0x3d, 0x3d, 0x79, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x3d, 0x79, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, + 0x6f, 0x3d, 0x6f, 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, + 0x29, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, + 0x7c, 0x54, 0x2c, 0x21, 0x75, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x3d, 0x7b, 0x7d, 0x2c, + 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, + 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x76, 0x5b, 0x28, 0x61, 0x3d, 0x74, + 0x2e, 0x61, 0x74, 
0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x5b, + 0x73, 0x5d, 0x29, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3d, 0x61, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x20, + 0x69, 0x6e, 0x20, 0x76, 0x29, 0x61, 0x3d, 0x76, 0x5b, 0x73, 0x5d, 0x2c, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x73, 0x7c, 0x7c, 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x63, 0x3d, 0x61, + 0x3a, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x73, 0x7c, 0x7c, + 0x73, 0x20, 0x69, 0x6e, 0x20, 0x79, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, + 0x2c, 0x73, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x2c, 0x69, 0x29, + 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x79, + 0x29, 0x61, 0x3d, 0x79, 0x5b, 0x73, 0x5d, 0x2c, 0x22, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x68, 0x3d, + 0x61, 0x3a, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, + 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, + 0x4d, 0x4c, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x6c, 0x3d, 0x61, 0x3a, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3d, 0x3d, 0x73, 0x3f, 0x70, 0x3d, + 0x61, 0x3a, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x3d, + 0x3d, 0x73, 0x3f, 0x64, 0x3d, 0x61, 0x3a, 0x22, 0x6b, 0x65, 0x79, 0x22, + 0x3d, 0x3d, 0x3d, 0x73, 0x7c, 0x7c, 0x75, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x7c, 0x7c, 0x76, 0x5b, 0x73, 0x5d, 0x3d, + 0x3d, 0x3d, 0x61, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x73, 0x2c, + 0x61, 0x2c, 0x76, 0x5b, 0x73, 0x5d, 0x2c, 0x69, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x6c, 0x29, 0x75, 0x7c, 0x7c, 0x63, 0x26, 0x26, 0x28, 0x6c, 0x2e, + 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, + 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x7c, 0x7c, 0x6c, 0x2e, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, + 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x29, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x69, + 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x6c, 0x2e, 0x5f, + 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, + 0x3d, 0x5b, 0x5d, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, + 0x63, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, + 0x54, 0x4d, 0x4c, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x4a, 0x28, 0x74, 0x2c, + 0x46, 0x28, 0x68, 0x29, 0x3f, 0x68, 0x3a, 0x5b, 0x68, 0x5d, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, + 0x65, 0x69, 0x67, 0x6e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6d, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x6f, 0x3f, 0x6f, 0x5b, + 0x30, 0x5d, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x26, 0x26, 0x71, 0x28, + 0x65, 0x2c, 0x30, 0x29, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x2c, 0x6e, 0x75, + 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, + 0x6f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2d, 0x2d, + 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, + 0x26, 0x26, 0x57, 0x28, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x3b, 0x75, 0x7c, + 0x7c, 0x28, 0x73, 0x3d, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x70, 0x26, 0x26, + 0x28, 0x70, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x73, 0x5d, 0x7c, 0x7c, 0x22, + 0x70, 0x72, 0x6f, 
0x67, 0x72, 0x65, 0x73, 0x73, 0x22, 0x3d, 0x3d, 0x3d, + 0x6d, 0x26, 0x26, 0x21, 0x70, 0x7c, 0x7c, 0x22, 0x6f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x70, 0x21, 0x3d, + 0x3d, 0x76, 0x5b, 0x73, 0x5d, 0x29, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, + 0x2c, 0x73, 0x2c, 0x70, 0x2c, 0x76, 0x5b, 0x73, 0x5d, 0x2c, 0x21, 0x31, + 0x29, 0x2c, 0x73, 0x3d, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x22, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x64, + 0x26, 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x73, 0x5d, 0x26, 0x26, + 0x74, 0x74, 0x28, 0x74, 0x2c, 0x73, 0x2c, 0x64, 0x2c, 0x76, 0x5b, 0x73, + 0x5d, 0x2c, 0x21, 0x31, 0x29, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x72, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x74, + 0x72, 0x79, 0x7b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, + 0x74, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x3d, 0x6e, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, + 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x65, 0x29, + 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x5f, 0x2c, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x43, 0x2e, 0x75, 0x6e, + 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x43, 0x2e, 0x75, 0x6e, 0x6d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x28, 0x5f, 0x3d, 0x74, + 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x26, 0x26, 0x5f, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, + 0x7c, 0x7c, 0x72, 0x74, 0x28, 0x5f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x6e, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x5f, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x74, + 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, + 0x74, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, + 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, + 0x5f, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x50, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x5f, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x69, 0x3d, + 0x30, 0x3b, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, 0x75, + 0x74, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x7c, 0x7c, + 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, + 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x29, 0x3b, 0x65, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, 0x57, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x65, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x66, 0x74, 
0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x74, + 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x73, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x43, + 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x2c, 0x69, 0x3d, 0x28, 0x5f, 0x3d, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x65, 0x29, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x65, + 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, 0x2c, 0x72, 0x3d, 0x5b, 0x5d, + 0x2c, 0x5f, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x3d, 0x28, 0x21, 0x5f, 0x26, + 0x26, 0x65, 0x7c, 0x7c, 0x6e, 0x29, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x4c, + 0x28, 0x6a, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5b, 0x74, 0x5d, 0x29, + 0x2c, 0x69, 0x7c, 0x7c, 0x54, 0x2c, 0x54, 0x2c, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, + 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x21, + 0x5f, 0x26, 0x26, 0x65, 0x3f, 0x5b, 0x65, 0x5d, 0x3a, 0x69, 0x3f, 0x6e, + 0x75, 0x6c, 0x6c, 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x6e, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6f, 0x2c, 0x21, 0x5f, 0x26, + 0x26, 0x65, 0x3f, 0x65, 0x3a, 0x69, 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x65, + 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, + 0x64, 0x2c, 0x5f, 0x2c, 0x72, 0x29, 0x2c, 0x69, 0x74, 0x28, 0x6f, 0x2c, + 0x74, 0x2c, 0x72, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x73, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x6c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x74, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, + 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x2c, 0x6e, 0x29, + 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, + 0x3f, 0x69, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x75, 0x5b, 0x6f, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, + 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x5b, 0x6f, 0x5d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x72, 0x67, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x2e, 0x6c, 
0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x4f, 0x28, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x75, 0x2c, 0x5f, 0x7c, 0x7c, + 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x7c, 0x7c, 0x74, 0x2e, 0x72, + 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x7b, 0x5f, 0x5f, 0x63, + 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, 0x43, 0x22, 0x2b, 0x44, 0x2b, + 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, 0x43, 0x6f, 0x6e, 0x73, 0x75, + 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x28, + 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, 0x5f, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, + 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x7c, 0x7c, 0x28, 0x65, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x5f, 0x3d, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, + 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x5f, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, + 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x47, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, + 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x75, 0x62, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x65, + 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, + 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x65, 0x2e, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, 0x29, 0x2c, 0x31, 0x29, + 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, + 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, + 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, 0x70, + 0x65, 0x3d, 0x65, 
0x7d, 0x78, 0x3d, 0x56, 0x2e, 0x73, 0x6c, 0x69, 0x63, + 0x65, 0x2c, 0x43, 0x3d, 0x7b, 0x5f, 0x5f, 0x65, 0x3a, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x29, 0x69, 0x66, 0x28, 0x28, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, + 0x29, 0x26, 0x26, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x29, 0x74, 0x72, 0x79, + 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, 0x69, 0x2e, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x29, 0x26, 0x26, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, + 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, + 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x26, 0x26, 0x28, 0x69, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x6f, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x64, 0x29, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x69, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, + 0x63, 0x68, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x74, 0x2c, 0x5f, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x2c, 0x72, 0x3d, + 0x69, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x72, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x69, 0x7d, + 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x6e, + 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x7d, 0x2c, 0x45, + 0x3d, 0x30, 0x2c, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, + 0x63, 0x74, 0x6f, 0x72, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x65, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x73, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x3f, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x4d, 0x28, 0x7b, + 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, + 0x28, 0x74, 0x3d, 0x74, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x65, 0x29, + 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x4d, 0x28, 0x65, 0x2c, 0x74, 0x29, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x6e, 0x26, 0x26, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x6e, 0x29, 0x2c, 0x47, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, + 0x7d, 0x2c, 0x49, 
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, + 0x2c, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x47, 0x28, 0x74, + 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x3d, 0x6a, 0x2c, 0x48, 0x3d, 0x5b, 0x5d, 0x2c, 0x4e, 0x3d, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, + 0x65, 0x3f, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x68, 0x65, 0x6e, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, + 0x65, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x29, 0x29, + 0x3a, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x2c, + 0x24, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x2d, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x7d, 0x2c, 0x7a, 0x2e, 0x5f, 0x5f, + 0x72, 0x3d, 0x30, 0x2c, 0x44, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x61, 0x74, 0x2c, 0x70, 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, 0x74, 0x2c, + 0x79, 0x74, 0x3d, 0x30, 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x67, + 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x62, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, + 0x62, 0x2c, 0x6b, 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, + 0x74, 0x3d, 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x2c, 0x77, + 0x74, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x2c, 0x78, 0x74, 0x3d, 0x43, + 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x43, 0x2e, 0x5f, + 0x5f, 0x68, 0x28, 0x70, 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, + 0x6e, 0x29, 0x2c, 0x79, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x65, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, + 0x2c, 0x5f, 0x5f, 0x68, 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, + 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, + 0x67, 0x74, 0x7d, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x31, 0x2c, 0x55, 0x74, 0x28, 0x71, 0x74, 0x2c, 0x74, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x5f, + 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, 0x5f, 0x3d, + 0x5b, 0x65, 0x3f, 
0x65, 0x28, 0x6e, 0x29, 0x3a, 0x71, 0x74, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x5f, 0x2e, 0x5f, + 0x5f, 0x4e, 0x5b, 0x30, 0x5d, 0x3a, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, + 0x5d, 0x2c, 0x65, 0x3d, 0x5f, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, + 0x3b, 0x6e, 0x21, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x5f, + 0x5f, 0x4e, 0x3d, 0x5b, 0x65, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, + 0x5d, 0x5d, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, + 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, + 0x74, 0x2e, 0x75, 0x29, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x5f, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x5f, 0x2e, 0x5f, + 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, + 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x69, 0x2e, 0x65, 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, + 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, + 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, + 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, + 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, + 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, + 0x21, 0x72, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, + 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, + 0x70, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x6f, 0x3d, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x2c, 0x72, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x3b, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 
0x5f, 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x2c, 0x69, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, + 0x5f, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, + 0x2c, 0x70, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x3d, 0x69, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, + 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x5f, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x43, 0x2e, + 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, + 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, + 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, + 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x50, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, + 0x29, 0x3b, 0x21, 0x43, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x49, 0x74, + 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, + 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, + 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x50, + 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, + 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, + 0x28, 0x74, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x44, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, + 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, + 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x49, 0x74, + 0x28, 0x65, 0x2e, 
0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, + 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, + 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, + 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x38, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, + 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, + 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, + 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, + 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x29, 0x7b, 0x43, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x75, 0x73, 0x65, + 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x28, 0x6e, + 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, + 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, + 0x3d, 0x74, 0x2c, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, + 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, + 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x5f, 0x29, + 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, + 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, + 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, + 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, + 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, + 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, + 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, + 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, + 0x5d, 0x29, 0x3b, 
0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, + 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, + 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x57, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, + 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, + 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, + 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, + 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, + 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, + 0x61, 0x74, 0x63, 0x68, 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x43, 0x2e, 0x5f, + 0x5f, 0x65, 0x28, 0x75, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, + 0x7d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x62, 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, + 0x7d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, + 0x6b, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, + 0x64, 0x74, 0x3d, 0x3d, 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, + 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, + 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, + 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, + 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x6a, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, + 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, 0x7d, 0x2c, 0x43, 0x2e, 0x64, 0x69, + 0x66, 0x66, 0x65, 0x64, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, + 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, + 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, + 0x3d, 0x6d, 0x74, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, + 0x26, 0x76, 0x74, 0x3d, 0x3d, 0x3d, 0x43, 0x2e, 0x72, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x46, 0x72, 0x61, 0x6d, 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, + 0x43, 0x2e, 0x72, 
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, + 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, + 0x7c, 0x7c, 0x4f, 0x74, 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, + 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, + 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x21, 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, + 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, + 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, + 0x74, 0x3d, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x43, + 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, + 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, + 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, + 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x6a, 0x74, + 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x6c, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x68, 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, + 0x5d, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6c, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x77, 0x74, 0x26, + 0x26, 0x77, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x43, 0x2e, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x78, 0x74, 0x26, 0x26, + 0x78, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, + 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, + 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, + 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, + 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, + 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, + 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, + 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, + 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4c, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, + 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, + 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x6c, 0x65, 0x61, 
0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, + 0x5f, 0x29, 0x2c, 0x4c, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, + 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x5f, 0x3d, + 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, + 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4c, 0x74, 0x26, 0x26, 0x28, 0x6e, + 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, + 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x52, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, + 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, + 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x49, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, + 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x43, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, + 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x43, 0x5b, 0x74, + 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, + 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x69, 0x66, 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, + 0x3b, 0x7a, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, + 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, + 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, + 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 
0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, + 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x55, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, + 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, + 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, + 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, + 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, + 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, + 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, + 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, + 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, + 0x65, 0x73, 0x28, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, + 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, + 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, + 0x7d, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x69, 
0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, + 0x6f, 0x66, 0x20, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, + 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, 0x5d, 0x3d, + 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x5f, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x7b, + 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, + 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, + 0x3b, 0x77, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, + 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, 0x73, 0x65, + 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, + 0x47, 0x74, 0x3d, 0x5f, 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, + 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, + 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, + 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, + 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, + 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, + 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, + 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, + 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, + 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, + 0x29, 0x7b, 0x6f, 0x3d, 0x51, 0x74, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x72, + 0x2c, 0x5f, 0x29, 
0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x5f, 0x29, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x6f, 0x3d, 0x61, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, + 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, + 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, + 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, + 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x42, 0x74, 0x28, 0x22, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 0x28, + 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, + 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, + 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x69, + 0x66, 0x28, 0x5f, 0x3c, 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x5f, + 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, + 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 
0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, + 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, + 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x5b, + 0x5f, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x5f, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, + 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x5f, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, + 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x61, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, + 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6d, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, + 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, + 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, + 0x48, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, + 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, + 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, + 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, + 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x30, + 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, + 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, + 0x73, 0x69, 0x67, 
0x6e, 0x28, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, + 0x5f, 0x5b, 0x31, 0x5d, 0x3d, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, + 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x31, 0x5d, 0x5b, + 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, + 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, + 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, + 0x29, 0x2c, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x29, 0x2c, + 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, + 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3d, 0x69, 0x29, 0x29, 0x3a, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x5f, 0x7d, 0x2c, 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, + 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x65, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x6e, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, + 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, + 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, + 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, + 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x3d, + 0x31, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, + 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x69, 0x3d, 0x69, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, + 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, + 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, + 0x69, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x74, + 0x7c, 0x7c, 0x69, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, + 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x69, 0x26, 0x26, 0x21, 0x74, 0x3f, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, + 0x30, 0x2c, 0x69, 0x29, 0x3a, 0x5f, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, + 0x28, 0x69, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, + 0x5f, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x5f, 0x2c, 0x30, 0x2c, 0x69, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, 0x36, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x5f, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, + 0x36, 0x29, 0x29, 
0x2c, 0x69, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, + 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, + 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, + 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x73, + 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x5f, + 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x5f, 0x3d, 0x33, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x5f, 0x3f, + 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x3e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x5f, 0x3d, 0x31, 0x2c, 0x69, + 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x69, 0x3d, 0x6e, 0x2b, 0x69, 0x5b, 0x30, + 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, + 0x22, 0x22, 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, + 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x31, 0x29, 0x3a, 0x5f, + 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x5f, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x69, 0x2c, 0x69, 0x3d, 0x22, 0x22, + 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, + 0x5f, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, + 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x5f, 0x3d, 0x72, 0x2c, 0x28, + 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x5f, 0x29, 0x2c, 0x5f, 0x3d, 0x30, 0x29, + 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, + 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, + 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x28, 0x5f, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, + 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, + 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, + 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, + 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, + 0x3d, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x4c, 0x29, 0x3b, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x6a, 0x20, + 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, + 0x68, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, + 0x5f, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6d, 0x20, 0x61, 0x73, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 
0x75, 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x52, + 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x66, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x55, 0x20, + 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x43, 0x20, 0x61, 0x73, 0x20, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x61, 0x20, 0x61, 0x73, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x20, 0x61, 0x73, + 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x2c, 0x75, 0x20, 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x64, 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x50, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, + 0x75, 0x74, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, + 0x63, 0x65, 0x72, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a +}; +unsigned int index_js_len = 22800; diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp index 83b22d670..0a05c369d 100644 --- a/examples/server/json-schema-to-grammar.mjs.hpp +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -1,115 +1,311 @@ -const char json_schema_to_grammar_mjs[] = R"LITERAL( -const SPACE_RULE = '" "?'; - -const PRIMITIVE_RULES = { - boolean: '("true" | "false") space', - number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space', - integer: '("-"? 
([0-9] | [1-9] [0-9]*)) space', - string: ` "\\"" ( - [^"\\\\] | - "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) - )* "\\"" space`, - null: '"null" space', +unsigned char json_schema_to_grammar_mjs[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, + 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, + 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, + 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, + 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, + 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, + 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, + 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, + 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, + 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, + 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, + 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, + 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, + 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, + 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, + 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, + 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, + 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, + 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, + 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, + 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, + 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, + 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, + 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, + 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, + 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, + 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, + 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, + 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 
0x72, 0x27, + 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, + 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, + 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, + 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, + 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x27, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, + 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, + 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, + 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, + 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, + 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, + 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, + 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, + 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, + 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, + 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, + 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, + 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 
0x3d, 0x3d, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, + 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, + 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, + 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, + 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, + 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, + 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, + 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, + 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, + 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 
0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, + 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, + 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, + 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, + 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, + 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, + 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, + 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, + 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, + 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 
0x73, 0x74, + 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, + 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, + 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, + 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x27, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, + 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, + 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, + 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, + 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, + 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, + 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, + 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, + 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 
0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, + 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, + 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, + 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, + 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, + 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, + 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, + 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, + 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, + 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, + 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, + 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, + 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x5f, 0x61, + 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, + 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, + 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, + 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, + 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, + 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a }; - -const INVALID_RULE_CHARS_RE = /[^\dA-Za-z-]+/g; -const GRAMMAR_LITERAL_ESCAPE_RE = /[\n\r"]/g; -const GRAMMAR_LITERAL_ESCAPES = {'\r': '\\r', '\n': '\\n', '"': '\\"'}; - -export class SchemaConverter { - constructor(propOrder) { - this._propOrder = propOrder || {}; - this._rules = new Map(); - this._rules.set('space', SPACE_RULE); - } - - _formatLiteral(literal) { - const escaped = JSON.stringify(literal).replace( - GRAMMAR_LITERAL_ESCAPE_RE, - m => GRAMMAR_LITERAL_ESCAPES[m] - ); - return `"${escaped}"`; - } - - _addRule(name, rule) { - let escName = name.replace(INVALID_RULE_CHARS_RE, '-'); - let key = escName; - - if (this._rules.has(escName)) { - if (this._rules.get(escName) === rule) { - return key; - } - - let i = 0; - while (this._rules.has(`${escName}${i}`)) { - i += 1; - } - key = `${escName}${i}`; - } - - this._rules.set(key, rule); - return key; - } - - visit(schema, name) { - const schemaType = schema.type; - const ruleName = name || 'root'; - - if (schema.oneOf || schema.anyOf) { - const rule = (schema.oneOf || schema.anyOf).map((altSchema, i) => - this.visit(altSchema, `${name}${name ? "-" : ""}${i}`) - ).join(' | '); - - return this._addRule(ruleName, rule); - } else if ('const' in schema) { - return this._addRule(ruleName, this._formatLiteral(schema.const)); - } else if ('enum' in schema) { - const rule = schema.enum.map(v => this._formatLiteral(v)).join(' | '); - return this._addRule(ruleName, rule); - } else if (schemaType === 'object' && 'properties' in schema) { - // TODO: `required` keyword (from python implementation) - const propOrder = this._propOrder; - const propPairs = Object.entries(schema.properties).sort((a, b) => { - // sort by position in prop_order (if specified) then by key - const orderA = typeof propOrder[a[0]] === 'number' ? 
propOrder[a[0]] : Infinity; - const orderB = typeof propOrder[b[0]] === 'number' ? propOrder[b[0]] : Infinity; - return orderA - orderB || a[0].localeCompare(b[0]); - }); - - let rule = '"{" space'; - propPairs.forEach(([propName, propSchema], i) => { - const propRuleName = this.visit(propSchema, `${name}${name ? "-" : ""}${propName}`); - if (i > 0) { - rule += ' "," space'; - } - rule += ` ${this._formatLiteral(propName)} space ":" space ${propRuleName}`; - }); - rule += ' "}" space'; - - return this._addRule(ruleName, rule); - } else if (schemaType === 'array' && 'items' in schema) { - // TODO `prefixItems` keyword (from python implementation) - const itemRuleName = this.visit(schema.items, `${name}${name ? "-" : ""}item`); - const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`; - return this._addRule(ruleName, rule); - } else { - if (!PRIMITIVE_RULES[schemaType]) { - throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`); - } - return this._addRule( - ruleName === 'root' ? 'root' : schemaType, - PRIMITIVE_RULES[schemaType] - ); - } - } - - formatGrammar() { - let grammar = ''; - this._rules.forEach((rule, name) => { - grammar += `${name} ::= ${rule}\n`; - }); - return grammar; - } -} -)LITERAL"; -unsigned int json_schema_to_grammar_mjs_len = sizeof(json_schema_to_grammar_mjs); +unsigned int json_schema_to_grammar_mjs_len = 3695; From e8dc55d0065d076d4c20f3c4bfca562701b4edfe Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 30 Jan 2024 19:04:37 -0500 Subject: [PATCH 472/811] kompute : llama-bench support and ggml_cpu_has_kompute() (#5226) --- common/common.cpp | 1 + examples/llama-bench/llama-bench.cpp | 15 +++++++++++---- ggml.c | 11 ++++++++++- ggml.h | 1 + llama.cpp | 5 ----- 5 files changed, 23 insertions(+), 10 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 288013676..0dd1c50cf 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1521,6 +1521,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_avx512_vnni: %s\n", ggml_cpu_has_avx512_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); + fprintf(stream, "cpu_has_kompute: %s\n", ggml_cpu_has_kompute() ? "true" : "false"); fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); fprintf(stream, "cpu_has_gpublas: %s\n", ggml_cpu_has_gpublas() ? "true" : "false"); fprintf(stream, "cpu_has_neon: %s\n", ggml_cpu_has_neon() ? 
"true" : "false"); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index f239415d3..542cc7bb8 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -563,6 +563,7 @@ struct test { static const bool cuda; static const bool opencl; static const bool vulkan; + static const bool kompute; static const bool metal; static const bool gpu_blas; static const bool blas; @@ -647,6 +648,9 @@ struct test { if (vulkan) { return "Vulkan"; } + if (kompute) { + return "Kompute"; + } if (metal) { return "Metal"; } @@ -662,7 +666,7 @@ struct test { static const std::vector & get_fields() { static const std::vector fields = { "build_commit", "build_number", - "cuda", "opencl", "vulkan", "metal", "gpu_blas", "blas", + "cuda", "opencl", "vulkan", "kompute", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", @@ -686,8 +690,9 @@ struct test { field == "avg_ns" || field == "stddev_ns") { return INT; } - if (field == "cuda" || field == "opencl" || field == "vulkan"|| field == "metal" || field == "gpu_blas" || field == "blas" || - field == "f16_kv" || field == "no_kv_offload" || field == "mul_mat_q") { + if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || + field == "gpu_blas" || field == "blas" || field == "f16_kv" || field == "no_kv_offload" || + field == "mul_mat_q") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -714,7 +719,8 @@ struct test { } std::vector values = { build_commit, std::to_string(build_number), - std::to_string(cuda), std::to_string(opencl), std::to_string(vulkan), std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), + std::to_string(cuda), std::to_string(opencl), std::to_string(vulkan), std::to_string(vulkan), + std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), @@ -743,6 +749,7 @@ const int test::build_number = LLAMA_BUILD_NUMBER; const bool test::cuda = !!ggml_cpu_has_cublas(); const bool test::opencl = !!ggml_cpu_has_clblast(); const bool test::vulkan = !!ggml_cpu_has_vulkan(); +const bool test::kompute = !!ggml_cpu_has_kompute(); const bool test::metal = !!ggml_cpu_has_metal(); const bool test::gpu_blas = !!ggml_cpu_has_gpublas(); const bool test::blas = !!ggml_cpu_has_blas(); diff --git a/ggml.c b/ggml.c index a7a9ea319..b2c8baaa8 100644 --- a/ggml.c +++ b/ggml.c @@ -20473,6 +20473,14 @@ int ggml_cpu_has_vulkan(void) { #endif } +int ggml_cpu_has_kompute(void) { +#if defined(GGML_USE_KOMPUTE) + return 1; +#else + return 0; +#endif +} + int ggml_cpu_has_sycl(void) { #if defined(GGML_USE_SYCL) return 1; @@ -20482,7 +20490,8 @@ int ggml_cpu_has_sycl(void) { } int ggml_cpu_has_gpublas(void) { - return ggml_cpu_has_cublas() || ggml_cpu_has_clblast() || ggml_cpu_has_vulkan() || ggml_cpu_has_sycl(); + return ggml_cpu_has_cublas() || ggml_cpu_has_clblast() || ggml_cpu_has_vulkan() || ggml_cpu_has_kompute() || + ggml_cpu_has_sycl(); } int ggml_cpu_has_sse3(void) { diff --git a/ggml.h b/ggml.h index bf782e6ad..afc87b843 100644 --- a/ggml.h +++ b/ggml.h @@ -2266,6 +2266,7 @@ extern "C" { GGML_API int ggml_cpu_has_cublas (void); GGML_API int ggml_cpu_has_clblast (void); GGML_API int 
ggml_cpu_has_vulkan (void); + GGML_API int ggml_cpu_has_kompute (void); GGML_API int ggml_cpu_has_gpublas (void); GGML_API int ggml_cpu_has_sse3 (void); GGML_API int ggml_cpu_has_ssse3 (void); diff --git a/llama.cpp b/llama.cpp index 7b9a5c079..a490eeab2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6878,11 +6878,6 @@ static int llama_decode_internal( n_threads = std::min(4, n_threads); } - const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 1; - if ((ggml_cpu_has_cublas() || ggml_cpu_has_vulkan()) && fully_offloaded) { - n_threads = 1; - } - #ifdef GGML_USE_MPI const int64_t n_layer = hparams.n_layer; ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); From 01684139c352561840ae55ec627ab58abc3e06ab Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 31 Jan 2024 10:38:07 +0800 Subject: [PATCH 473/811] support SYCL backend windows build (#5208) * support SYCL backend windows build * add windows build in CI * add for win build CI * correct install oneMKL * fix install issue * fix ci * fix install cmd * fix install cmd * fix install cmd * fix install cmd * fix install cmd * fix win build * fix win build * fix win build * restore other CI part * restore as base * rm no new line * fix no new line issue, add -j * fix grammer issue * allow to trigger manually, fix format issue * fix format * add newline * fix format * fix format * fix format issuse --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- .github/workflows/build.yml | 25 ++++ .github/workflows/editorconfig.yml | 6 + .gitignore | 1 + CMakeLists.txt | 6 +- README_sycl.md => README-sycl.md | 198 +++++++++++++++++++++++++++-- README.md | 4 +- examples/sycl/win-build-sycl.bat | 23 ++++ examples/sycl/win-run-llama2.bat | 13 ++ scripts/install-oneapi.bat | 19 +++ 9 files changed, 281 insertions(+), 14 deletions(-) rename README_sycl.md => README-sycl.md (58%) create mode 100644 examples/sycl/win-build-sycl.bat create mode 100644 examples/sycl/win-run-llama2.bat create mode 100644 scripts/install-oneapi.bat diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fb719a550..c6db1666e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -565,6 +565,31 @@ jobs: path: | cudart-llama-bin-win-cu${{ matrix.cuda }}-x64.zip + windows-latest-cmake-sycl: + runs-on: windows-latest + defaults: + run: + shell: bash + + env: + WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/62641e01-1e8d-4ace-91d6-ae03f7f8a71f/w_BaseKit_p_2024.0.0.49563_offline.exe + WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel + + + steps: + - name: Clone + id: checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install + run: scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL + + - name: Build + id: cmake_build + run: examples/sycl/win-build-sycl.bat + ios-xcode-build: runs-on: macos-latest diff --git a/.github/workflows/editorconfig.yml b/.github/workflows/editorconfig.yml index b4e535acf..0e0993cd4 100644 --- a/.github/workflows/editorconfig.yml +++ b/.github/workflows/editorconfig.yml @@ -1,6 +1,12 @@ name: EditorConfig Checker on: + workflow_dispatch: # allows manual triggering + inputs: + create_release: + description: 'Create new release' + required: true + type: boolean push: branches: - master diff --git a/.gitignore b/.gitignore index cb0069bfb..b84459b92 100644 --- a/.gitignore +++ b/.gitignore @@ -89,3 +89,4 @@ examples/jeopardy/results.txt poetry.lock 
poetry.toml +nppBackup diff --git a/CMakeLists.txt b/CMakeLists.txt index 65a6f3971..15a1101aa 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -507,7 +507,11 @@ if (LLAMA_SYCL) set(GGML_HEADERS_SYCL ggml.h ggml-sycl.h) set(GGML_SOURCES_SYCL ggml-sycl.cpp) - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} sycl OpenCL mkl_core pthread m dl mkl_sycl_blas mkl_intel_ilp64 mkl_tbb_thread) + if (WIN32) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} -fsycl sycl7 OpenCL mkl_sycl_blas_dll.lib mkl_intel_ilp64_dll.lib mkl_sequential_dll.lib mkl_core_dll.lib) + else() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} -fsycl OpenCL mkl_core pthread m dl mkl_sycl_blas mkl_intel_ilp64 mkl_tbb_thread) + endif() endif() if (LLAMA_KOMPUTE) diff --git a/README_sycl.md b/README-sycl.md similarity index 58% rename from README_sycl.md rename to README-sycl.md index d5a1818f5..2b2cfe03a 100644 --- a/README_sycl.md +++ b/README-sycl.md @@ -8,10 +8,14 @@ [Linux](#linux) +[Windows](#windows) + [Environment Variable](#environment-variable) [Known Issue](#known-issue) +[Q&A](#q&a) + [Todo](#todo) ## Background @@ -33,7 +37,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |OS|Status|Verified| |-|-|-| |Linux|Support|Ubuntu 22.04| -|Windows|Ongoing| | +|Windows|Support|Windows 11| ## Intel GPU @@ -42,7 +46,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |-|-|-| |Intel Data Center Max Series| Support| Max 1550| |Intel Data Center Flex Series| Support| Flex 170| -|Intel Arc Series| Support| Arc 770| +|Intel Arc Series| Support| Arc 770, 730M| |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake| |Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7| @@ -131,6 +135,7 @@ cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx #build all binary cmake --build . --config Release -v +cd .. ``` or @@ -195,7 +200,7 @@ GGML_SYCL_DEVICE=0 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building or run by script: ``` -./examples/sycl/run_llama2.sh +./examples/sycl/run-llama2.sh ``` Note: @@ -205,11 +210,175 @@ Note: 5. Check the device ID in output -Like: +Like: ``` Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device ``` +## Windows + +### Setup Environment + +1. Install Intel GPU driver. + +Please install Intel GPU driver by official guide: [Install GPU Drivers](https://www.intel.com/content/www/us/en/products/docs/discrete-gpus/arc/software/drivers.html). + +2. Install Intel® oneAPI Base toolkit. + +a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). + +Recommend to install to default folder: **/opt/intel/oneapi**. + +Following guide uses the default folder as example. If you use other folder, please modify the following guide info with your folder. + +b. Enable oneAPI running environment: + +- In Search, input 'oneAPI'. + +Search & open "Intel oneAPI command prompt for Intel 64 for Visual Studio 2022" + +- In Run: + +In CMD: +``` +"C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 +``` + +c. Check GPU + +In oneAPI command line: + +``` +sycl-ls +``` + +There should be one or more level-zero devices. Like **[ext_oneapi_level_zero:gpu:0]**. 
+ +Output (example): +``` +[opencl:acc:0] Intel(R) FPGA Emulation Platform for OpenCL(TM), Intel(R) FPGA Emulation Device OpenCL 1.2 [2023.16.10.0.17_160000] +[opencl:cpu:1] Intel(R) OpenCL, 11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz OpenCL 3.0 (Build 0) [2023.16.10.0.17_160000] +[opencl:gpu:2] Intel(R) OpenCL Graphics, Intel(R) Iris(R) Xe Graphics OpenCL 3.0 NEO [31.0.101.5186] +[ext_oneapi_level_zero:gpu:0] Intel(R) Level-Zero, Intel(R) Iris(R) Xe Graphics 1.3 [1.3.28044] + +``` + +3. Install cmake & make + +a. Download & install cmake for windows: https://cmake.org/download/ + +b. Download & install make for windows provided by mingw-w64: https://www.mingw-w64.org/downloads/ + + +### Build locally: + +In oneAPI command line window: + +``` +mkdir -p build +cd build +@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force + +:: for FP16 +:: faster for long-prompt inference +:: cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release -DLLAMA_SYCL_F16=ON + +:: for FP32 +cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release + + +:: build example/main only +:: make main + +:: build all binary +make -j +cd .. +``` + +or + +``` +.\examples\sycl\win-build-sycl.bat +``` + +Note: + +- By default, it will build for all binary files. It will take more time. To reduce the time, we recommend to build for **example/main** only. + +### Run + +1. Put model file to folder **models** + +2. Enable oneAPI running environment + +- In Search, input 'oneAPI'. + +Search & open "Intel oneAPI command prompt for Intel 64 for Visual Studio 2022" + +- In Run: + +In CMD: +``` +"C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 +``` + +3. List device ID + +Run without parameter: + +``` +build\bin\ls-sycl-device.exe + +or + +build\bin\main.exe +``` + +Check the ID in startup log, like: + +``` +found 4 SYCL devices: + Device 0: Intel(R) Arc(TM) A770 Graphics, compute capability 1.3, + max compute_units 512, max work group size 1024, max sub group size 32, global mem size 16225243136 + Device 1: Intel(R) FPGA Emulation Device, compute capability 1.2, + max compute_units 24, max work group size 67108864, max sub group size 64, global mem size 67065057280 + Device 2: 13th Gen Intel(R) Core(TM) i7-13700K, compute capability 3.0, + max compute_units 24, max work group size 8192, max sub group size 64, global mem size 67065057280 + Device 3: Intel(R) Arc(TM) A770 Graphics, compute capability 3.0, + max compute_units 512, max work group size 1024, max sub group size 32, global mem size 16225243136 + +``` + +|Attribute|Note| +|-|-| +|compute capability 1.3|Level-zero running time, recommended | +|compute capability 3.0|OpenCL running time, slower than level-zero in most cases| + +4. Set device ID and execute llama.cpp + +Set device ID = 0 by **set GGML_SYCL_DEVICE=0** + +``` +set GGML_SYCL_DEVICE=0 +build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0 +``` +or run by script: + +``` +.\examples\sycl\win-run-llama2.bat +``` + +Note: + +- By default, mmap is used to read model file. In some cases, it leads to the hang issue. Recommend to use parameter **--no-mmap** to disable mmap() to skip this issue. + + +5. 
Check the device ID in output + +Like: +``` +Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device +``` ## Environment Variable @@ -220,7 +389,7 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device |LLAMA_SYCL|ON (mandatory)|Enable build with SYCL code path.
For FP32/FP16, LLAMA_SYCL=ON is mandatory.| |LLAMA_SYCL_F16|ON (optional)|Enable FP16 build with SYCL code path. Faster for long-prompt inference.
For FP32, not set it.| |CMAKE_C_COMPILER|icx|Use icx compiler for SYCL code path| -|CMAKE_CXX_COMPILER|icpx|use icpx for SYCL code path| +|CMAKE_CXX_COMPILER|icpx (Linux), icx (Windows)|use icpx/icx for SYCL code path| #### Running @@ -232,19 +401,24 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device ## Known Issue -- Error: `error while loading shared libraries: libsycl.so.7: cannot open shared object file: No such file or directory`. - - Miss to enable oneAPI running environment. - - Install oneAPI base toolkit and enable it by: `source /opt/intel/oneapi/setvars.sh`. - - - Hang during startup llama.cpp use mmap as default way to read model file and copy to GPU. In some system, memcpy will be abnormal and block. Solution: add **--no-mmap**. +## Q&A + +- Error: `error while loading shared libraries: libsycl.so.7: cannot open shared object file: No such file or directory`. + + Miss to enable oneAPI running environment. + + Install oneAPI base toolkit and enable it by: `source /opt/intel/oneapi/setvars.sh`. + +- In Windows, no result, not error. + + Miss to enable oneAPI running environment. + ## Todo - Support to build in Windows. diff --git a/README.md b/README.md index b37348a74..7746cb510 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,8 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- ⚠️ Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 + - [SYCL backend](README-sycl.md) is ready (1/28/2024), support Linux/Windows in Intel GPUs (iGPU, Arc/Flex/Max series) - New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow - Collecting Apple Silicon performance stats: - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167 @@ -604,7 +606,7 @@ Building the program with BLAS support may lead to some performance improvements llama.cpp based on SYCL is used to support Intel GPU (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). - For detailed info, please refer to [llama.cpp for SYCL](README_sycl.md). + For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md). ### Prepare Data & Run diff --git a/examples/sycl/win-build-sycl.bat b/examples/sycl/win-build-sycl.bat new file mode 100644 index 000000000..f9d43f8ed --- /dev/null +++ b/examples/sycl/win-build-sycl.bat @@ -0,0 +1,23 @@ + +:: MIT license +:: Copyright (C) 2024 Intel Corporation +:: SPDX-License-Identifier: MIT + +mkdir -p build +cd build +@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force + +:: for FP16 +:: faster for long-prompt inference +:: cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release -DLLAMA_SYCL_F16=ON + +:: for FP32 +cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release + + +:: build example/main only +:: make main + +:: build all binary +make -j +cd .. 
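The backend checks these patches touch (`ggml_cpu_has_kompute()`, `ggml_cpu_has_sycl()`) all follow the same pattern: a build-time define surfaced as a runtime query, with device selection handled separately through the `GGML_SYCL_DEVICE` variable described above. A minimal standalone sketch of that pattern, assuming a translation unit that may or may not be compiled with `-DGGML_USE_SYCL`; `has_sycl_build` here is a hypothetical stand-in for the real `ggml_cpu_has_sycl()`, not part of any patch in this series:

```cpp
// Illustrative sketch only: the define-to-runtime-query pattern used by
// ggml_cpu_has_sycl()/ggml_cpu_has_kompute(), plus reading GGML_SYCL_DEVICE
// the way README-sycl.md suggests. Not taken from the patches above.
#include <cstdio>
#include <cstdlib>

// hypothetical stand-in for ggml_cpu_has_sycl()
static int has_sycl_build(void) {
#if defined(GGML_USE_SYCL)
    return 1;   // set when the project is configured with -DLLAMA_SYCL=ON
#else
    return 0;
#endif
}

int main() {
    // GGML_SYCL_DEVICE selects the SYCL device id (see "set GGML_SYCL_DEVICE=0" above)
    const char * dev = std::getenv("GGML_SYCL_DEVICE");
    std::printf("SYCL compiled in : %s\n", has_sycl_build() ? "yes" : "no");
    std::printf("GGML_SYCL_DEVICE : %s\n", dev ? dev : "(unset, device 0 assumed)");
    return 0;
}
```

Compiling the sketch once with `-DGGML_USE_SYCL` and once without is enough to see both branches; no GPU or oneAPI runtime is needed for the sketch itself.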
diff --git a/examples/sycl/win-run-llama2.bat b/examples/sycl/win-run-llama2.bat new file mode 100644 index 000000000..28d935541 --- /dev/null +++ b/examples/sycl/win-run-llama2.bat @@ -0,0 +1,13 @@ +:: MIT license +:: Copyright (C) 2024 Intel Corporation +:: SPDX-License-Identifier: MIT + +INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" +@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force + + +set GGML_SYCL_DEVICE=0 +rem set GGML_SYCL_DEBUG=1 +.\build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p %INPUT2% -n 400 -e -ngl 33 -s 0 + + diff --git a/scripts/install-oneapi.bat b/scripts/install-oneapi.bat new file mode 100644 index 000000000..e99bef14a --- /dev/null +++ b/scripts/install-oneapi.bat @@ -0,0 +1,19 @@ +:: MIT license +:: Copyright (C) 2024 Intel Corporation +:: SPDX-License-Identifier: MIT + + +set URL=%1 +set COMPONENTS=%2 + +curl.exe --output %TEMP%\webimage.exe --url %URL% --retry 5 --retry-delay 5 +start /b /wait %TEMP%\webimage.exe -s -x -f webimage_extracted --log extract.log +del %TEMP%\webimage.exe +if "%COMPONENTS%"=="" ( + webimage_extracted\bootstrapper.exe -s --action install --eula=accept -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=. +) else ( + webimage_extracted\bootstrapper.exe -s --action install --components=%COMPONENTS% --eula=accept -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=. +) +set installer_exit_code=%ERRORLEVEL% +rd /s/q "webimage_extracted" +exit /b %installer_exit_code% From d62520eb2cc1d7168a30edec6110e1daefbd959f Mon Sep 17 00:00:00 2001 From: Yiming Cui Date: Wed, 31 Jan 2024 11:04:21 +0800 Subject: [PATCH 474/811] Fix typos of IQ2_XXS and IQ3_XXS in llama.cpp (#5231) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index a490eeab2..bb23689fa 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2713,10 +2713,10 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small"; case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium"; case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; - case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XSS - 2.0625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XXS - 2.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw"; case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; - case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XSS - 3.0625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; default: return "unknown, may not work"; } From f8e9140cb46eebaa867e1184a9946e4840eec772 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Wed, 31 Jan 2024 11:44:19 +0100 Subject: [PATCH 475/811] Vulkan Fixes (#5223) * Fix Vulkan F16 models * Fix Vulkan context shift crash * Add Vulkan to common.cpp dump_non_result_info_yaml function * Fix bug in Vulkan CPY op * Fix small matrix multiplication errors in AMD GPUs on Windows or with amdvlk Co-authored-by: Engininja2 <139037756+Engininja2@users.noreply.github.com> --------- Co-authored-by: Engininja2 <139037756+Engininja2@users.noreply.github.com> --- common/common.cpp | 1 + ggml-vulkan-shaders.hpp | 1952 +++++++++++++---------------------- ggml-vulkan.cpp | 14 +- ggml_vk_generate_shaders.py | 4 +- 4 files changed, 704 insertions(+), 1267 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 0dd1c50cf..9d976c7c8 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1520,6 +1520,7 @@ void 
dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_avx512_vbmi: %s\n", ggml_cpu_has_avx512_vbmi() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vnni: %s\n", ggml_cpu_has_avx512_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); + fprintf(stream, "cpu_has_vulkan: %s\n", ggml_cpu_has_vulkan() ? "true" : "false"); fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); fprintf(stream, "cpu_has_kompute: %s\n", ggml_cpu_has_kompute() ? "true" : "false"); fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index 321e36383..e2e9be22c 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -890,7 +890,7 @@ const uint64_t cpy_f32_f32_len = 2472; unsigned char dequant_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x87,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x81,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -898,7 +898,7 @@ unsigned char dequant_f16_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, @@ -910,23 +910,23 @@ unsigned char dequant_f16_data[] = { 0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x5b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, @@ -945,330 +945,109 @@ unsigned char dequant_f16_data[] = { 0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x5c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x13,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x02,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x81,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc8,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, 
-0xdf,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe8,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0xf5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x1f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x48,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x76,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
-0x75,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x7f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x88,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9f,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc8,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe8,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xff,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x02,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0x08,0x02,0x00,0x00,0xf5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x02,0x00,0x00, -0x32,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x48,0x02,0x00,0x00,0x35,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x68,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x81,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x81,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x23,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x5a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0d,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x80,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
+0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x52,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7a,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_len = 4392; +const uint64_t dequant_f16_len = 1748; unsigned char dequant_f16_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc8,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -1280,23 +1059,23 @@ unsigned char dequant_f16_fp32_data[] = { 0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 
+0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x85,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -1315,405 +1094,105 @@ unsigned char dequant_f16_fp32_data[] = { 0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x5f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xb9,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x48,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x0a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, +0x23,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00, 
+0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xae,0x00,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcb,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x04,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, 
-0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, 
-0xa3,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xb7,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x0f,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x0f,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x18,0x02,0x00,0x00, 
-0x04,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x19,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x48,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x57,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x61,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa9,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x17,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_fp32_len = 5420; +const uint64_t dequant_f16_fp32_len = 1816; unsigned char dequant_q2_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -15313,7 +14792,7 @@ const uint64_t gelu_f32_len = 1408; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 
-0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -15321,7 +14800,7 @@ unsigned char get_rows_f16_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -15341,22 +14820,184 @@ unsigned char get_rows_f16_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, 
+0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x72,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t get_rows_f16_len = 1892; + +unsigned char get_rows_f16_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 
+0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, @@ -15388,198 +15029,28 @@ unsigned char get_rows_f16_data[] = { 0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x62,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_len = 1940; - -unsigned char get_rows_f16_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -15600,7 +15071,7 @@ unsigned char get_rows_f16_f32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -15613,51 +15084,51 @@ unsigned char get_rows_f16_f32_data[] = { 0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 
-0x6e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_f32_len = 1988; +const uint64_t get_rows_f16_f32_len = 1940; unsigned char get_rows_f16_f32_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15676,23 +15147,23 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x64,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x65,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -15723,32 +15194,28 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, @@ -15770,7 +15237,7 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, 0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, @@ -15783,51 +15250,51 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, 0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x53,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6f,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x6f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_f32_fp32_len = 1980; +const uint64_t get_rows_f16_f32_fp32_len = 1932; unsigned char get_rows_f16_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15846,23 +15313,23 @@ unsigned char get_rows_f16_fp32_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x64,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x65,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -15893,31 +15360,27 @@ unsigned char get_rows_f16_fp32_data[] = { 0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -15938,7 +15401,7 @@ unsigned char get_rows_f16_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -15951,42 +15414,42 @@ unsigned char get_rows_f16_fp32_data[] = { 0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 
-0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
+0x73,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x76,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_fp32_len = 1996; +const uint64_t get_rows_f16_fp32_len = 1948; unsigned char get_rows_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -52701,7 +52164,7 @@ const uint64_t mul_f32_len = 1456; unsigned char mul_mat_vec_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, @@ -52709,9 +52172,9 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x13,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -52729,23 +52192,23 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa8,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00, 
+0x47,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb5,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, @@ -52760,7 +52223,7 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x17,0x00,0x00,0x00, 0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x18,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, 0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, @@ -52775,7 +52238,7 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x2e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x02,0x00, 0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, @@ -52784,26 +52247,22 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x65,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xaa,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xab,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xa7,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, @@ -52819,122 +52278,91 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, 0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x87,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, 0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb5,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
-0x17,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x8b,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xad,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x90,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x99,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x97,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x99,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x00,0x00,0x00, -0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 
-0xb9,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xa9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa7,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x54,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, 
+0x57,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xa6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa6,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_f16_f32_len = 2788; +const uint64_t mul_mat_vec_f16_f32_len = 2372; unsigned char mul_mat_vec_nc_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 1d93ec6bb..bccc40bf5 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -817,7 +817,7 @@ static void ggml_vk_load_shaders() { // mulmat std::initializer_list warptile_l = { 128, 128, 128, 16, vk_device.subgroup_size * 2, 64, 2, 4, 4, vk_device.subgroup_size }; std::initializer_list warptile_m = { 128, 64, 64, 16, vk_device.subgroup_size, 32, 2, 4, 2, vk_device.subgroup_size }; - std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 8, 32, 32, 2, 2, 2, vk_device.subgroup_size }; + std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, vk_device.subgroup_size }; std::array l_wg_denoms = {128, 128, 1 }; std::array m_wg_denoms = { 64, 64, 1 }; @@ -2873,7 +2873,8 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (op == GGML_OP_CPY) { GGML_ASSERT(!transfer_src0); GGML_ASSERT(!transfer_src1); - d_sz = dst->ne[1] * dst->nb[1]; + x_sz = ggml_nbytes(src0); + d_sz = ggml_nbytes(dst); if (extra->offset + d_sz >= d_D->size) { d_sz = VK_WHOLE_SIZE; @@ -4556,8 +4557,15 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } ggml_vk_preallocate_buffers(); + int last_node = 
cgraph->n_nodes - 1; + + // If the last op in the cgraph isn't backend GPU, the command buffer doesn't get closed properly + while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_GPU) { + last_node -= 1; + } + for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_build_graph(cgraph->nodes[i], i == cgraph->n_nodes - 1); + ggml_vk_build_graph(cgraph->nodes[i], i == last_node); } ggml_compute_params params = {}; diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index d0861fde4..6b1b82bf3 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -19,8 +19,8 @@ shader_int8_ext = """ # Type-specific defines shader_f16_defines = """ -#define QUANT_K 32 -#define QUANT_R 2 +#define QUANT_K 1 +#define QUANT_R 1 #define A_TYPE float16_t """ From dabcc5b471348e4ae03ddacc41e19ad75fb2f041 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 31 Jan 2024 13:43:03 +0100 Subject: [PATCH 476/811] ggml : limit n_threads to the max n_tasks (#5238) --- ggml.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index b2c8baaa8..afd9c6c61 100644 --- a/ggml.c +++ b/ggml.c @@ -16985,12 +16985,16 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa struct ggml_cplan cplan; memset(&cplan, 0, sizeof(struct ggml_cplan)); + int max_tasks = 1; + // thread scheduling for the different operations + work buffer size estimation for (int i = 0; i < cgraph->n_nodes; i++) { struct ggml_tensor * node = cgraph->nodes[i]; const int n_tasks = ggml_get_n_tasks(node, n_threads); + max_tasks = MAX(max_tasks, n_tasks); + size_t cur = 0; switch (node->op) { @@ -17157,7 +17161,7 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa work_size += CACHE_LINE_SIZE*(n_threads - 1); } - cplan.n_threads = n_threads; + cplan.n_threads = MIN(max_tasks, n_threads); cplan.work_size = work_size; cplan.work_data = NULL; From b2b9f025e7821e78bd501d75d01838c26de07a57 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 31 Jan 2024 21:04:46 +0800 Subject: [PATCH 477/811] format license text, restore apache license by legal suggestion (#5233) --- examples/sycl/ls-sycl-device.cpp | 10 ++++++---- ggml-sycl.cpp | 15 +++++++++++---- ggml-sycl.h | 9 +++++---- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/examples/sycl/ls-sycl-device.cpp b/examples/sycl/ls-sycl-device.cpp index 42847154a..52442e4ca 100644 --- a/examples/sycl/ls-sycl-device.cpp +++ b/examples/sycl/ls-sycl-device.cpp @@ -1,7 +1,9 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// + #include "ggml-sycl.h" diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 3fc346975..1cc55ef52 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -1,7 +1,14 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// + +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. 
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// #include #include diff --git a/ggml-sycl.h b/ggml-sycl.h index 0eabb53cc..ba0c61473 100644 --- a/ggml-sycl.h +++ b/ggml-sycl.h @@ -1,7 +1,8 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// #pragma once From 15606309a05ccf7fadbaad5538cb7c32acb1e06b Mon Sep 17 00:00:00 2001 From: JidongZhang-THU <1119708529@qq.com> Date: Wed, 31 Jan 2024 21:10:15 +0800 Subject: [PATCH 478/811] llava : add MobileVLM support (#5132) * New Feature: 1. Sum_Rows: fix cuda kernel overflow fix block shape error when nrows too big 2. Im2Col: Support Batch in cuda Support f32 to f32 both in cpu && cuda 3. DepthWiseConv: Support by Im2Col && MulMat 4. Pool_2d: Supoort avg pooling in cuda 5. HardSigmoid: Imp in cuda 6. HardSwish: Imp in cuda * fix tabs instead of spaces * code clean * CUDA POOL2D * ADD POOL2D test case in test-backend-ops.cpp * code clean * fix pool2d_kernel nits * fix bug in pool2d kernel * fix avg pooling, count_include_pad nits * test-backend-ops : add more pool_2d tests * cuda : fix warnings and formatting * ggml : check types in release builds too in pool_2d * test-backend-ops : remove f16 pool_2d tests * cuda : more style fixes * Add assert in ggml_cuda_op_pool2d * pool2d float padding fallback * test-backend-ops : add dst_type to im2col --------- Co-authored-by: slaren --- examples/llava/MobileVLM-README.md | 58 +++++++- ggml-cuda.cu | 209 ++++++++++++++++++++++++++--- ggml.c | 118 +++++++++++++--- ggml.h | 3 +- tests/test-backend-ops.cpp | 74 +++++++++- 5 files changed, 421 insertions(+), 41 deletions(-) diff --git a/examples/llava/MobileVLM-README.md b/examples/llava/MobileVLM-README.md index c6258eba6..9eba791da 100644 --- a/examples/llava/MobileVLM-README.md +++ b/examples/llava/MobileVLM-README.md @@ -111,17 +111,71 @@ llama_print_timings: eval time = 1279.03 ms / 18 runs ( 71.06 m llama_print_timings: total time = 34570.79 ms ``` +## Orin compile and run +### compile +```sh +make LLAMA_CUBLAS=1 CUDA_DOCKER_ARCH=sm_87 LLAMA_CUDA_F16=1 -j 32 +``` + +### run on Orin +### case 1 +**input** +```sh +./llava-cli \ + -m /data/local/tmp/ggml-model-q4_k.gguf \ + --mmproj /data/local/tmp/mmproj-model-f16.gguf \ + --image /data/local/tmp/demo.jpeg \ + -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: \nWho is the author of this book? \nAnswer the question using a single word or phrase. ASSISTANT:" \ + --n-gpu-layers 999 +``` +**output** +```sh + +encode_image_with_clip: image encoded in 296.62 ms by CLIP ( 2.06 ms per image patch) + + Susan Wise Bauer + +llama_print_timings: load time = 1067.64 ms +llama_print_timings: sample time = 1.53 ms / 6 runs ( 0.25 ms per token, 3934.43 tokens per second) +llama_print_timings: prompt eval time = 306.84 ms / 246 tokens ( 1.25 ms per token, 801.72 tokens per second) +llama_print_timings: eval time = 91.50 ms / 6 runs ( 15.25 ms per token, 65.58 tokens per second) +llama_print_timings: total time = 1352.63 ms / 252 tokens +``` + +### case 2 +**input** +```sh +./llava-cli \ + -m /data/local/tmp/ggml-model-q4_k.gguf \ + --mmproj /data/local/tmp/mmproj-model-f16.gguf \ + -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. 
USER: \nWhat is in the image? ASSISTANT:" \ + --n-gpu-layers 999 + +``` +**output** +```sh +encode_image_with_clip: image encoded in 302.15 ms by CLIP ( 2.10 ms per image patch) + + The image features a cat lying in the grass. + +llama_print_timings: load time = 1057.07 ms +llama_print_timings: sample time = 3.27 ms / 11 runs ( 0.30 ms per token, 3360.83 tokens per second) +llama_print_timings: prompt eval time = 213.60 ms / 232 tokens ( 0.92 ms per token, 1086.14 tokens per second) +llama_print_timings: eval time = 166.65 ms / 11 runs ( 15.15 ms per token, 66.01 tokens per second) +llama_print_timings: total time = 1365.47 ms / 243 tokens +``` + ## Minor shortcomings The `n_patch` of output in `ldp` is 1/4 of the input. In order to implement quickly, we uniformly modified `clip_n_patches` function to a quarter. when counting the time consumption, the calculated time will be 4 times bigger than the real cost. ## TODO -- [ ] Support non-CPU backend for the new operators, such as `depthwise`, `hardswish`, `hardsigmoid` +- [x] Support non-CPU backend for the new operators, such as `depthwise`, `hardswish`, `hardsigmoid` - [ ] Optimize LDP projector performance - Optimize the structure definition to avoid unnecessary memory rearrangements, to reduce the use of `ggml_permute_cpy`; - Optimize operator implementation (ARM CPU/NVIDIA GPU): such as depthwise conv, hardswish, hardsigmoid, etc. -- [ ] run MobileVLM on `Jetson Orin` +- [x] run MobileVLM on `Jetson Orin` - [ ] Support more model variants, such as `MobileVLM-3B`. diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 949bc8a1c..e56595742 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -524,6 +524,8 @@ static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong #define CUDA_SILU_BLOCK_SIZE 256 #define CUDA_TANH_BLOCK_SIZE 256 #define CUDA_RELU_BLOCK_SIZE 256 +#define CUDA_HARDSIGMOID_BLOCK_SIZE 256 +#define CUDA_HARDSWISH_BLOCK_SIZE 256 #define CUDA_SQR_BLOCK_SIZE 256 #define CUDA_CPY_BLOCK_SIZE 32 #define CUDA_SCALE_BLOCK_SIZE 256 @@ -540,6 +542,7 @@ static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong #define CUDA_PAD_BLOCK_SIZE 256 #define CUDA_ACC_BLOCK_SIZE 256 #define CUDA_IM2COL_BLOCK_SIZE 256 +#define CUDA_POOL2D_BLOCK_SIZE 256 #define CUDA_Q8_0_NE_ALIGN 2048 @@ -823,6 +826,24 @@ static __global__ void relu_f32(const float * x, float * dst, const int k) { dst[i] = fmaxf(x[i], 0); } +static __global__ void hardsigmoid_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); +} + +static __global__ void hardswish_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = x[i] * fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); +} + static __global__ void leaky_relu_f32(const float * x, float * dst, const int k, const float negative_slope) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { @@ -5823,7 +5844,7 @@ static __global__ void alibi_f32(const float * x, float * dst, const int ncols, } static __global__ void k_sum_rows_f32(const float * x, float * dst, const int ncols) { - const int row = blockIdx.y; + const int row = blockIdx.x; const int col = threadIdx.x; float sum = 0.0f; @@ -6145,9 +6166,10 @@ static __global__ void clamp_f32(const float * x, float * dst, const float min, dst[i] = x[i] < min ? min : (x[i] > max ? 
max : x[i]); } -static __global__ void im2col_f32_f16( - const float * x, half * dst, - int offset_delta, int IW, int IH, int OW, int KW, int KH, int pelements, int CHW, +template +static __global__ void im2col_kernel( + const float * x, T * dst, int batch_offset, + int offset_delta, int IC, int IW, int IH, int OH, int OW, int KW, int KH, int pelements, int CHW, int s0, int s1, int p0, int p1, int d0, int d1) { const int i = threadIdx.x + blockIdx.x * blockDim.x; if (i >= pelements) { @@ -6160,21 +6182,73 @@ static __global__ void im2col_f32_f16( const int ky = (i - kd) / OW; const int ix = i % OW; + const int oh = blockIdx.y; + const int batch = blockIdx.z / IC; + const int ic = blockIdx.z % IC; + const int64_t iiw = ix * s0 + kx * d0 - p0; - const int64_t iih = blockIdx.y * s1 + ky * d1 - p1; + const int64_t iih = oh * s1 + ky * d1 - p1; const int64_t offset_dst = - (blockIdx.y * OW + ix) * CHW + - (blockIdx.z * (KW * KH) + ky * KW + kx); + ((batch * OH + oh) * OW + ix) * CHW + + (ic * (KW * KH) + ky * KW + kx); if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { - dst[offset_dst] = __float2half(0.0f); + dst[offset_dst] = 0.0f; } else { - const int64_t offset_src = blockIdx.z * offset_delta; - dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); + const int64_t offset_src = ic * offset_delta + batch * batch_offset; + dst[offset_dst] = x[offset_src + iih * IW + iiw]; } } +template +static __global__ void pool2d_nchw_kernel( + const int ih, const int iw, const int oh, const int ow, + const int kh, const int kw, const int sh, const int sw, + const int ph, const int pw, const int parallel_elements, + const Ti* src, To* dst, const enum ggml_op_pool op) { + int idx = threadIdx.x + blockIdx.x * blockDim.x; + if (idx >= parallel_elements) { + return; + } + + const int I_HW = ih * iw; + const int O_HW = oh * ow; + const int nc = idx / O_HW; + const int cur_oh = idx % O_HW / ow; + const int cur_ow = idx % O_HW % ow; + const Ti* i_ptr = src + nc * I_HW; + To* o_ptr = dst + nc * O_HW; + const int start_h = cur_oh * sh - ph; + const int bh = max(0, start_h); + const int eh = min(ih, start_h + kh); + const int start_w = cur_ow * sw - pw; + const int bw = max(0, start_w); + const int ew = min(iw, start_w + kw); + const To scale = 1. 
/ (kh * kw); + To res = 0; + + switch (op) { + case GGML_OP_POOL_AVG: res = 0; break; + case GGML_OP_POOL_MAX: res = -FLT_MAX; break; + } + + for (int i = bh; i < eh; i += 1) { + for (int j = bw; j < ew; j += 1) { + #if __CUDA_ARCH__ >= 350 + Ti cur = __ldg(i_ptr + i * iw + j); + #else + Ti cur = i_ptr[i * iw + j]; + #endif + switch (op) { + case GGML_OP_POOL_AVG: res += cur * scale; break; + case GGML_OP_POOL_MAX: res = max(res, (To)cur); break; + } + } + } + o_ptr[cur_oh * ow + cur_ow] = res; +} + template static void get_rows_cuda(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const void * src0_dd, const int32_t * src1_dd, float * dst_dd, cudaStream_t stream) { @@ -6388,6 +6462,16 @@ static void relu_f32_cuda(const float * x, float * dst, const int k, cudaStream_ relu_f32<<>>(x, dst, k); } +static void hardsigmoid_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_HARDSIGMOID_BLOCK_SIZE - 1) / CUDA_HARDSIGMOID_BLOCK_SIZE; + hardsigmoid_f32<<>>(x, dst, k); +} + +static void hardswish_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_HARDSWISH_BLOCK_SIZE - 1) / CUDA_HARDSWISH_BLOCK_SIZE; + hardswish_f32<<>>(x, dst, k); +} + static void leaky_relu_f32_cuda(const float * x, float * dst, const int k, const float negative_slope, cudaStream_t stream) { const int num_blocks = (k + CUDA_RELU_BLOCK_SIZE - 1) / CUDA_RELU_BLOCK_SIZE; leaky_relu_f32<<>>(x, dst, k, negative_slope); @@ -7475,7 +7559,7 @@ static void alibi_f32_cuda(const float * x, float * dst, const int ncols, const static void sum_rows_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) { const dim3 block_dims(WARP_SIZE, 1, 1); - const dim3 block_nums(1, nrows, 1); + const dim3 block_nums(nrows, 1, 1); k_sum_rows_f32<<>>(x, dst, ncols); } @@ -7587,14 +7671,15 @@ static void soft_max_f32_cuda(const float * x, const float * y, float * dst, con } } -static void im2col_f32_f16_cuda(const float* x, half* dst, +template +static void im2col_cuda(const float* x, T* dst, int IW, int IH, int OW, int OH, int KW, int KH, int IC, - int offset_delta, + int batch, int batch_offset, int offset_delta, int s0,int s1,int p0,int p1,int d0,int d1, cudaStream_t stream) { const int parallel_elements = OW * KW * KH; const int num_blocks = (parallel_elements + CUDA_IM2COL_BLOCK_SIZE - 1) / CUDA_IM2COL_BLOCK_SIZE; - dim3 block_nums(num_blocks, OH, IC); - im2col_f32_f16<<>>(x, dst, offset_delta, IW, IH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1); + dim3 block_nums(num_blocks, OH, batch * IC); + im2col_kernel<<>>(x, dst, batch_offset, offset_delta, IC, IW, IH, OH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1); } // buffer pool for cuda @@ -8179,6 +8264,34 @@ static void ggml_cuda_op_relu( (void) src1_dd; } +static void ggml_cuda_op_hardsigmoid( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + hardsigmoid_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +static void ggml_cuda_op_hardswish( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + 
GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + hardswish_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + static void ggml_cuda_op_leaky_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { @@ -8810,13 +8923,46 @@ static void ggml_cuda_op_alibi( (void) src1_dd; } +static void ggml_cuda_op_pool2d( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = static_cast(opts[0]); + const int k0 = opts[1]; + const int k1 = opts[2]; + const int s0 = opts[3]; + const int s1 = opts[4]; + const int p0 = opts[5]; + const int p1 = opts[6]; + + const int64_t IH = src0->ne[1]; + const int64_t IW = src0->ne[0]; + + const int64_t N = dst->ne[3]; + const int64_t OC = dst->ne[2]; + const int64_t OH = dst->ne[1]; + const int64_t OW = dst->ne[0]; + + const int parallel_elements = N * OC * OH * OW; + const int num_blocks = (parallel_elements + CUDA_POOL2D_BLOCK_SIZE - 1) / CUDA_POOL2D_BLOCK_SIZE; + dim3 block_nums(num_blocks); + pool2d_nchw_kernel<<>>(IH, IW, OH, OW, k1, k0, s1, s0, p1, p0, parallel_elements, src0_dd, dst_dd, op); + + (void) src1; + (void) src1_dd; +} + static void ggml_cuda_op_im2col( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; @@ -8838,8 +8984,14 @@ static void ggml_cuda_op_im2col( const int64_t OW = dst->ne[1]; const size_t delta_offset = src1->nb[is_2D ? 
2 : 1] / 4; // nb is byte offset, src is type float32 + const int64_t batch = src1->ne[3]; + const size_t batch_offset = src1->nb[3] / 4; // nb is byte offset, src is type float32 - im2col_f32_f16_cuda(src1_dd, (half*) dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + if(dst->type == GGML_TYPE_F16) { + im2col_cuda(src1_dd, (half*) dst_dd, IW, IH, OW, OH, KW, KH, IC, batch, batch_offset, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } else { + im2col_cuda(src1_dd, (float*) dst_dd, IW, IH, OW, OH, KW, KH, IC, batch, batch_offset, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } (void) src0; (void) src0_dd; @@ -9435,6 +9587,13 @@ static void ggml_cuda_relu(const ggml_tensor * src0, const ggml_tensor * src1, g ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_relu); } +static void ggml_cuda_hardsigmoid(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_hardsigmoid); +} + +static void ggml_cuda_hardswish(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_hardswish); +} static void ggml_cuda_leaky_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_leaky_relu); } @@ -10220,6 +10379,10 @@ static void ggml_cuda_alibi(const ggml_tensor * src0, const ggml_tensor * src1, ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_alibi); } +static void ggml_cuda_pool2d(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_pool2d); +} + static void ggml_cuda_im2col(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_im2col); } @@ -10321,6 +10484,12 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st case GGML_UNARY_OP_RELU: func = ggml_cuda_relu; break; + case GGML_UNARY_OP_HARDSIGMOID: + func = ggml_cuda_hardsigmoid; + break; + case GGML_UNARY_OP_HARDSWISH: + func = ggml_cuda_hardswish; + break; default: return false; } @@ -10395,6 +10564,9 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st case GGML_OP_IM2COL: func = ggml_cuda_im2col; break; + case GGML_OP_POOL_2D: + func = ggml_cuda_pool2d; + break; case GGML_OP_SUM_ROWS: func = ggml_cuda_sum_rows; break; @@ -11123,6 +11295,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_HARDSIGMOID: + case GGML_UNARY_OP_HARDSWISH: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_TANH: return true; @@ -11221,6 +11395,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_OP_ROPE: case GGML_OP_ALIBI: case GGML_OP_IM2COL: + case GGML_OP_POOL_2D: case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: case GGML_OP_ACC: diff --git a/ggml.c b/ggml.c index afd9c6c61..ee994c875 100644 --- a/ggml.c +++ b/ggml.c @@ -5349,7 +5349,7 @@ GGML_API struct ggml_tensor * ggml_conv_1d( int s0, int p0, int d0) { - struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false); // [N, OL, IC * K] + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false, GGML_TYPE_F16); // [N, OL, IC * K] struct ggml_tensor * result = ggml_mul_mat(ctx, @@ -5427,16 +5427,15 @@ struct ggml_tensor * ggml_conv_depthwise_2d( int 
p1, int d0, int d1) { + struct ggml_tensor * new_a = ggml_reshape_4d(ctx, a, a->ne[0], a->ne[1], 1, a->ne[2] * a->ne[3]); struct ggml_tensor * im2col = ggml_im2col(ctx, new_a, ggml_reshape_4d(ctx, b, b->ne[0], b->ne[1], 1, b->ne[2] * b->ne[3]), - s0, s1, p0, p1, d0, d1, true); // [N * IC, OH, OW, KH * KW] - - struct ggml_tensor * result = - ggml_mul_mat(ctx, - ggml_reshape_4d(ctx, new_a, (new_a->ne[0] * new_a->ne[1]), new_a->ne[2], new_a->ne[3], 1), // [OC,1, KH, KW] => [1, OC, 1, KH * KW] - ggml_reshape_4d(ctx, im2col, im2col->ne[0], im2col->ne[2] * im2col->ne[1], b->ne[2], b->ne[3])); // [N * IC, OH, OW, KH * KW] => [N, IC, OH * OW, KH * KW] + s0, s1, p0, p1, d0, d1, true, GGML_TYPE_F16); // [N * IC, OH, OW, KH * KW] + struct ggml_tensor * new_b = ggml_reshape_4d(ctx, im2col, im2col->ne[0], im2col->ne[2] * im2col->ne[1], b->ne[2], b->ne[3]); // [N * IC, OH, OW, KH * KW] => [N, IC, OH * OW, KH * KW] + new_a = ggml_reshape_4d(ctx, new_a, (new_a->ne[0] * new_a->ne[1]), new_a->ne[2], new_a->ne[3], 1); // [OC,1, KH, KW] => [1, OC, 1, KH * KW] + struct ggml_tensor * result = ggml_mul_mat(ctx, new_a, new_b); result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], b->ne[2], b->ne[3]); // [N, OC, OH, OW] return result; @@ -5457,7 +5456,8 @@ struct ggml_tensor * ggml_im2col( int p1, int d0, int d1, - bool is_2D) { + bool is_2D, + enum ggml_type dst_type) { if(is_2D) { GGML_ASSERT(a->ne[2] == b->ne[2]); @@ -5481,7 +5481,7 @@ struct ggml_tensor * ggml_im2col( is_2D ? b->ne[3] : 1, }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, dst_type, 4, ne); int32_t params[] = { s0, s1, p0, p1, d0, d1, (is_2D ? 1 : 0) }; ggml_set_op_params(result, params, sizeof(params)); @@ -5506,7 +5506,7 @@ struct ggml_tensor * ggml_conv_2d( int p1, int d0, int d1) { - struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true); // [N, OH, OW, IC * KH * KW] + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true, GGML_TYPE_F16); // [N, OH, OW, IC * KH * KW] struct ggml_tensor * result = ggml_mul_mat(ctx, @@ -5632,12 +5632,13 @@ struct ggml_tensor * ggml_pool_2d( is_node = true; } + struct ggml_tensor * result; const int64_t ne[3] = { ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), ggml_calc_pool_output_size(a->ne[1], k1, s1, p1), a->ne[2], }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 3, ne); + result = ggml_new_tensor(ctx, GGML_TYPE_F32, 3, ne); int32_t params[] = { op, k0, k1, s0, s1, p0, p1 }; ggml_set_op_params(result, params, sizeof(params)); @@ -5645,7 +5646,6 @@ struct ggml_tensor * ggml_pool_2d( result->op = GGML_OP_POOL_2D; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - return result; } @@ -12493,6 +12493,92 @@ static void ggml_compute_forward_conv_transpose_1d( } } +// src0: kernel [OC, IC, KH, KW] +// src1: image [N, IC, IH, IW] +// dst: result [N, OH, OW, IC*KH*KW] +static void ggml_compute_forward_im2col_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + int64_t t0 = ggml_perf_time_us(); + UNUSED(t0); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t N = is_2D ? ne13 : ne12; + const int64_t IC = is_2D ? ne12 : ne11; + const int64_t IH = is_2D ? ne11 : 1; + const int64_t IW = ne10; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne2 : 1; + const int64_t OW = ne1; + + int ofs0 = is_2D ? nb13 : nb12; + int ofs1 = is_2D ? nb12 : nb11; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(float)); + + if (params->type == GGML_TASK_INIT) { + return; + } + + if (params->type == GGML_TASK_FINALIZE) { + return; + } + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + float * const wdata = (float *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { // 1 + for (int64_t iow = 0; iow < OW; iow++) { + for (int64_t iic = ith; iic < IC; iic += nth) { + + // micro kernel + float * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + const float * const src_data = (float *)((char *) src1->data + in*ofs0 + iic*ofs1); // [IH, IW] + + for (int64_t ikh = 0; ikh < KH; ikh++) { // 1 + for (int64_t ikw = 0; ikw < KW; ikw++) { + const int64_t iiw = iow*s0 + ikw*d0 - p0; + const int64_t iih = ioh*s1 + ikh*d1 - p1; + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = 0; + } else { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = (src_data[iih*IW + iiw]); + } + } + } + } + } + } + } + } +} + + // src0: kernel [OC, IC, KH, KW] // src1: image [N, IC, IH, IW] // dst: result [N, OH, OW, IC*KH*KW] @@ -12583,14 +12669,14 @@ static void ggml_compute_forward_im2col( const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - switch (src0->type) { + switch (dst->type) { case GGML_TYPE_F16: { ggml_compute_forward_im2col_f16(params, src0, src1, dst); } break; case GGML_TYPE_F32: { - GGML_ASSERT(false); + ggml_compute_forward_im2col_f32(params, src0, src1, dst); } break; default: { @@ -12781,8 +12867,8 @@ static void ggml_compute_forward_pool_2d( const struct ggml_compute_params * params, const struct ggml_tensor * src, struct ggml_tensor * dst) { - assert(src->type == GGML_TYPE_F32); - assert(params->ith == 0); + GGML_ASSERT(src->type == GGML_TYPE_F32); + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; diff --git a/ggml.h b/ggml.h 
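As a quick reference for the operator wired up above, here is a small stand-alone sketch of calling ggml_pool_2d on the CPU path. It is illustrative only and not part of the patch; the context size, input shape and expected values are assumptions chosen for the example (3x3 average pooling with stride 1 and padding 1 keeps the 10x10 spatial size, and padded border windows are still divided by k0*k1):

```cpp
#include <cstdio>
#include <cstdint>
#include <vector>
#include "ggml.h"

int main() {
    std::vector<uint8_t> buf(16u*1024*1024);
    struct ggml_init_params ip = { buf.size(), buf.data(), /*no_alloc=*/false };
    struct ggml_context * ctx = ggml_init(ip);

    // input: [W=10, H=10, C=3, N=1], filled with ones
    struct ggml_tensor * x = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, 10, 10, 3, 1);
    for (int64_t i = 0; i < ggml_nelements(x); ++i) {
        ((float *) x->data)[i] = 1.0f;
    }

    // 3x3 average pooling, stride 1, padding 1 -> output spatial size stays 10x10
    struct ggml_tensor * y = ggml_pool_2d(ctx, x, GGML_OP_POOL_AVG, 3, 3, 1, 1, 1, 1);

    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, y);
    ggml_graph_compute_with_ctx(ctx, gf, /*n_threads=*/1);

    // a corner window sees only 4 in-bounds ones but is still divided by 9
    printf("corner   = %.4f (expect ~0.4444)\n", ((float *) y->data)[0]);
    printf("interior = %.4f (expect 1.0000)\n", ((float *) y->data)[1*10 + 1]);

    ggml_free(ctx);
    return 0;
}
```

The CUDA pool2d_nchw_kernel added above is expected to produce the same numbers as this CPU reference, which is what the new test_pool2d and test_im2col cases in test-backend-ops.cpp compare against.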
index afc87b843..e0a4799f3 100644 --- a/ggml.h +++ b/ggml.h @@ -1495,7 +1495,8 @@ extern "C" { int p1, int d0, int d1, - bool is_2D); + bool is_2D, + enum ggml_type dst_type); GGML_API struct ggml_tensor * ggml_conv_depthwise_2d( struct ggml_context * ctx, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 1d29070b6..eb06123d2 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -227,6 +227,14 @@ static std::string var_to_str(ggml_type type) { return ggml_type_name(type); } +static std::string var_to_str(ggml_op_pool pool) { + switch (pool) { + case GGML_OP_POOL_AVG: return "avg"; + case GGML_OP_POOL_MAX: return "max"; + default: return std::to_string(pool); + } +} + #define VARS_TO_STR1(a) VAR_TO_STR(a) #define VARS_TO_STR2(a, b) VAR_TO_STR(a) + "," + VAR_TO_STR(b) #define VARS_TO_STR3(a, b, c) VAR_TO_STR(a) + "," + VARS_TO_STR2(b, c) @@ -238,6 +246,7 @@ static std::string var_to_str(ggml_type type) { #define VARS_TO_STR9(a, b, c, d, e, f, g, h, i) VAR_TO_STR(a) + "," + VARS_TO_STR8(b, c, d, e, f, g, h, i) #define VARS_TO_STR10(a, b, c, d, e, f, g, h, i, j) VAR_TO_STR(a) + "," + VARS_TO_STR9(b, c, d, e, f, g, h, i, j) #define VARS_TO_STR11(a, b, c, d, e, f, g, h, i, j, k) VAR_TO_STR(a) + "," + VARS_TO_STR10(b, c, d, e, f, g, h, i, j, k) +#define VARS_TO_STR12(a, b, c, d, e, f, g, h, i, j, k, l) VAR_TO_STR(a) + "," + VARS_TO_STR11(b, c, d, e, f, g, h, i, j, k, l) #ifdef GGML_USE_SYCL static bool inline _isinf(float f) { @@ -1162,10 +1171,45 @@ struct test_alibi : public test_case { } }; +// GGML_OP_POOL2D +struct test_pool2d : public test_case { + enum ggml_op_pool pool_type; + const ggml_type type_input; + const std::array ne_input; + // kernel size + const int k0; + const int k1; + // stride + const int s0; + const int s1; + // padding + const int p0; + const int p1; + + std::string vars() override { + return VARS_TO_STR9(pool_type, type_input, ne_input, k0, k1, s0, s1, p0, p1); + } + + test_pool2d(ggml_op_pool pool_type = GGML_OP_POOL_AVG, + ggml_type type_input = GGML_TYPE_F32, + std::array ne_input = {10, 10, 3, 1}, // [input_width, input_height, input_channels, 1] + int k0 = 3, int k1 = 3, + int s0 = 1, int s1 = 1, + int p0 = 1, int p1 = 1) + : pool_type(pool_type), type_input(type_input), ne_input(ne_input), k0(k0), k1(k1), s0(s0), s1(s1), p0(p0), p1(p1) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * input = ggml_new_tensor(ctx, type_input, 4, ne_input.data()); + ggml_tensor * out = ggml_pool_2d(ctx, input, pool_type, k0, k1, s0, s1, p0, p1); + return out; + } +}; + // GGML_OP_IM2COL struct test_im2col : public test_case { const ggml_type type_input; const ggml_type type_kernel; + const ggml_type dst_type; const std::array ne_input; const std::array ne_kernel; // stride @@ -1181,22 +1225,22 @@ struct test_im2col : public test_case { const bool is_2D; std::string vars() override { - return VARS_TO_STR11(type_input, type_kernel, ne_input, ne_kernel, s0, s1, p0, p1, d0, d1, is_2D); + return VARS_TO_STR12(type_input, type_kernel, dst_type, ne_input, ne_kernel, s0, s1, p0, p1, d0, d1, is_2D); } - test_im2col(ggml_type type_input = GGML_TYPE_F32, ggml_type type_kernel = GGML_TYPE_F16, + test_im2col(ggml_type type_input = GGML_TYPE_F32, ggml_type type_kernel = GGML_TYPE_F16, ggml_type dst_type = GGML_TYPE_F32, std::array ne_input = {10, 10, 3, 1}, // [input_width, input_height, input_channels, 1] std::array ne_kernel = {3, 3, 3, 1}, // [kernel_width, kernel_height, input_channels, 1] int s0 = 1, int s1 = 1, int 
p0 = 1, int p1 = 1, int d0 = 1, int d1 = 1, bool is_2D = true) - : type_input(type_input), type_kernel(type_kernel), ne_input(ne_input), ne_kernel(ne_kernel), s0(s0), s1(s1), p0(p0), p1(p1), d0(d0), d1(d1), is_2D(is_2D) {} + : type_input(type_input), type_kernel(type_kernel), dst_type(dst_type), ne_input(ne_input), ne_kernel(ne_kernel), s0(s0), s1(s1), p0(p0), p1(p1), d0(d0), d1(d1), is_2D(is_2D) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * input = ggml_new_tensor(ctx, type_input, 4, ne_input.data()); ggml_tensor * kernel = ggml_new_tensor(ctx, type_kernel, 4, ne_kernel.data()); - ggml_tensor * out = ggml_im2col(ctx, kernel, input, s0, s1, p0, p1, d0, d1, is_2D); + ggml_tensor * out = ggml_im2col(ctx, kernel, input, s0, s1, p0, p1, d0, d1, is_2D, dst_type); return out; } }; @@ -1912,6 +1956,27 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } } + for (ggml_type type_input : {GGML_TYPE_F32}) { + for (ggml_op_pool pool_type : {GGML_OP_POOL_AVG, GGML_OP_POOL_MAX}) { + for (int k0 : {1, 3}) { + for (int k1 : {1, 3}) { + for (int s0 : {1, 2}) { + for (int s1 : {1, 2}) { + for (int p0 : {0, 1}) { + for (int p1 : {0, 1}) { + test_cases.emplace_back(new test_pool2d(pool_type, type_input, {10, 10, 3, 1}, k0, k1, s0, s1, p0, p1)); + } + } + } + } + } + } + } + } + + test_cases.emplace_back(new test_im2col(GGML_TYPE_F32, GGML_TYPE_F16, GGML_TYPE_F32)); + test_cases.emplace_back(new test_im2col(GGML_TYPE_F32, GGML_TYPE_F16, GGML_TYPE_F16)); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {2, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 2, 1, 1})); @@ -2049,7 +2114,6 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } test_cases.emplace_back(new test_alibi()); - test_cases.emplace_back(new test_im2col()); test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); From efb7bdbbd061d087c788598b97992c653f992ddd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 15:35:41 +0200 Subject: [PATCH 479/811] metal : add im2col F32 dst support (#5132) --- ggml-metal.m | 13 ++++++++++--- ggml-metal.metal | 33 +++++++++++++++++++++++++++++---- 2 files changed, 39 insertions(+), 7 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index f87859552..5260ed827 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -135,6 +135,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, GGML_METAL_KERNEL_TYPE_IM2COL_F16, + GGML_METAL_KERNEL_TYPE_IM2COL_F32, GGML_METAL_KERNEL_TYPE_UPSCALE_F32, GGML_METAL_KERNEL_TYPE_PAD_F32, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, @@ -506,6 +507,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); @@ -630,6 +632,10 @@ static bool ggml_metal_supports_op(const struct 
ggml_metal_context * ctx, const case GGML_OP_ALIBI: case GGML_OP_ROPE: case GGML_OP_IM2COL: + return true; + case GGML_OP_POOL_1D: + case GGML_OP_POOL_2D: + return false; case GGML_OP_UPSCALE: case GGML_OP_PAD: case GGML_OP_ARGSORT: @@ -2015,7 +2021,7 @@ static bool ggml_metal_graph_compute( { GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; @@ -2023,6 +2029,7 @@ static bool ggml_metal_graph_compute( const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; const int32_t N = src1->ne[is_2D ? 3 : 2]; @@ -2043,8 +2050,8 @@ static bool ggml_metal_graph_compute( id pipeline = nil; - switch (src0->type) { - case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; + switch (dst->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F32].pipeline; break; case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; default: GGML_ASSERT(false); }; diff --git a/ggml-metal.metal b/ggml-metal.metal index 2614d82e8..efed6ad46 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1775,9 +1775,29 @@ kernel void kernel_rope( template [[host_name("kernel_rope_f32")]] kernel rope_t kernel_rope; template [[host_name("kernel_rope_f16")]] kernel rope_t kernel_rope; -kernel void kernel_im2col_f16( +typedef void (im2col_t)( device const float * x, - device half * dst, + device char * dst, + constant int32_t & ofs0, + constant int32_t & ofs1, + constant int32_t & IW, + constant int32_t & IH, + constant int32_t & CHW, + constant int32_t & s0, + constant int32_t & s1, + constant int32_t & p0, + constant int32_t & p1, + constant int32_t & d0, + constant int32_t & d1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]); + +template +kernel void kernel_im2col( + device const float * x, + device char * dst, constant int32_t & ofs0, constant int32_t & ofs1, constant int32_t & IW, @@ -1800,14 +1820,19 @@ kernel void kernel_im2col_f16( (tpitg[0] * tgpg[1] * tgpg[2] + tgpig[1] * tgpg[2] + tgpig[2]) * CHW + (tgpig[0] * (ntg[1] * ntg[2]) + tpitg[1] * ntg[2] + tpitg[2]); + device T * pdst = (device T *) (dst); + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { - dst[offset_dst] = 0.0f; + pdst[offset_dst] = 0.0f; } else { const int32_t offset_src = tpitg[0] * ofs0 + tgpig[0] * ofs1; - dst[offset_dst] = x[offset_src + iih * IW + iiw]; + pdst[offset_dst] = x[offset_src + iih * IW + iiw]; } } +template [[host_name("kernel_im2col_f32")]] kernel im2col_t kernel_im2col; +template [[host_name("kernel_im2col_f16")]] kernel im2col_t kernel_im2col; + kernel void kernel_upscale_f32( device const char * src0, device char * dst, From 5cb04dbc16d1da38c8fdcc0111b40e67d00dd1c3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 17:30:17 +0200 Subject: [PATCH 480/811] llama : remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD (#5240) * llama : remove LLAMA_MAX_DEVICES from llama.h ggml-ci * Update llama.cpp Co-authored-by: slaren * server : remove LLAMA_MAX_DEVICES ggml-ci 
* llama : remove LLAMA_SUPPORTS_GPU_OFFLOAD ggml-ci * train : remove LLAMA_SUPPORTS_GPU_OFFLOAD * readme : add deprecation notice * readme : change deprecation notice to "remove" and fix url * llama : remove gpu includes from llama.h ggml-ci --------- Co-authored-by: slaren --- README.md | 3 +- common/common.cpp | 56 ++++++++++---------- common/common.h | 66 ++++++++++++------------ common/train.cpp | 12 ++--- examples/batched-bench/batched-bench.cpp | 2 +- examples/llama-bench/llama-bench.cpp | 16 +++--- examples/server/server.cpp | 44 ++++++++-------- llama.cpp | 39 +++++++++++--- llama.h | 29 ++++------- 9 files changed, 143 insertions(+), 124 deletions(-) diff --git a/README.md b/README.md index 7746cb510..e6ed1d429 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,8 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics -- ⚠️ Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 +- Remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD: https://github.com/ggerganov/llama.cpp/pull/5240 +- Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 - [SYCL backend](README-sycl.md) is ready (1/28/2024), support Linux/Windows in Intel GPUs (iGPU, Arc/Flex/Max series) - New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow - Collecting Apple Silicon performance stats: diff --git a/common/common.cpp b/common/common.cpp index 9d976c7c8..ce739b15c 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -583,20 +583,20 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_gpu_layers = std::stoi(argv[i]); -#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (!llama_supports_gpu_offload()) { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "--gpu-layers-draft" || arg == "-ngld" || arg == "--n-gpu-layers-draft") { if (++i >= argc) { invalid_param = true; break; } params.n_gpu_layers_draft = std::stoi(argv[i]); -#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (!llama_supports_gpu_offload()) { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "--main-gpu" || arg == "-mg") { if (++i >= argc) { invalid_param = true; @@ -637,11 +637,11 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - if (split_arg.size() >= LLAMA_MAX_DEVICES) { + if (split_arg.size() >= llama_max_devices()) { invalid_param = true; break; } - for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { + for (size_t i = 0; i < llama_max_devices(); ++i) { if (i < split_arg.size()) { params.tensor_split[i] = std::stof(split_arg[i]); } else { @@ -989,30 +989,30 @@ void 
gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA. see examples/llava/README.md\n"); printf(" --image IMAGE_FILE path to an image file. use with multimodal models\n"); - if (llama_mlock_supported()) { + if (llama_supports_mlock()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } - if (llama_mmap_supported()) { + if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } printf(" --numa attempt optimizations that help on some NUMA systems\n"); printf(" if run without this previously, it is recommended to drop the system page cache before using this\n"); printf(" see https://github.com/ggerganov/llama.cpp/issues/1437\n"); -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); - printf(" -ngld N, --n-gpu-layers-draft N\n"); - printf(" number of layers to store in VRAM for the draft model\n"); - printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); - printf(" -ts SPLIT, --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); -#endif // LLAMA_SUPPORTS_GPU_OFFLOAD + if (llama_supports_gpu_offload()) { + printf(" -ngl N, --n-gpu-layers N\n"); + printf(" number of layers to store in VRAM\n"); + printf(" -ngld N, --n-gpu-layers-draft N\n"); + printf(" number of layers to store in VRAM for the draft model\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); + printf(" -ts SPLIT, --tensor-split SPLIT\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); + } printf(" --verbose-prompt print a verbose prompt before generation (default: %s)\n", params.verbose_prompt ? "true" : "false"); printf(" --no-display-prompt don't print prompt at generation (default: %s)\n", !params.display_prompt ? "true" : "false"); printf(" -gan N, --grp-attn-n N\n"); @@ -1651,7 +1651,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cont_batching: %s # default: false\n", params.cont_batching ? 
"true" : "false"); fprintf(stream, "temp: %f # default: 0.8\n", sparams.temp); - const std::vector tensor_split_vector(params.tensor_split, params.tensor_split + LLAMA_MAX_DEVICES); + const std::vector tensor_split_vector(params.tensor_split, params.tensor_split + llama_max_devices()); dump_vector_float_yaml(stream, "tensor_split", tensor_split_vector); fprintf(stream, "tfs: %f # default: 1.0\n", sparams.tfs_z); diff --git a/common/common.h b/common/common.h index 214a379b5..24a99d728 100644 --- a/common/common.h +++ b/common/common.h @@ -43,40 +43,40 @@ extern char const *LLAMA_BUILD_TARGET; int32_t get_num_physical_cores(); struct gpt_params { - uint32_t seed = -1; // RNG seed + uint32_t seed = -1; // RNG seed - int32_t n_threads = get_num_physical_cores(); - int32_t n_threads_draft = -1; - int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) - int32_t n_threads_batch_draft = -1; - int32_t n_predict = -1; // new tokens to predict - int32_t n_ctx = 512; // context size - int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) - int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 8; // number of tokens to draft during speculative decoding - int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) - int32_t n_parallel = 1; // number of parallel sequences to decode - int32_t n_sequences = 1; // number of sequences to decode - float p_accept = 0.5f; // speculative decoding accept probability - float p_split = 0.1f; // speculative decoding split probability - int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) - int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) - llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs - int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors - float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs - int32_t n_beams = 0; // if non-zero then use beam search of given width. 
- int32_t grp_attn_n = 1; // group-attention factor - int32_t grp_attn_w = 512; // group-attention width - int32_t n_print = -1; // print token count every n tokens (-1 = disabled) - float rope_freq_base = 0.0f; // RoPE base frequency - float rope_freq_scale = 0.0f; // RoPE frequency scaling factor - float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor - float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor - float yarn_beta_fast = 32.0f; // YaRN low correction dim - float yarn_beta_slow = 1.0f; // YaRN high correction dim - int32_t yarn_orig_ctx = 0; // YaRN original context length - int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment - // pinging @cebtenzzre + int32_t n_threads = get_num_physical_cores(); + int32_t n_threads_draft = -1; + int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) + int32_t n_threads_batch_draft = -1; + int32_t n_predict = -1; // new tokens to predict + int32_t n_ctx = 512; // context size + int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) + int32_t n_keep = 0; // number of tokens to keep from initial prompt + int32_t n_draft = 8; // number of tokens to draft during speculative decoding + int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) + int32_t n_parallel = 1; // number of parallel sequences to decode + int32_t n_sequences = 1; // number of sequences to decode + float p_accept = 0.5f; // speculative decoding accept probability + float p_split = 0.1f; // speculative decoding split probability + int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) + int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) + llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs + int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors + float tensor_split[128] = {0}; // how split tensors should be distributed across GPUs + int32_t n_beams = 0; // if non-zero then use beam search of given width. 
+ int32_t grp_attn_n = 1; // group-attention factor + int32_t grp_attn_w = 512; // group-attention width + int32_t n_print = -1; // print token count every n tokens (-1 = disabled) + float rope_freq_base = 0.0f; // RoPE base frequency + float rope_freq_scale = 0.0f; // RoPE frequency scaling factor + float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor + float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor + float yarn_beta_fast = 32.0f; // YaRN low correction dim + float yarn_beta_slow = 1.0f; // YaRN high correction dim + int32_t yarn_orig_ctx = 0; // YaRN original context length + int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment + // pinging @cebtenzzre // // sampling parameters struct llama_sampling_params sparams; diff --git a/common/train.cpp b/common/train.cpp index e6f2f7a2f..e4c3d5df6 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -1363,12 +1363,12 @@ bool consume_common_train_arg( *invalid_param = true; return true; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - params->n_gpu_layers = std::stoi(argv[i]); -#else - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (llama_supports_gpu_offload()) { + params->n_gpu_layers = std::stoi(argv[i]); + } else { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "-h" || arg == "--help") { params->print_usage = true; return true; diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 7924db267..b52d68457 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -88,7 +88,7 @@ int main(int argc, char ** argv) { llama_model_params model_params = llama_model_default_params(); - const std::vector t_split (LLAMA_MAX_DEVICES, 0.0f); + const std::vector t_split(llama_max_devices(), 0.0f); model_params.n_gpu_layers = n_gpu_layers; model_params.tensor_split = t_split.data(); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 542cc7bb8..c5a6f744e 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -160,7 +160,7 @@ struct cmd_params { std::vector main_gpu; std::vector no_kv_offload; std::vector mul_mat_q; - std::vector> tensor_split; + std::vector> tensor_split; int reps; bool verbose; output_formats output_format; @@ -179,7 +179,7 @@ static const cmd_params cmd_params_defaults = { /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, - /* tensor_split */ {{}}, + /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, /* reps */ 5, /* verbose */ false, /* output_format */ MARKDOWN @@ -380,10 +380,10 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { const std::regex regex{R"([;/]+)"}; std::sregex_token_iterator it{ts.begin(), ts.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); + GGML_ASSERT(split_arg.size() <= llama_max_devices()); - std::array tensor_split; - for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { + std::vector tensor_split(llama_max_devices()); + for (size_t i = 0; i < llama_max_devices(); ++i) { if (i < split_arg.size()) { tensor_split[i] = 
std::stof(split_arg[i]); } else { @@ -459,7 +459,7 @@ struct cmd_params_instance { int main_gpu; bool no_kv_offload; bool mul_mat_q; - std::array tensor_split; + std::vector tensor_split; llama_model_params to_llama_mparams() const { llama_model_params mparams = llama_model_default_params(); @@ -582,7 +582,7 @@ struct test { int main_gpu; bool no_kv_offload; bool mul_mat_q; - std::array tensor_split; + std::vector tensor_split; int n_prompt; int n_gen; std::string test_time; @@ -704,7 +704,7 @@ struct test { std::vector get_values() const { std::string tensor_split_str; int max_nonzero = 0; - for (int i = 0; i < LLAMA_MAX_DEVICES; i++) { + for (size_t i = 0; i < llama_max_devices(); i++) { if (tensor_split[i] > 0) { max_nonzero = i; } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 21bdce8ed..ea77125ea 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1789,28 +1789,28 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); - if (llama_mlock_supported()) + if (llama_supports_mlock()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } - if (llama_mmap_supported()) + if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } printf(" --numa attempt optimizations that help on some NUMA systems\n"); -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); - printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); - printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row)\n"); -#endif + if (llama_supports_gpu_offload()) { + printf(" -ngl N, --n-gpu-layers N\n"); + printf(" number of layers to store in VRAM\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); + printf(" -ts SPLIT --tensor-split SPLIT\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 
3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); + } printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); printf(" -a ALIAS, --alias ALIAS\n"); @@ -2066,13 +2066,13 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - params.n_gpu_layers = std::stoi(argv[i]); -#else - LOG_WARNING("Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " + if (llama_supports_gpu_offload()) { + params.n_gpu_layers = std::stoi(argv[i]); + } else { + LOG_WARNING("Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " "See main README.md for information on enabling GPU BLAS support", {{"n_gpu_layers", params.n_gpu_layers}}); -#endif + } } else if (arg == "--split-mode" || arg == "-sm") { @@ -2115,9 +2115,9 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); + GGML_ASSERT(split_arg.size() <= llama_max_devices()); - for (size_t i_device = 0; i_device < LLAMA_MAX_DEVICES; ++i_device) + for (size_t i_device = 0; i_device < llama_max_devices(); ++i_device) { if (i_device < split_arg.size()) { diff --git a/llama.cpp b/llama.cpp index bb23689fa..9b249ba9c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10090,18 +10090,45 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { return result; } -int32_t llama_max_devices(void) { - return LLAMA_MAX_DEVICES; +size_t llama_max_devices(void) { +#if defined(GGML_USE_METAL) + return 1; +#elif defined(GGML_USE_CUBLAS) + return GGML_CUDA_MAX_DEVICES; +#elif defined(GGML_USE_SYCL) + return GGML_SYCL_MAX_DEVICES; +#else + return 1; +#endif } -bool llama_mmap_supported(void) { +bool llama_supports_mmap(void) { return llama_mmap::SUPPORTED; } -bool llama_mlock_supported(void) { +bool llama_supports_mlock(void) { return llama_mlock::SUPPORTED; } +bool llama_supports_gpu_offload(void) { +#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) || defined(GGML_USE_VULKAN) || \ + defined(GGML_USE_SYCL) || defined(GGML_USE_KOMPUTE) + // Defined when llama.cpp is compiled with support for offloading model layers to GPU. 
+ return true; +#else + return false; +#endif +} + +// deprecated: +bool llama_mmap_supported(void) { + return llama_supports_mmap(); +} + +bool llama_mlock_supported(void) { + return llama_supports_mlock(); +} + void llama_backend_init(bool numa) { ggml_time_init(); @@ -10133,8 +10160,8 @@ int64_t llama_time_us(void) { } struct llama_model * llama_load_model_from_file( - const char * path_model, - struct llama_model_params params) { + const char * path_model, + struct llama_model_params params) { ggml_time_init(); llama_model * model = new llama_model; diff --git a/llama.h b/llama.h index 17d43d039..9a60e9bfb 100644 --- a/llama.h +++ b/llama.h @@ -3,15 +3,7 @@ #include "ggml.h" #include "ggml-backend.h" -#ifdef GGML_USE_CUBLAS -#include "ggml-cuda.h" -#define LLAMA_MAX_DEVICES GGML_CUDA_MAX_DEVICES -#elif defined(GGML_USE_SYCL) -#include "ggml-sycl.h" -#define LLAMA_MAX_DEVICES GGML_SYCL_MAX_DEVICES -#else -#define LLAMA_MAX_DEVICES 1 -#endif // GGML_USE_CUBLAS + #include #include #include @@ -49,12 +41,6 @@ #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN #define LLAMA_SESSION_VERSION 4 -#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) || defined(GGML_USE_VULKAN) || \ - defined(GGML_USE_SYCL) || defined(GGML_USE_KOMPUTE) -// Defined when llama.cpp is compiled with support for offloading model layers to GPU. -#define LLAMA_SUPPORTS_GPU_OFFLOAD -#endif - #ifdef __cplusplus extern "C" { #endif @@ -201,7 +187,7 @@ extern "C" { // LLAMA_SPLIT_LAYER: ignored int32_t main_gpu; - // proportion of the model (layers or rows) to offload to each GPU, size: LLAMA_MAX_DEVICES + // proportion of the model (layers or rows) to offload to each GPU, size: llama_max_devices() const float * tensor_split; // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. 
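The pattern used across common.cpp, llama-bench and the server above translates directly to user code. Below is a minimal caller-side sketch of the new runtime queries; it is illustrative only, and the layer count and split proportions are placeholders rather than values from the patch:

```cpp
#include <cstdio>
#include <vector>
#include "llama.h"

static struct llama_model * load_with_split(const char * path_model) {
    if (!llama_supports_gpu_offload()) {
        fprintf(stderr, "warning: built without GPU offload, n_gpu_layers/tensor_split are ignored\n");
    }

    // size the split at runtime instead of with the old LLAMA_MAX_DEVICES macro
    std::vector<float> tensor_split(llama_max_devices(), 0.0f);
    if (!tensor_split.empty()) {
        tensor_split[0] = 1.0f; // keep the whole model on the first device in this sketch
    }

    struct llama_model_params mparams = llama_model_default_params();
    mparams.n_gpu_layers = 99;                  // offload everything that fits
    mparams.tensor_split = tensor_split.data(); // proportions, size: llama_max_devices()

    return llama_load_model_from_file(path_model, mparams);
}
```

Note that llama_max_devices() now returns size_t and reports the maximum of the active backend (GGML_CUDA_MAX_DEVICES, GGML_SYCL_MAX_DEVICES, or 1), so fixed-size arrays in callers become vectors sized at startup, as in the llama-bench and batched-bench changes above.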
@@ -338,9 +324,14 @@ extern "C" { LLAMA_API int64_t llama_time_us(void); - LLAMA_API int32_t llama_max_devices(void); - LLAMA_API bool llama_mmap_supported (void); - LLAMA_API bool llama_mlock_supported(void); + LLAMA_API size_t llama_max_devices(void); + + LLAMA_API bool llama_supports_mmap (void); + LLAMA_API bool llama_supports_mlock (void); + LLAMA_API bool llama_supports_gpu_offload(void); + + LLAMA_API DEPRECATED(bool llama_mmap_supported (void), "use llama_supports_mmap() instead"); + LLAMA_API DEPRECATED(bool llama_mlock_supported(void), "use llama_supports_mlock() instead"); LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); From d3bac7d58408c602ec1f1e423695f1df8410bb03 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 18:47:10 +0200 Subject: [PATCH 481/811] llama : reorder build_orion() at correct place (#5118) --- llama.cpp | 239 +++++++++++++++++++++++++++--------------------------- 1 file changed, 119 insertions(+), 120 deletions(-) diff --git a/llama.cpp b/llama.cpp index 9b249ba9c..02b0a485a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4666,126 +4666,6 @@ struct llm_build_context { ctx0 = nullptr; } } - struct ggml_cgraph * build_orion() { - struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); - - const int64_t n_embd_head = hparams.n_embd_head_v; - GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - cb(inp_pos, "inp_pos", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); - cb(KQ_mask, "KQ_mask", -1); - - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; - - // norm - cur = llm_build_norm(ctx0, inpL, hparams, - model.layers[il].attn_norm, model.layers[il].attn_norm_b, - LLM_NORM, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - // compute Q and K and RoPE them - struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); - // if (model.layers[il].bq) { - // Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); - // cb(Qcur, "Qcur", il); - // } - - struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); - // if (model.layers[il].bk) { - // Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); - // cb(Kcur, "Kcur", il); - // } - - struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - // if (model.layers[il].bv) { - // Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); - // cb(Vcur, "Vcur", il); - // } - - Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Qcur, "Qcur", il); - - Kcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - 
hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Kcur, "Kcur", il); - - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - cur = llm_build_norm(ctx0, ffn_inp, hparams, - model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, - LLM_NORM, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - model.layers[il].ffn_gate, NULL, - model.layers[il].ffn_down, NULL, - NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, hparams, - model.output_norm, model.output_norm_b, - LLM_NORM, cb, -1); - cb(cur, "result_norm", -1); - - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - return gf; - } - - struct ggml_cgraph * build_llama() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -6589,6 +6469,125 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_orion() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + // if (model.layers[il].bq) { + // Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + // cb(Qcur, "Qcur", il); + // } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + // if (model.layers[il].bk) { + // Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + // cb(Kcur, "Kcur", il); + // } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + // if (model.layers[il].bv) { + // Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + // cb(Vcur, "Vcur", il); + // } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), 
inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph( From 1cfb5372cf5707c8ec6dde7c874f4a44a6c4c915 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Wed, 31 Jan 2024 19:21:55 +0000 Subject: [PATCH 482/811] Fix broken Vulkan Cmake (properly) (#5230) * build vulkan as object * vulkan ci --- .github/workflows/build.yml | 6 ++++-- CMakeLists.txt | 8 ++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c6db1666e..f4c374ce5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -356,6 +356,8 @@ jobs: defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"' - build: 'kompute' defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON -DBUILD_SHARED_LIBS=ON' + - build: 'vulkan' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_VULKAN=ON -DBUILD_SHARED_LIBS=ON' steps: - name: Clone @@ -406,7 +408,7 @@ jobs: - name: Install Vulkan SDK id: get_vulkan - if: ${{ matrix.build == 'kompute' }} + if: ${{ matrix.build == 'kompute' || matrix.build == 'vulkan' }} run: | curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/VulkanSDK-${env:VULKAN_VERSION}-Installer.exe" & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install @@ -451,7 +453,7 @@ jobs: - name: Test id: cmake_test # not all machines have native AVX-512 - if: ${{ matrix.build != 'clblast' && matrix.build != 'kompute' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} + if: ${{ matrix.build != 'clblast' && matrix.build != 'kompute' && matrix.build != 'vulkan' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} run: | cd build ctest -L main 
-C Release --verbose --timeout 900 diff --git a/CMakeLists.txt b/CMakeLists.txt index 15a1101aa..1ee455b3a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -423,10 +423,7 @@ if (LLAMA_VULKAN) if (Vulkan_FOUND) message(STATUS "Vulkan found") - set(GGML_HEADERS_VULKAN ggml-vulkan.h) - set(GGML_SOURCES_VULKAN ggml-vulkan.cpp) - - add_library(ggml-vulkan STATIC ggml-vulkan.cpp ggml-vulkan.h) + add_library(ggml-vulkan OBJECT ggml-vulkan.cpp ggml-vulkan.h) if (BUILD_SHARED_LIBS) set_target_properties(ggml-vulkan PROPERTIES POSITION_INDEPENDENT_CODE ON) endif() @@ -1012,7 +1009,6 @@ add_library(ggml OBJECT ggml-quants.h ${GGML_SOURCES_CUDA} ${GGML_HEADERS_CUDA} ${GGML_SOURCES_OPENCL} ${GGML_HEADERS_OPENCL} - ${GGML_SOURCES_VULKAN} ${GGML_HEADERS_VULKAN} ${GGML_SOURCES_METAL} ${GGML_HEADERS_METAL} ${GGML_SOURCES_MPI} ${GGML_HEADERS_MPI} ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} @@ -1094,7 +1090,7 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/LlamaConfig.cmake DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/Llama) set(GGML_PUBLIC_HEADERS "ggml.h" "ggml-alloc.h" "ggml-backend.h" - "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_VULKAN}" + "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_METAL}" "${GGML_HEADERS_MPI}" "${GGML_HEADERS_EXTRA}") set_target_properties(ggml PROPERTIES PUBLIC_HEADER "${GGML_PUBLIC_HEADERS}") From ce32060198b7e2d6a13a9b8e1e1369e3c295ae2a Mon Sep 17 00:00:00 2001 From: Guoteng <32697156+SolenoidWGT@users.noreply.github.com> Date: Thu, 1 Feb 2024 17:19:51 +0800 Subject: [PATCH 483/811] llama : support InternLM2 (#5184) * support InternLM2 inference * add add_space_prefix KV pair --- convert-hf-to-gguf.py | 152 ++++++++++++++++++++++++ gguf-py/gguf/constants.py | 18 +++ gguf-py/gguf/gguf_writer.py | 3 + gguf-py/gguf/tensor_mapping.py | 14 ++- llama.cpp | 205 ++++++++++++++++++++++++++++++++- 5 files changed, 387 insertions(+), 5 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 6ab7f486e..4ebab07b3 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -203,6 +203,8 @@ class Model: return CodeShellModel if model_architecture == "OrionForCausalLM": return OrionModel + if model_architecture == "InternLM2ForCausalLM": + return InternLM2Model return Model def _is_model_safetensors(self) -> bool: @@ -254,6 +256,8 @@ class Model: return gguf.MODEL_ARCH.CODESHELL if arch == "OrionForCausalLM": return gguf.MODEL_ARCH.ORION + if arch == "InternLM2ForCausalLM": + return gguf.MODEL_ARCH.INTERNLM2 raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1344,6 +1348,154 @@ class CodeShellModel(Model): self.gguf_writer.add_tensor("output.weight", data) print(name, f"=> output.weight, shape = {data.shape}, {old_dtype} --> {data.dtype}") + +class InternLM2Model(Model): + def set_vocab(self): + # (TODO): Is there a better way? + # Copy from _set_vocab_sentencepiece, The only difference is that we will treat the character + # \x00 specially and convert it into an emoji character to prevent it from being mistakenly + # recognized as an empty string in C++. 
+ from sentencepiece import SentencePieceProcessor + from sentencepiece import sentencepiece_model_pb2 as model + + tokenizer_path = self.dir_model / 'tokenizer.model' + + tokens: list[bytes] = [] + scores: list[float] = [] + toktypes: list[int] = [] + + if not tokenizer_path.is_file(): + print(f'Error: Missing {tokenizer_path}', file=sys.stderr) + sys.exit(1) + + sentencepiece_model = model.ModelProto() + sentencepiece_model.ParseFromString(open(tokenizer_path, "rb").read()) + add_prefix = sentencepiece_model.normalizer_spec.add_dummy_prefix + + tokenizer = SentencePieceProcessor(str(tokenizer_path)) + vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size()) + + for token_id in range(vocab_size): + piece = tokenizer.id_to_piece(token_id) + text = piece.encode("utf-8") + score = tokenizer.get_score(token_id) + if text == b"\x00": + # (TODO): fixme + # Hack here and replace the \x00 characters. + print(f"InternLM2 convert token '{text}' to '🐉'!") + text = "🐉" + + toktype = SentencePieceTokenTypes.NORMAL + if tokenizer.is_unknown(token_id): + toktype = SentencePieceTokenTypes.UNKNOWN + elif tokenizer.is_control(token_id): + toktype = SentencePieceTokenTypes.CONTROL + elif tokenizer.is_unused(token_id): + toktype = SentencePieceTokenTypes.UNUSED + elif tokenizer.is_byte(token_id): + toktype = SentencePieceTokenTypes.BYTE + + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + added_tokens_file = self.dir_model / 'added_tokens.json' + if added_tokens_file.is_file(): + with open(added_tokens_file, "r", encoding="utf-8") as f: + added_tokens_json = json.load(f) + + for key in added_tokens_json: + tokens.append(key.encode("utf-8")) + scores.append(-1000.0) + toktypes.append(SentencePieceTokenTypes.USER_DEFINED) + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + self.gguf_writer.add_add_space_prefix(add_prefix) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + def set_gguf_parameters(self): + self.gguf_writer.add_name("InternLM2") + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_block_count(self.hparams["num_hidden_layers"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) + + def post_write_tensors(self, tensor_map, name, data_torch): + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor(new_name, data) + + def write_tensors(self): + from einops import rearrange + + num_heads = self.hparams.get("num_attention_heads") + num_kv_heads = self.hparams.get("num_key_value_heads") + hidden_size = self.hparams.get("hidden_size") + q_per_kv = num_heads // num_kv_heads + head_dim = hidden_size // num_heads + num_groups = num_heads // q_per_kv + + block_count = self.hparams["num_hidden_layers"] + model_kv = dict(self.get_tensors()) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + qkv_pattern = r"model\.layers\.(\d+)\.attention\.wqkv" + for name, data_torch in model_kv.items(): + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue + + if re.match(qkv_pattern, name): + bid = re.findall(qkv_pattern, name)[0] + qkv = data_torch + qkv = rearrange(qkv.T, " o (g n i) ->o g n i", g=num_groups, n=q_per_kv + 2, i=head_dim) + q, k, v = qkv[..., : q_per_kv, :], qkv[..., q_per_kv: q_per_kv + 1, :], qkv[..., q_per_kv + 1: q_per_kv + 2, :] + q = rearrange(q, " o g n i -> o (g n i)").T + k = rearrange(k, " o g n i -> o (g n i)").T + v = rearrange(v, " o g n i -> o (g n i)").T + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wq.weight", q) + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wk.weight", k) + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wv.weight", v) + else: + self.post_write_tensors(tensor_map, name, data_torch) + + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index f5c933a41..ed8e26f83 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -72,6 +72,7 @@ class Keys: PAD_ID = "tokenizer.ggml.padding_token_id" ADD_BOS = "tokenizer.ggml.add_bos_token" ADD_EOS = "tokenizer.ggml.add_eos_token" + ADD_PREFIX = "tokenizer.ggml.add_space_prefix" HF_JSON = "tokenizer.huggingface.json" RWKV = "tokenizer.rwkv.world" CHAT_TEMPLATE = "tokenizer.chat_template" @@ -102,6 +103,7 @@ class MODEL_ARCH(IntEnum): PLAMO = auto() CODESHELL = auto() ORION = auto() + INTERNLM2 = auto() class MODEL_TENSOR(IntEnum): @@ -153,6 +155,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.PLAMO: "plamo", MODEL_ARCH.CODESHELL: "codeshell", MODEL_ARCH.ORION: "orion", + MODEL_ARCH.INTERNLM2: "internlm2", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -446,6 +449,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.INTERNLM2: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], # TODO } diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index d93aaa877..16808196e 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -411,6 +411,9 @@ class GGUFWriter: def add_add_eos_token(self, value: bool) -> None: 
self.add_bool(Keys.Tokenizer.ADD_EOS, value) + def add_add_space_prefix(self, value: bool) -> None: + self.add_bool(Keys.Tokenizer.ADD_PREFIX, value) + def add_chat_template(self, value: str) -> None: self.add_string(Keys.Tokenizer.CHAT_TEMPLATE, value) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index de177af13..4f16d8504 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -19,6 +19,7 @@ class TensorNameMap: "language_model.embedding.word_embeddings", # persimmon "wte", # gpt2 "transformer.embd.wte", # phi2 + "model.tok_embeddings", # internlm2 ), # Token type embeddings @@ -42,7 +43,7 @@ class TensorNameMap: MODEL_TENSOR.OUTPUT: ( "embed_out", # gptneox "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen - "output", # llama-pth bloom + "output", # llama-pth bloom internlm2 "word_embeddings_for_head", # persimmon "lm_head.linear", # phi2 ), @@ -51,7 +52,7 @@ class TensorNameMap: MODEL_TENSOR.OUTPUT_NORM: ( "gpt_neox.final_layer_norm", # gptneox "transformer.ln_f", # gpt2 gpt-j falcon - "model.norm", # llama-hf baichuan + "model.norm", # llama-hf baichuan internlm2 "norm", # llama-pth "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt @@ -84,6 +85,7 @@ class TensorNameMap: "h.{bid}.ln_1", # gpt2 "transformer.h.{bid}.ln", # phi2 "model.layers.layers.{bid}.norm", # plamo + "model.layers.{bid}.attention_norm", # internlm2 ), # Attention norm 2 @@ -111,6 +113,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.query", # bert "transformer.h.{bid}.attn.q_proj", # gpt-j "model.layers.layers.{bid}.self_attn.q_proj", # plamo + "model.layers.{bid}.attention.wq" # internlm2 ), # Attention key @@ -120,6 +123,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.key", # bert "transformer.h.{bid}.attn.k_proj", # gpt-j "model.layers.layers.{bid}.self_attn.k_proj", # plamo + "model.layers.{bid}.attention.wk" # internlm2 ), # Attention value @@ -129,6 +133,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.value", # bert "transformer.h.{bid}.attn.v_proj", # gpt-j "model.layers.layers.{bid}.self_attn.v_proj", # plamo + "model.layers.{bid}.attention.wv" # internlm2 ), # Attention output @@ -147,6 +152,7 @@ class TensorNameMap: "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo + "model.layers.{bid}.attention.wo", # internlm2 ), # Rotary embeddings @@ -169,6 +175,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi "h.{bid}.ln_2", # gpt2 + "model.layers.{bid}.ffn_norm", # internlm2 ), MODEL_TENSOR.FFN_GATE_INP: ( @@ -194,6 +201,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc1", # phi2 "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo + "model.layers.{bid}.feed_forward.w3", # internlm2 ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -212,6 +220,7 @@ class TensorNameMap: "layers.{bid}.feed_forward.w1", # llama-pth "transformer.h.{bid}.mlp.w2", # qwen "model.layers.layers.{bid}.mlp.gate_proj", # plamo + "model.layers.{bid}.feed_forward.w1", # internlm2 ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -236,6 +245,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo + "model.layers.{bid}.feed_forward.w2", # internlm2 ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index 02b0a485a..e8f44c2cb 100644 --- a/llama.cpp +++ b/llama.cpp 
@@ -204,6 +204,7 @@ enum llm_arch { LLM_ARCH_PLAMO, LLM_ARCH_CODESHELL, LLM_ARCH_ORION, + LLM_ARCH_INTERNLM2, LLM_ARCH_UNKNOWN, }; @@ -226,6 +227,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_PLAMO, "plamo" }, { LLM_ARCH_CODESHELL, "codeshell" }, { LLM_ARCH_ORION, "orion" }, + { LLM_ARCH_INTERNLM2, "internlm2" }, }; enum llm_kv { @@ -278,6 +280,7 @@ enum llm_kv { LLM_KV_TOKENIZER_PAD_ID, LLM_KV_TOKENIZER_ADD_BOS, LLM_KV_TOKENIZER_ADD_EOS, + LLM_KV_TOKENIZER_ADD_PREFIX, LLM_KV_TOKENIZER_HF_JSON, LLM_KV_TOKENIZER_RWKV, }; @@ -332,6 +335,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TOKENIZER_PAD_ID, "tokenizer.ggml.padding_token_id" }, { LLM_KV_TOKENIZER_ADD_BOS, "tokenizer.ggml.add_bos_token" }, { LLM_KV_TOKENIZER_ADD_EOS, "tokenizer.ggml.add_eos_token" }, + { LLM_KV_TOKENIZER_ADD_PREFIX, "tokenizer.ggml.add_space_prefix" }, { LLM_KV_TOKENIZER_HF_JSON, "tokenizer.huggingface.json" }, { LLM_KV_TOKENIZER_RWKV, "tokenizer.rwkv.world" }, }; @@ -669,7 +673,23 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, - + { + LLM_ARCH_INTERNLM2, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -1377,6 +1397,7 @@ enum e_model { MODEL_13B, MODEL_14B, MODEL_15B, + MODEL_20B, MODEL_30B, MODEL_34B, MODEL_40B, @@ -1618,6 +1639,8 @@ struct llama_vocab { id special_suffix_id = 32008; id special_eot_id = 32010; + bool add_space_prefix = true; + int find_bpe_rank(const std::string & token_left, const std::string & token_right) const { GGML_ASSERT(token_left.find(' ') == std::string::npos); GGML_ASSERT(token_left.find('\n') == std::string::npos); @@ -2731,6 +2754,7 @@ static const char * llama_model_type_name(e_model type) { case MODEL_13B: return "13B"; case MODEL_14B: return "14B"; case MODEL_15B: return "15B"; + case MODEL_20B: return "20B"; case MODEL_30B: return "30B"; case MODEL_34B: return "34B"; case MODEL_40B: return "40B"; @@ -2743,6 +2767,14 @@ static const char * llama_model_type_name(e_model type) { default: return "?B"; } } +static const char * llama_model_vocab_type_name(enum llama_vocab_type type){ + switch (type) { + case LLAMA_VOCAB_TYPE_SPM: return "SPM"; + case LLAMA_VOCAB_TYPE_BPE: return "BPE"; + default: return "unknown"; + } +} + static void llm_load_arch(llama_model_loader & ml, llama_model & model) { model.arch = ml.get_arch(); @@ -3006,6 +3038,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_INTERNLM2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_7B; break; + case 48: model.type = e_model::MODEL_20B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3057,6 +3098,11 @@ static void llm_load_vocab( vocab.special_unk_id = 0; vocab.special_sep_id = -1; vocab.special_pad_id = -1; + + const int add_space_prefix_keyidx = gguf_find_key(ctx, kv(LLM_KV_TOKENIZER_ADD_PREFIX).c_str()); + if (add_space_prefix_keyidx != -1) { + vocab.add_space_prefix = 
gguf_get_val_bool(ctx, add_space_prefix_keyidx); + } // The default value of add_space_prefix is true. } else if (tokenizer_name == "gpt2") { vocab.type = LLAMA_VOCAB_TYPE_BPE; @@ -3269,7 +3315,7 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { // hparams LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(ml.fver)); LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch).c_str()); - LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, vocab.type == LLAMA_VOCAB_TYPE_SPM ? "SPM" : "BPE"); // TODO: fix + LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, llama_model_vocab_type_name(vocab.type)); LLAMA_LOG_INFO("%s: n_vocab = %u\n", __func__, hparams.n_vocab); LLAMA_LOG_INFO("%s: n_merges = %u\n", __func__, (int) vocab.bpe_ranks.size()); LLAMA_LOG_INFO("%s: n_ctx_train = %u\n", __func__, hparams.n_ctx_train); @@ -4018,8 +4064,35 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; + case LLM_ARCH_INTERNLM2: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + // output + { + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + // layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -6588,6 +6661,126 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_internlm2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 
0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + }; static struct ggml_cgraph * llama_build_graph( @@ -6746,6 +6939,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_orion(); } break; + case LLM_ARCH_INTERNLM2: + { + result = llm.build_internlm2(); + } break; default: GGML_ASSERT(false); } @@ -7688,7 +7885,9 @@ static std::vector llama_tokenize_internal(const llama_vocab & // auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); if (&fragment == &fragment_buffer.front()) { - raw_text = " " + raw_text; // prefix with space if the first token is not special + if (vocab.add_space_prefix) { + raw_text = " " + raw_text; // prefix with space if the first token is not special + } } #ifdef PRETOKENIZERDEBUG From d71ac90985854b0905e1abba778e407e17f9f887 Mon Sep 17 00:00:00 2001 From: Ali Nehzat Date: Fri, 2 Feb 2024 
02:18:53 +1100 Subject: [PATCH 484/811] make : generate .a library for static linking (#5205) --- Makefile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 781f0bf8c..bf9e085de 100644 --- a/Makefile +++ b/Makefile @@ -586,8 +586,11 @@ train.o: common/train.cpp common/train.h libllama.so: llama.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -shared -fPIC -o $@ $^ $(LDFLAGS) +libllama.a: llama.o ggml.o $(OBJS) $(COMMON_DEPS) + ar rcs libllama.a llama.o ggml.o $(OBJS) $(COMMON_DEPS) + clean: - rm -vrf *.o tests/*.o *.so *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + rm -vrf *.o tests/*.o *.so *.a *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) # # Examples From 8ca511cadee2c67f0bd8c7034a2513778ee9a1b7 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 1 Feb 2024 18:30:17 +0100 Subject: [PATCH 485/811] cuda : fix LLAMA_CUDA_F16 (#5262) --- ggml-cuda.cu | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e56595742..3242a0b4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8657,9 +8657,9 @@ static void ggml_cuda_op_dequantize_mul_mat_vec( if (src1_convert_f16) { src1_dfloat = src1_dfloat_a.alloc(ne00); - ggml_cpy_f32_f16_cuda((const char *) src1_ddf_i, (char *) src1_dfloat, ne00, - ne00, 1, sizeof(float), 0, 0, - ne00, 1, sizeof(half), 0, 0, stream); + const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); + GGML_ASSERT(to_fp16_cuda != nullptr); + to_fp16_cuda(src1_ddf_i, src1_dfloat, ne00, stream); } #else const dfloat * src1_dfloat = (const dfloat *) src1_ddf_i; // dfloat == float, no conversion From 4d0924a8902010d31bd737b6f1f594943d120d0f Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Thu, 1 Feb 2024 19:25:24 +0100 Subject: [PATCH 486/811] Vulkan Phi Fix for AMD Proprietary Drivers (#5260) * Replace tanh to avoid NaN in gelu shader on AMD proprietary driver * Fix another Vulkan CPY buffer size bug --- ggml-vulkan-shaders.hpp | 132 +++++++++++++++++++----------------- ggml-vulkan.cpp | 17 +++-- ggml_vk_generate_shaders.py | 3 +- 3 files changed, 83 insertions(+), 69 deletions(-) diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index e2e9be22c..195410c02 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -14670,14 +14670,14 @@ const uint64_t f32_to_f16_fp32_len = 1596; unsigned char gelu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x45,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -14696,15 +14696,15 @@ unsigned char gelu_f32_data[] = { 0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x35,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x38,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x38,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -14731,64 +14731,70 @@ unsigned char gelu_f32_data[] = { 0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x29,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2a,0x42,0x4c,0x3f, +0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x13,0x27,0x37,0x3d,0x1d,0x00,0x03,0x00, +0x35,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x38,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x00,0x00,0x80,0x3f, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x2a,0x42,0x4c,0x3f,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x13,0x27,0x37,0x3d,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x43,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x43,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x3a,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x2b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x40, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x49,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x49,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x85,0x00,0x05,0x00, 0x11,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x11,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 0x28,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x11,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x0c,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x43,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x43,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x26,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x45,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x49,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x49,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t gelu_f32_len = 1408; +const uint64_t gelu_f32_len = 1484; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index bccc40bf5..b1e0006bb 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -2876,6 +2876,9 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm x_sz = ggml_nbytes(src0); d_sz = ggml_nbytes(dst); + if (extra_src0->offset + x_sz >= d_X->size) { + x_sz = VK_WHOLE_SIZE; + } if (extra->offset + d_sz >= d_D->size) { d_sz = VK_WHOLE_SIZE; } @@ -2911,12 +2914,16 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm break; } - x_sz *= ne02 * ne03; - if (y_sz != VK_WHOLE_SIZE) { - y_sz *= ne12 * ne13; - } if (op != GGML_OP_CPY) { - d_sz *= ne02 * ne03; + if (x_sz != VK_WHOLE_SIZE) { + x_sz *= ne02 * ne03; + } + if (y_sz != VK_WHOLE_SIZE) { + y_sz *= ne12 * ne13; + } + if (d_sz != VK_WHOLE_SIZE) { + d_sz *= ne02 * ne03; + } } if (!use_src1 && op == GGML_OP_SOFT_MAX) { diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index 6b1b82bf3..67981a751 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -1689,7 +1689,8 @@ void main() { } const float xi = float(data_a[i]); - data_d[i] = D_TYPE(0.5f*xi*(1.0f + tanh(SQRT_2_OVER_PI*xi*(1.0f + GELU_COEF_A*xi*xi)))); + const float val = SQRT_2_OVER_PI*xi*(1.0f + GELU_COEF_A*xi*xi); + data_d[i] = D_TYPE(0.5f*xi*(2.0f - 2.0f / (exp(2 * val) + 1))); } """ From 128dcbd3c9c4b12f42b560a4430427d7b2828628 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Fri, 2 Feb 2024 03:48:53 +0800 Subject: [PATCH 487/811] add --no-mmap in llama-bench (#5257) * add --no-mmap, show sycl backend * fix conflict * fix code format, change print for --no-mmap * ren no_mmap to mmap, show mmap when not default value in printer * update guide for mmap * mv position to reduce model reload --- README-sycl.md | 2 +- examples/llama-bench/llama-bench.cpp | 60 +++++++++++++++++++++++++--- ggml-sycl.cpp | 
34 +++++++++++++++- ggml-sycl.h | 3 +- 4 files changed, 89 insertions(+), 10 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index 2b2cfe03a..b8ee212b8 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -405,7 +405,7 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device llama.cpp use mmap as default way to read model file and copy to GPU. In some system, memcpy will be abnormal and block. - Solution: add **--no-mmap**. + Solution: add **--no-mmap** or **--mmap 0**. ## Q&A diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index c5a6f744e..e36c061a2 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -20,6 +20,7 @@ #include "llama.h" #include "common.h" #include "ggml-cuda.h" +#include "ggml-sycl.h" // utils static uint64_t get_time_ns() { @@ -120,6 +121,22 @@ static std::string get_gpu_info() { id += "/"; } } +#endif +#ifdef GGML_USE_SYCL + int device_list[GGML_SYCL_MAX_DEVICES]; + ggml_sycl_get_gpu_list(device_list, GGML_SYCL_MAX_DEVICES); + + for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) { + if (device_list[i] >0 ){ + char buf[128]; + ggml_sycl_get_device_description(i, buf, sizeof(buf)); + id += buf; + id += "/"; + } + } + if (id.length() >2 ) { + id.pop_back(); + } #endif // TODO: other backends return id; @@ -161,6 +178,7 @@ struct cmd_params { std::vector no_kv_offload; std::vector mul_mat_q; std::vector> tensor_split; + std::vector use_mmap; int reps; bool verbose; output_formats output_format; @@ -180,6 +198,7 @@ static const cmd_params cmd_params_defaults = { /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, + /* use_mmap */ {true}, /* reps */ 5, /* verbose */ false, /* output_format */ MARKDOWN @@ -201,6 +220,7 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -sm, --split-mode (default: %s)\n", join(transform_to_str(cmd_params_defaults.split_mode, split_mode_str), ",").c_str()); printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); + printf(" -mmp, --mmap <0|1> (default: %s)\n", join(cmd_params_defaults.use_mmap, ",").c_str()); printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); printf(" -ts, --tensor_split (default: 0)\n"); printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); @@ -370,6 +390,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.mul_mat_q.insert(params.mul_mat_q.end(), p.begin(), p.end()); + } else if (arg == "-mmp" || arg == "--mmap") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.use_mmap.insert(params.use_mmap.end(), p.begin(), p.end()); } else if (arg == "-ts" || arg == "--tensor-split") { if (++i >= argc) { invalid_param = true; @@ -441,6 +468,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } + if (params.use_mmap.empty()) { params.use_mmap = cmd_params_defaults.use_mmap; } if (params.n_threads.empty()) { params.n_threads = 
cmd_params_defaults.n_threads; } return params; @@ -460,6 +488,7 @@ struct cmd_params_instance { bool no_kv_offload; bool mul_mat_q; std::vector tensor_split; + bool use_mmap; llama_model_params to_llama_mparams() const { llama_model_params mparams = llama_model_default_params(); @@ -468,6 +497,7 @@ struct cmd_params_instance { mparams.split_mode = split_mode; mparams.main_gpu = main_gpu; mparams.tensor_split = tensor_split.data(); + mparams.use_mmap = use_mmap; return mparams; } @@ -477,6 +507,7 @@ struct cmd_params_instance { n_gpu_layers == other.n_gpu_layers && split_mode == other.split_mode && main_gpu == other.main_gpu && + use_mmap == other.use_mmap && tensor_split == other.tensor_split; } @@ -503,6 +534,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & sm : params.split_mode) for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) + for (const auto & mmp : params.use_mmap) for (const auto & nb : params.n_batch) for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) @@ -527,6 +559,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, + /* .use_mmap = */ mmp, }; instances.push_back(instance); } @@ -549,6 +582,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, + /* .use_mmap = */ mmp, }; instances.push_back(instance); } @@ -565,6 +599,7 @@ struct test { static const bool vulkan; static const bool kompute; static const bool metal; + static const bool sycl; static const bool gpu_blas; static const bool blas; static const std::string cpu_info; @@ -583,6 +618,7 @@ struct test { bool no_kv_offload; bool mul_mat_q; std::vector tensor_split; + bool use_mmap; int n_prompt; int n_gen; std::string test_time; @@ -605,6 +641,7 @@ struct test { no_kv_offload = inst.no_kv_offload; mul_mat_q = inst.mul_mat_q; tensor_split = inst.tensor_split; + use_mmap = inst.use_mmap; n_prompt = inst.n_prompt; n_gen = inst.n_gen; // RFC 3339 date-time format @@ -654,25 +691,29 @@ struct test { if (metal) { return "Metal"; } + if (sycl) { + return GGML_SYCL_NAME; + } if (gpu_blas) { return "GPU BLAS"; } if (blas) { return "BLAS"; } + return "CPU"; } static const std::vector & get_fields() { static const std::vector fields = { "build_commit", "build_number", - "cuda", "opencl", "vulkan", "kompute", "metal", "gpu_blas", "blas", + "cuda", "opencl", "vulkan", "kompute", "metal", "sycl", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", "n_gpu_layers", "split_mode", "main_gpu", "no_kv_offload", - "mul_mat_q", "tensor_split", + "mul_mat_q", "tensor_split", "use_mmap", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -691,8 +732,8 @@ struct test { return INT; } if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || - field == "gpu_blas" || field == "blas" || field == "f16_kv" || field == "no_kv_offload" || - field == "mul_mat_q") { + field == "gpu_blas" || field == "blas" || field == "sycl" ||field == "f16_kv" || field == "no_kv_offload" || + field == "mul_mat_q" || field == "use_mmap") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -720,13 +761,13 @@ struct test { std::vector values = { build_commit, std::to_string(build_number), std::to_string(cuda), 
std::to_string(opencl), std::to_string(vulkan), std::to_string(vulkan), - std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), + std::to_string(metal), std::to_string(sycl), std::to_string(gpu_blas), std::to_string(blas), cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), split_mode_str(split_mode), std::to_string(main_gpu), std::to_string(no_kv_offload), - std::to_string(mul_mat_q), tensor_split_str, + std::to_string(mul_mat_q), tensor_split_str, std::to_string(use_mmap), std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -753,6 +794,7 @@ const bool test::kompute = !!ggml_cpu_has_kompute(); const bool test::metal = !!ggml_cpu_has_metal(); const bool test::gpu_blas = !!ggml_cpu_has_gpublas(); const bool test::blas = !!ggml_cpu_has_blas(); +const bool test::sycl = !!ggml_cpu_has_sycl(); const std::string test::cpu_info = get_cpu_info(); const std::string test::gpu_info = get_gpu_info(); @@ -895,6 +937,9 @@ struct markdown_printer : public printer { if (field == "no_kv_offload") { return "nkvo"; } + if (field == "use_mmap") { + return "mmap"; + } if (field == "tensor_split") { return "ts"; } @@ -938,6 +983,9 @@ struct markdown_printer : public printer { if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { fields.push_back("tensor_split"); } + if (params.use_mmap.size() > 1 || params.use_mmap != cmd_params_defaults.use_mmap) { + fields.push_back("use_mmap"); + } fields.push_back("test"); fields.push_back("t/s"); diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 1cc55ef52..e8ba48353 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -2928,7 +2928,6 @@ void ggml_sycl_set_main_device(int main_device); void ggml_sycl_set_mul_mat_q(bool mul_mat_q); void ggml_sycl_set_scratch_size(size_t scratch_size); void ggml_sycl_free_scratch(void); -int ggml_sycl_get_device_count(void); void ggml_sycl_get_device_description(int device, char * description, size_t description_size); bool ggml_backend_is_sycl(ggml_backend_t backend); int ggml_backend_sycl_get_device(ggml_backend_t backend); @@ -14493,6 +14492,37 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ return true; } +GGML_API GGML_CALL void ggml_sycl_get_gpu_list(int *id_list, int max_len) try { + int max_compute_units = -1; + for(int i=0;i Date: Thu, 1 Feb 2024 23:20:13 -0800 Subject: [PATCH 488/811] llama : fix memory leak in llama_batch_free (#5252) The llama_batch_init allocates memory for a fixed number of tokens. However, the llama_batch_free only frees memory for the number of tokens that were added to the batch. This change-set uses a null terminated array for the batch seq_id, and frees all the elements until the nullptr is reached. This change-set also changes the name of the first parameter from `n_tokens` to `n_tokens_alloc` to more clearly indicate that this value is the number of tokens allocated to the batch, not the number of tokens in the batch. 
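To make the new contract concrete, here is a minimal caller sketch (illustrative only, not part of the patch; the 512-token capacity, the single sequence id, and the `main()` wrapper are arbitrary example values). It uses only the two functions touched by the diff below:

```cpp
#include "llama.h"

int main() {
    // Allocate capacity for 512 tokens, no embeddings, one seq_id slot per token.
    llama_batch batch = llama_batch_init(/*n_tokens_alloc =*/ 512, /*embd =*/ 0, /*n_seq_max =*/ 1);

    // ... fill in any number of tokens (possibly fewer than 512) and decode ...

    // With the null-terminated seq_id array, this frees all 512 allocated seq_id
    // slots, not only the batch.n_tokens entries that happened to be used.
    llama_batch_free(batch);
    return 0;
}
```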
--- llama.cpp | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/llama.cpp b/llama.cpp index e8f44c2cb..6bf7f9efb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11377,22 +11377,24 @@ struct llama_batch llama_batch_get_one( }; } -struct llama_batch llama_batch_init(int32_t n_tokens, int32_t embd, int32_t n_seq_max) { +struct llama_batch llama_batch_init(int32_t n_tokens_alloc, int32_t embd, int32_t n_seq_max) { llama_batch batch = { 0, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, 0, 0, 0, }; if (embd) { - batch.embd = (float *) malloc(sizeof(float) * n_tokens * embd); + batch.embd = (float *) malloc(sizeof(float) * n_tokens_alloc * embd); } else { - batch.token = (llama_token *) malloc(sizeof(llama_token) * n_tokens); + batch.token = (llama_token *) malloc(sizeof(llama_token) * n_tokens_alloc); } - batch.pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens); - batch.n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens); - batch.seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * n_tokens); - for (int i = 0; i < n_tokens; ++i) { + batch.pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens_alloc); + batch.n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens_alloc); + batch.seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * (n_tokens_alloc + 1)); + for (int i = 0; i < n_tokens_alloc; ++i) { batch.seq_id[i] = (llama_seq_id *) malloc(sizeof(llama_seq_id) * n_seq_max); } - batch.logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens); + batch.seq_id[n_tokens_alloc] = nullptr; + + batch.logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens_alloc); return batch; } @@ -11403,7 +11405,7 @@ void llama_batch_free(struct llama_batch batch) { if (batch.pos) free(batch.pos); if (batch.n_seq_id) free(batch.n_seq_id); if (batch.seq_id) { - for (int i = 0; i < batch.n_tokens; ++i) { + for (int i = 0; batch.seq_id[i] != nullptr; ++i) { free(batch.seq_id[i]); } free(batch.seq_id); From af3ba5d94627d337e32a95129e31a3064c459f6b Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Fri, 2 Feb 2024 15:53:27 +0800 Subject: [PATCH 489/811] [SYCL] update guide of SYCL backend (#5254) * update guide for make installation, memory, gguf model link, rm todo for windows build * add vs install requirement * update for gpu device check * update help of llama-bench * fix grammar issues --- README-sycl.md | 64 +++++++++++++++++++++++++++----- examples/llama-bench/README.md | 34 ++++++++++------- examples/sycl/win-run-llama2.bat | 2 +- 3 files changed, 77 insertions(+), 23 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index b8ee212b8..f7edc1c3e 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -42,6 +42,8 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). ## Intel GPU +### Verified + |Intel GPU| Status | Verified Model| |-|-|-| |Intel Data Center Max Series| Support| Max 1550| @@ -50,6 +52,17 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake| |Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7| +Note: If the EUs (Execution Unit) in iGPU is less than 80, the inference speed will be too slow to use. + +### Memory + +Memory is a limiting factor when running LLMs on GPUs. + +When llama.cpp runs, it prints a log line such as `llm_load_tensors: buffer size = 3577.56 MiB` that shows how much GPU memory is allocated, so you can check the requirement for your case. + +For iGPU, please make sure the shared memory from host memory is enough. 
For llama-2-7b.Q4_0, recommend the host memory is 8GB+. + +For dGPU, please make sure the device memory is enough. For llama-2-7b.Q4_0, recommend the device memory is 4GB+. ## Linux @@ -105,7 +118,7 @@ source /opt/intel/oneapi/setvars.sh sycl-ls ``` -There should be one or more level-zero devices. Like **[ext_oneapi_level_zero:gpu:0]**. +There should be one or more level-zero devices. Please confirm that at least one GPU is present, like **[ext_oneapi_level_zero:gpu:0]**. Output (example): ``` @@ -152,6 +165,8 @@ Note: 1. Put model file to folder **models** +You could download [llama-2-7b.Q4_0.gguf](https://huggingface.co/TheBloke/Llama-2-7B-GGUF/blob/main/llama-2-7b.Q4_0.gguf) as example. + 2. Enable oneAPI running environment ``` @@ -223,7 +238,13 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device Please install Intel GPU driver by official guide: [Install GPU Drivers](https://www.intel.com/content/www/us/en/products/docs/discrete-gpus/arc/software/drivers.html). -2. Install Intel® oneAPI Base toolkit. +Note: **The driver is mandatory for compute function**. + +2. Install Visual Studio. + +Please install [Visual Studio](https://visualstudio.microsoft.com/) which impact oneAPI environment enabling in Windows. + +3. Install Intel® oneAPI Base toolkit. a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). @@ -252,7 +273,7 @@ In oneAPI command line: sycl-ls ``` -There should be one or more level-zero devices. Like **[ext_oneapi_level_zero:gpu:0]**. +There should be one or more level-zero devices. Please confirm that at least one GPU is present, like **[ext_oneapi_level_zero:gpu:0]**. Output (example): ``` @@ -260,15 +281,21 @@ Output (example): [opencl:cpu:1] Intel(R) OpenCL, 11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz OpenCL 3.0 (Build 0) [2023.16.10.0.17_160000] [opencl:gpu:2] Intel(R) OpenCL Graphics, Intel(R) Iris(R) Xe Graphics OpenCL 3.0 NEO [31.0.101.5186] [ext_oneapi_level_zero:gpu:0] Intel(R) Level-Zero, Intel(R) Iris(R) Xe Graphics 1.3 [1.3.28044] - ``` -3. Install cmake & make +4. Install cmake & make -a. Download & install cmake for windows: https://cmake.org/download/ +a. Download & install cmake for Windows: https://cmake.org/download/ -b. Download & install make for windows provided by mingw-w64: https://www.mingw-w64.org/downloads/ +b. Download & install make for Windows provided by mingw-w64 +- Download binary package for Windows in https://github.com/niXman/mingw-builds-binaries/releases. + + Like [x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z](https://github.com/niXman/mingw-builds-binaries/releases/download/13.2.0-rt_v11-rev1/x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z). + +- Unzip the binary package. In the **bin** sub-folder and rename **xxx-make.exe** to **make.exe**. + +- Add the **bin** folder path in the Windows system PATH environment. ### Build locally: @@ -309,6 +336,8 @@ Note: 1. Put model file to folder **models** +You could download [llama-2-7b.Q4_0.gguf](https://huggingface.co/TheBloke/Llama-2-7B-GGUF/blob/main/llama-2-7b.Q4_0.gguf) as example. + 2. Enable oneAPI running environment - In Search, input 'oneAPI'. @@ -419,8 +448,25 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device Miss to enable oneAPI running environment. +- Meet compile error. + + Remove folder **build** and try again. + +- I can **not** see **[ext_oneapi_level_zero:gpu:0]** afer install GPU driver in Linux. 
+ + Please run **sudo sycl-ls**. + + If you see it in result, please add video/render group to your ID: + + ``` + sudo usermod -aG render username + sudo usermod -aG video username + ``` + + Then **relogin**. + + If you do not see it, please check the installation GPU steps again. + ## Todo -- Support to build in Windows. - - Support multiple cards. diff --git a/examples/llama-bench/README.md b/examples/llama-bench/README.md index d02824bfa..374e40a7d 100644 --- a/examples/llama-bench/README.md +++ b/examples/llama-bench/README.md @@ -23,19 +23,23 @@ usage: ./llama-bench [options] options: -h, --help - -m, --model (default: models/7B/ggml-model-q4_0.gguf) - -p, --n-prompt (default: 512) - -n, --n-gen (default: 128) - -b, --batch-size (default: 512) - --memory-f32 <0|1> (default: 0) - -t, --threads (default: 16) - -ngl N, --n-gpu-layers (default: 99) - -mg i, --main-gpu (default: 0) - -mmq, --mul-mat-q <0|1> (default: 1) - -ts, --tensor_split - -r, --repetitions (default: 5) - -o, --output (default: md) - -v, --verbose (default: 0) + -m, --model (default: models/7B/ggml-model-q4_0.gguf) + -p, --n-prompt (default: 512) + -n, --n-gen (default: 128) + -b, --batch-size (default: 512) + -ctk , --cache-type-k (default: f16) + -ctv , --cache-type-v (default: f16) + -t, --threads (default: 112) + -ngl, --n-gpu-layers (default: 99) + -sm, --split-mode (default: layer) + -mg, --main-gpu (default: 0) + -nkvo, --no-kv-offload <0|1> (default: 0) + -mmp, --mmap <0|1> (default: 1) + -mmq, --mul-mat-q <0|1> (default: 1) + -ts, --tensor_split (default: 0) + -r, --repetitions (default: 5) + -o, --output (default: md) + -v, --verbose (default: 0) Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times. ``` @@ -51,6 +55,10 @@ Each test is repeated the number of times given by `-r`, and the results are ave For a description of the other options, see the [main example](../main/README.md). +Note: + +- When using SYCL backend, there would be hang issue in some cases. Please set `--mmp 0`. + ## Examples ### Text generation with different models diff --git a/examples/sycl/win-run-llama2.bat b/examples/sycl/win-run-llama2.bat index 28d935541..cf621c675 100644 --- a/examples/sycl/win-run-llama2.bat +++ b/examples/sycl/win-run-llama2.bat @@ -2,7 +2,7 @@ :: Copyright (C) 2024 Intel Corporation :: SPDX-License-Identifier: MIT -INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" +set INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" @call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force From e805f0fa9951081ce0a86378a7aa52b6f636b82d Mon Sep 17 00:00:00 2001 From: "Meng, Hengyu" Date: Fri, 2 Feb 2024 15:54:14 +0800 Subject: [PATCH 490/811] [SYCL] get MAX_MEM_ALLOC from device property (#5270) * get max alloc size from device prop * fix macro typo --- ggml-sycl.cpp | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index e8ba48353..4ee2eed38 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -337,6 +337,7 @@ namespace dpct } size_t get_global_mem_size() const { return _global_mem_size; } size_t get_local_mem_size() const { return _local_mem_size; } + size_t get_max_mem_alloc_size() const { return _max_mem_alloc_size; } /// Returns the maximum clock rate of device's global memory in kHz. If /// compiler does not support this API then returns default value 3200000 kHz. 
unsigned int get_memory_clock_rate() const { return _memory_clock_rate; } @@ -398,6 +399,10 @@ namespace dpct { _local_mem_size = local_mem_size; } + void set_max_mem_alloc_size(size_t max_mem_alloc_size) + { + _max_mem_alloc_size = max_mem_alloc_size; + } void set_max_work_group_size(int max_work_group_size) { _max_work_group_size = max_work_group_size; @@ -465,6 +470,7 @@ namespace dpct int _max_register_size_per_work_group; size_t _global_mem_size; size_t _local_mem_size; + size_t _max_mem_alloc_size; size_t _max_nd_range_size[3]; int _max_nd_range_size_i[3]; uint32_t _device_id; @@ -516,6 +522,7 @@ namespace dpct dev.get_info()); prop.set_global_mem_size(dev.get_info()); prop.set_local_mem_size(dev.get_info()); + prop.set_max_mem_alloc_size(dev.get_info()); #if (defined(SYCL_EXT_INTEL_DEVICE_INFO) && SYCL_EXT_INTEL_DEVICE_INFO >= 6) if (dev.has(sycl::aspect::ext_intel_memory_clock_rate)) @@ -644,6 +651,11 @@ namespace dpct return get_device_info().get_global_mem_size(); } + size_t get_max_mem_alloc_size() const + { + return get_device_info().get_max_mem_alloc_size(); + } + /// Get the number of bytes of free and total memory on the SYCL device. /// \param [out] free_memory The number of bytes of free memory on the SYCL device. /// \param [out] total_memory The number of bytes of total memory on the SYCL device. @@ -11311,10 +11323,10 @@ void ggml_init_sycl() try { GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); int64_t total_vram = 0; -#if defined(GGML_SYCL_FP16) - fprintf(stderr, "%s: GGML_SYCL_FP16: yes\n", __func__); +#if defined(GGML_SYCL_F16) + fprintf(stderr, "%s: GGML_SYCL_F16: yes\n", __func__); #else - fprintf(stderr, "%s: GGML_SYCL_FP16: no\n", __func__); + fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); #endif @@ -14788,6 +14800,12 @@ static size_t ggml_backend_sycl_buffer_type_get_alignment(ggml_backend_buffer_ty UNUSED(buft); } +static size_t ggml_backend_sycl_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { + return dpct::get_current_device().get_max_mem_alloc_size(); + + UNUSED(buft); +} + static size_t ggml_backend_sycl_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); @@ -14818,7 +14836,7 @@ static ggml_backend_buffer_type_i ggml_backend_sycl_buffer_type_interface = { /* .get_name = */ ggml_backend_sycl_buffer_type_name, /* .alloc_buffer = */ ggml_backend_sycl_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_sycl_buffer_type_get_alignment, - /* .get_max_size = */ NULL, // TODO: return device.maxBufferLength + /* .get_max_size = */ ggml_backend_sycl_buffer_type_get_max_size, /* .get_alloc_size = */ ggml_backend_sycl_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_sycl_buffer_type_supports_backend, /* .is_host = */ nullptr, From 6b91b1e0a92ac2e4e269eec6361ca53a61ced6c6 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Fri, 2 Feb 2024 08:56:31 +0100 Subject: [PATCH 491/811] docker : add build for SYCL, Vulkan + update readme (#5228) * add vulkan dockerfile * intel dockerfile: compile sycl by default * fix vulkan dockerfile * add docs for vulkan * docs: sycl build in docker * docs: remove trailing spaces * docs: sycl: add docker section * docs: clarify install vulkan SDK outside docker * sycl: use intel/oneapi-basekit docker image * docs: correct TOC * docs: correct docker image for Intel oneMKL --- .devops/main-intel.Dockerfile | 16 ++--- .devops/main-vulkan.Dockerfile | 29 +++++++++ 
.devops/server-intel.Dockerfile | 15 +++-- .devops/server-vulkan.Dockerfile | 29 +++++++++ README-sycl.md | 102 +++++++++++++++++++------------ README.md | 64 ++++++++++++++----- 6 files changed, 188 insertions(+), 67 deletions(-) create mode 100644 .devops/main-vulkan.Dockerfile create mode 100644 .devops/server-vulkan.Dockerfile diff --git a/.devops/main-intel.Dockerfile b/.devops/main-intel.Dockerfile index e1e6acc24..572e5d8ea 100644 --- a/.devops/main-intel.Dockerfile +++ b/.devops/main-intel.Dockerfile @@ -1,8 +1,8 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 -ARG UBUNTU_VERSION=22.04 -FROM intel/hpckit:$ONEAPI_VERSION as build +FROM intel/oneapi-basekit:$ONEAPI_VERSION as build +ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git @@ -10,16 +10,18 @@ WORKDIR /app COPY . . -# for some reasons, "-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DLLAMA_NATIVE=ON" give worse performance RUN mkdir build && \ cd build && \ - cmake .. -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx && \ - cmake --build . --config Release --target main server + if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ + echo "LLAMA_SYCL_F16 is set" && \ + export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ + fi && \ + cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx ${OPT_SYCL_F16} && \ + cmake --build . --config Release --target main -FROM ubuntu:$UBUNTU_VERSION as runtime +FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime COPY --from=build /app/build/bin/main /main -COPY --from=build /app/build/bin/server /server ENV LC_ALL=C.utf8 diff --git a/.devops/main-vulkan.Dockerfile b/.devops/main-vulkan.Dockerfile new file mode 100644 index 000000000..bca460365 --- /dev/null +++ b/.devops/main-vulkan.Dockerfile @@ -0,0 +1,29 @@ +ARG UBUNTU_VERSION=jammy + +FROM ubuntu:$UBUNTU_VERSION as build + +# Install build tools +RUN apt update && apt install -y git build-essential cmake wget + +# Install Vulkan SDK +RUN wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - && \ + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list && \ + apt update -y && \ + apt-get install -y vulkan-sdk + +# Build it +WORKDIR /app +COPY . . +RUN mkdir build && \ + cd build && \ + cmake .. -DLLAMA_VULKAN=1 && \ + cmake --build . --config Release --target main + +# Clean up +WORKDIR / +RUN cp /app/build/bin/main /main && \ + rm -rf /app + +ENV LC_ALL=C.utf8 + +ENTRYPOINT [ "/main" ] diff --git a/.devops/server-intel.Dockerfile b/.devops/server-intel.Dockerfile index e343d278c..312f2df80 100644 --- a/.devops/server-intel.Dockerfile +++ b/.devops/server-intel.Dockerfile @@ -1,8 +1,8 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 -ARG UBUNTU_VERSION=22.04 -FROM intel/hpckit:$ONEAPI_VERSION as build +FROM intel/oneapi-basekit:$ONEAPI_VERSION as build +ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git @@ -10,13 +10,16 @@ WORKDIR /app COPY . . -# for some reasons, "-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DLLAMA_NATIVE=ON" give worse performance RUN mkdir build && \ cd build && \ - cmake .. -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx && \ - cmake --build . --config Release --target main server + if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ + echo "LLAMA_SYCL_F16 is set" && \ + export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ + fi && \ + cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx ${OPT_SYCL_F16} && \ + cmake --build . 
--config Release --target server -FROM ubuntu:$UBUNTU_VERSION as runtime +FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime COPY --from=build /app/build/bin/server /server diff --git a/.devops/server-vulkan.Dockerfile b/.devops/server-vulkan.Dockerfile new file mode 100644 index 000000000..e0add6fc3 --- /dev/null +++ b/.devops/server-vulkan.Dockerfile @@ -0,0 +1,29 @@ +ARG UBUNTU_VERSION=jammy + +FROM ubuntu:$UBUNTU_VERSION as build + +# Install build tools +RUN apt update && apt install -y git build-essential cmake wget + +# Install Vulkan SDK +RUN wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - && \ + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list && \ + apt update -y && \ + apt-get install -y vulkan-sdk + +# Build it +WORKDIR /app +COPY . . +RUN mkdir build && \ + cd build && \ + cmake .. -DLLAMA_VULKAN=1 && \ + cmake --build . --config Release --target server + +# Clean up +WORKDIR / +RUN cp /app/build/bin/server /server && \ + rm -rf /app + +ENV LC_ALL=C.utf8 + +ENTRYPOINT [ "/server" ] diff --git a/README-sycl.md b/README-sycl.md index f7edc1c3e..7aa4274a9 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -1,22 +1,15 @@ # llama.cpp for SYCL -[Background](#background) - -[OS](#os) - -[Intel GPU](#intel-gpu) - -[Linux](#linux) - -[Windows](#windows) - -[Environment Variable](#environment-variable) - -[Known Issue](#known-issue) - -[Q&A](#q&a) - -[Todo](#todo) +- [Background](#background) +- [OS](#os) +- [Intel GPU](#intel-gpu) +- [Docker](#docker) +- [Linux](#linux) +- [Windows](#windows) +- [Environment Variable](#environment-variable) +- [Known Issue](#known-issue) +- [Q&A](#q&a) +- [Todo](#todo) ## Background @@ -36,7 +29,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |OS|Status|Verified| |-|-|-| -|Linux|Support|Ubuntu 22.04| +|Linux|Support|Ubuntu 22.04, Fedora Silverblue 39| |Windows|Support|Windows 11| @@ -50,7 +43,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |Intel Data Center Flex Series| Support| Flex 170| |Intel Arc Series| Support| Arc 770, 730M| |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake| -|Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7| +|Intel iGPU| Support| iGPU in i5-1250P, i7-1260P, i7-1165G7| Note: If the EUs (Execution Unit) in iGPU is less than 80, the inference speed will be too slow to use. @@ -64,6 +57,38 @@ For iGPU, please make sure the shared memory from host memory is enough. For lla For dGPU, please make sure the device memory is enough. For llama-2-7b.Q4_0, recommend the device memory is 4GB+. +## Docker + +Note: +- Only docker on Linux is tested. Docker on WSL may not work. +- You may need to install Intel GPU driver on the host machine (See the [Linux](#linux) section to know how to do that) + +### Build the image + +You can choose between **F16** and **F32** build. F16 is faster for long-prompt inference. + + +```sh +# For F16: +#docker build -t llama-cpp-sycl --build-arg="LLAMA_SYCL_F16=ON" -f .devops/main-intel.Dockerfile . + +# Or, for F32: +docker build -t llama-cpp-sycl -f .devops/main-intel.Dockerfile . + +# Note: you can also use the ".devops/main-server.Dockerfile", which compiles the "server" example +``` + +### Run + +```sh +# Firstly, find all the DRI cards: +ls -la /dev/dri +# Then, pick the card that you want to use. 
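+# (Optional, on typical udev-based systems) check which render node pairs with that card:
+ls -la /dev/dri/by-path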
+ +# For example with "/dev/dri/card1" +docker run -it --rm -v "$(pwd):/app:Z" --device /dev/dri/renderD128:/dev/dri/renderD128 --device /dev/dri/card1:/dev/dri/card1 llama-cpp-sycl -m "/app/models/YOUR_MODEL_FILE" -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 +``` + ## Linux ### Setup Environment @@ -76,7 +101,7 @@ Note: for iGPU, please install the client GPU driver. b. Add user to group: video, render. -``` +```sh sudo usermod -aG render username sudo usermod -aG video username ``` @@ -85,7 +110,7 @@ Note: re-login to enable it. c. Check -``` +```sh sudo apt install clinfo sudo clinfo -l ``` @@ -103,7 +128,6 @@ Platform #0: Intel(R) OpenCL HD Graphics 2. Install Intel® oneAPI Base toolkit. - a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). Recommend to install to default folder: **/opt/intel/oneapi**. @@ -112,7 +136,7 @@ Following guide use the default folder as example. If you use other folder, plea b. Check -``` +```sh source /opt/intel/oneapi/setvars.sh sycl-ls @@ -131,21 +155,25 @@ Output (example): 2. Build locally: -``` +Note: +- You can choose between **F16** and **F32** build. F16 is faster for long-prompt inference. +- By default, it will build for all binary files. It will take more time. To reduce the time, we recommend to build for **example/main** only. + +```sh mkdir -p build cd build source /opt/intel/oneapi/setvars.sh -#for FP16 -#cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON # faster for long-prompt inference +# For FP16: +#cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON -#for FP32 +# Or, for FP32: cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -#build example/main only +# Build example/main only #cmake --build . --config Release --target main -#build all binary +# Or, build all binary cmake --build . --config Release -v cd .. @@ -153,14 +181,10 @@ cd .. or -``` +```sh ./examples/sycl/build.sh ``` -Note: - -- By default, it will build for all binary files. It will take more time. To reduce the time, we recommend to build for **example/main** only. - ### Run 1. Put model file to folder **models** @@ -177,10 +201,10 @@ source /opt/intel/oneapi/setvars.sh Run without parameter: -``` +```sh ./build/bin/ls-sycl-device -or +# or running the "main" executable and look at the output log: ./build/bin/main ``` @@ -209,13 +233,13 @@ found 4 SYCL devices: Set device ID = 0 by **GGML_SYCL_DEVICE=0** -``` +```sh GGML_SYCL_DEVICE=0 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 ``` or run by script: -``` -./examples/sycl/run-llama2.sh +```sh +./examples/sycl/run_llama2.sh ``` Note: diff --git a/README.md b/README.md index e6ed1d429..af1f09fa0 100644 --- a/README.md +++ b/README.md @@ -393,28 +393,28 @@ Building the program with BLAS support may lead to some performance improvements Check [BLIS.md](docs/BLIS.md) for more information. +- #### SYCL + SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators. + + llama.cpp based on SYCL is used to **support Intel GPU** (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). + + For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md). 
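+
+  A condensed Linux sketch of that flow, assuming the oneAPI Base Toolkit sits in its default location and a GGUF model is available under `models/` (the linked document has the full, authoritative steps):
+
+  ```bash
+  source /opt/intel/oneapi/setvars.sh
+  mkdir -p build
+  cd build
+  cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx
+  cmake --build . --config Release --target main
+  cd ..
+  # GGML_SYCL_DEVICE selects the device index; 0 is simply the first device reported
+  GGML_SYCL_DEVICE=0 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33
+  ```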
+ - #### Intel oneMKL + Building through oneAPI compilers will make avx_vnni instruction set available for intel processors that do not support avx512 and avx512_vnni. Please note that this build config **does not support Intel GPU**. For Intel GPU support, please refer to [llama.cpp for SYCL](./README-sycl.md). + - Using manual oneAPI installation: By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you already sourced intel environment script and assign `-DLLAMA_BLAS=ON` in cmake, the mkl version of Blas will automatically been selected. Otherwise please install oneAPI and follow the below steps: ```bash mkdir build cd build - source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-runtime docker image, only required for manual installation + source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-basekit docker image, only required for manual installation cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON cmake --build . --config Release ``` - Using oneAPI docker image: - If you do not want to source the environment vars and install oneAPI manually, you can also build the code using intel docker container: [oneAPI-runtime](https://hub.docker.com/r/intel/oneapi-runtime) - - ```bash - mkdir build - cd build - cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON - cmake --build . --config Release - ``` - - Building through oneAPI compilers will make avx_vnni instruction set available for intel processors that do not support avx512 and avx512_vnni. + If you do not want to source the environment vars and install oneAPI manually, you can also build the code using intel docker container: [oneAPI-basekit](https://hub.docker.com/r/intel/oneapi-basekit). Then, you can use the commands given above. Check [Optimizing and Running LLaMA2 on Intel® CPU](https://www.intel.com/content/www/us/en/content-details/791610/optimizing-and-running-llama2-on-intel-cpu.html) for more information. @@ -601,14 +601,48 @@ Building the program with BLAS support may lead to some performance improvements You can get a list of platforms and devices from the `clinfo -l` command, etc. -- #### SYCL +- #### Vulkan - SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators. + **With docker**: - llama.cpp based on SYCL is used to support Intel GPU (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). + You don't need to install Vulkan SDK. It will be installed inside the container. - For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md). + ```sh + # Build the image + docker build -t llama-cpp-vulkan -f .devops/main-vulkan.Dockerfile . 
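+
+  # (Optional) confirm the image exists; "llama-cpp-vulkan" is just the example tag used above:
+  docker image ls llama-cpp-vulkan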
+ # Then, use it: + docker run -it --rm -v "$(pwd):/app:Z" --device /dev/dri/renderD128:/dev/dri/renderD128 --device /dev/dri/card1:/dev/dri/card1 llama-cpp-vulkan -m "/app/models/YOUR_MODEL_FILE" -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 + ``` + + **Without docker**: + + Firstly, you need to make sure you installed [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html) + + For example, on Ubuntu 22.04 (jammy), use the command below: + + ```bash + wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list + apt update -y + apt-get install -y vulkan-sdk + # To verify the installation, use the command below: + vulkaninfo + ``` + + Then, build llama.cpp using the cmake command below: + + ```bash + mkdir -p build + cd build + cmake .. -DLLAMA_VULKAN=1 + cmake --build . --config Release + # Test the output binary (with "-ngl 33" to offload all layers to GPU) + ./bin/main -m "PATH_TO_MODEL" -p "Hi you how are you" -n 50 -e -ngl 33 -t 4 + + # You should see in the output, ggml_vulkan detected your GPU. For example: + # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32 + ``` ### Prepare Data & Run From b05102fe8cfa9893851c6bf6efd15cdc20b6afa2 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Fri, 2 Feb 2024 08:39:48 +0000 Subject: [PATCH 492/811] Tidy ggml-sycl (#5261) * Tidy some code in ggml-sycl * Remove blank space * Remove std::printf comments --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 4ee2eed38..ac75f8e16 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -1366,6 +1366,7 @@ namespace dpct } #else return q.memcpy(to_ptr, from_ptr, size, dep_events); + GGML_UNUSED(direction); #endif // DPCT_USM_LEVEL_NONE } @@ -1667,7 +1668,7 @@ namespace dpct using Ty = typename DataType::T2; Ty s_h; if (get_pointer_attribute(q, s) == pointer_access_attribute::device_only) - detail::dpct_memcpy(q, (void *)&s_h, (void *)s, sizeof(T), device_to_host) + detail::dpct_memcpy(q, (void *)&s_h, (const void *)s, sizeof(T), device_to_host) .wait(); else s_h = *reinterpret_cast(s); @@ -1691,6 +1692,20 @@ namespace dpct int ldb, const void *beta, void *c, int ldc) { #ifndef __INTEL_MKL__ + GGML_UNUSED(q); + GGML_UNUSED(a_trans); + GGML_UNUSED(b_trans); + GGML_UNUSED(m); + GGML_UNUSED(n); + GGML_UNUSED(k); + GGML_UNUSED(alpha); + GGML_UNUSED(a); + GGML_UNUSED(lda); + GGML_UNUSED(b); + GGML_UNUSED(ldb); + GGML_UNUSED(beta); + GGML_UNUSED(c); + GGML_UNUSED(ldc); throw std::runtime_error("The oneAPI Math Kernel Library (oneMKL) Interfaces " "Project does not support this API."); #else @@ -1830,7 +1845,7 @@ namespace dpct template T permute_sub_group_by_xor(sycl::sub_group g, T x, unsigned int mask, - int logical_sub_group_size = 32) + unsigned int logical_sub_group_size = 32) { unsigned int id = g.get_local_linear_id(); unsigned int start_index = @@ -2160,6 +2175,7 @@ namespace dpct } #else return q.memcpy(to_ptr, from_ptr, size, dep_events); + GGML_UNUSED(direction); #endif // DPCT_USM_LEVEL_NONE } @@ -3302,7 +3318,7 @@ void log_ggml_var_device(const char*name, float *src, size_t total_elements, boo std::ofstream logfile; 
logfile.open(filename); // printf("local buf element %d\n", total_elements); - for(int i=0; ibackend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; - const int compute_capability = g_device_caps[id].cc; #ifdef GGML_SYCL_F16 bool use_fp16 = true; // TODO(Yu) SYCL capability check #else @@ -12691,7 +12700,7 @@ static void ggml_sycl_set_peer_access(const int n_tokens) { continue; } - int can_access_peer; + // int can_access_peer; // SYCL_CHECK(syclDeviceCanAccessPeer(&can_access_peer, id, id_other)); // if (can_access_peer) { // if (enable_peer_access) { @@ -12716,7 +12725,6 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; const int64_t ne03 = src0->ne[3]; - const int64_t nrows0 = ggml_nrows(src0); const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; @@ -13812,13 +13820,6 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, src1_row_extra.data_device[g_main_device_index] = src1_contiguous.get(); dst_row_extra.data_device[g_main_device_index] = dst_contiguous.get(); - const dpct::memcpy_direction src1_kind = - src1->backend == GGML_BACKEND_CPU ? dpct::host_to_device - : dpct::device_to_device; - const dpct::memcpy_direction dst_kind = dst->backend == GGML_BACKEND_CPU - ? dpct::device_to_host - : dpct::device_to_device; - for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; From 2d40085c26794e29c434480b9e06738e89e5686f Mon Sep 17 00:00:00 2001 From: Mirror Azure <54669636+MirrorAzure@users.noreply.github.com> Date: Fri, 2 Feb 2024 14:39:09 +0300 Subject: [PATCH 493/811] py : add check for '.attn.masked_bias' layers to GPT2model (#5281) --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 4ebab07b3..a6ffd128b 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1138,7 +1138,7 @@ class GPT2Model(Model): for name, data_torch in self.get_tensors(): # we don't need these - if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias")): + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias", ".attn.masked_bias")): continue if name.endswith((".c_attn.weight", ".c_proj.weight", ".c_fc.weight", ".c_proj.weight")): From e437b37fd0b2b97e6c6ff1045ec7f901faa6498a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 2 Feb 2024 14:23:40 +0200 Subject: [PATCH 494/811] scripts : parse wtype in server-llm.sh (#5167) * scripts : parse wtype in server-llm.sh * scripts : fix check for wfile --- scripts/server-llm.sh | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 7bf0929bb..0b83cdbbc 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -141,6 +141,28 @@ for wt in "${wtypes[@]}"; do wfiles+=("") done +# map wtype input to index +if [[ ! 
-z "$wtype" ]]; then + iw=-1 + is=0 + for wt in "${wtypes[@]}"; do + # uppercase + uwt=$(echo "$wt" | tr '[:lower:]' '[:upper:]') + if [[ "$uwt" == "$wtype" ]]; then + iw=$is + break + fi + is=$((is+1)) + done + + if [[ $iw -eq -1 ]]; then + printf "[-] Invalid weight type: %s\n" "$wtype" + exit 1 + fi + + wtype="$iw" +fi + # sample repos repos=( "https://huggingface.co/TheBloke/Llama-2-7B-GGUF" @@ -252,8 +274,10 @@ for file in $model_files; do printf " %2d) %s %s\n" $iw "$have" "$file" done +wfile="${wfiles[$wtype]}" + # ask for weights type until provided and available -while [[ -z "$wtype" ]]; do +while [[ -z "$wfile" ]]; do printf "\n" read -p "[+] Select weight type: " wtype wfile="${wfiles[$wtype]}" From 191221178f51b6e81122c5bda0fd79620e547d07 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Fri, 2 Feb 2024 08:15:30 -0600 Subject: [PATCH 495/811] perplexity : fix KL divergence calculations on Windows (#5273) --- examples/perplexity/perplexity.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 8d2204969..4b08145cd 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -457,14 +457,14 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par std::ofstream logits_stream; if (!params.logits_file.empty()) { - logits_stream.open(params.logits_file.c_str()); + logits_stream.open(params.logits_file.c_str(), std::ios::binary); if (!logits_stream.is_open()) { fprintf(stderr, "%s: failed to open %s for writing\n", __func__, params.logits_file.c_str()); return {}; } fprintf(stderr, "%s: saving all logits to %s\n", __func__, params.logits_file.c_str()); logits_stream.write("_logits_", 8); - logits_stream.write((const char *)&n_ctx, sizeof(n_ctx)); + logits_stream.write(reinterpret_cast(&n_ctx), sizeof(n_ctx)); } auto tim1 = std::chrono::high_resolution_clock::now(); From a305dba8ff642e57f538f42010868fe0bc5262a1 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Sat, 3 Feb 2024 08:11:37 +0000 Subject: [PATCH 496/811] Fix im2col with 32fp (#5286) --- ggml-sycl.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index ac75f8e16..51445b5e7 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -8247,7 +8247,8 @@ static void clamp_f32(const float * x, float * dst, const float min, const float dst[i] = x[i] < min ? min : (x[i] > max ? 
max : x[i]); } -static void im2col_f32_f16(const float *x, sycl::half *dst, int offset_delta, +template +static void im2col_kernel(const float *x, T *dst, int offset_delta, int IW, int IH, int OW, int KW, int KH, int pelements, int CHW, int s0, int s1, int p0, int p1, int d0, int d1, @@ -11019,7 +11020,8 @@ static void soft_max_f32_sycl(const float *x, const float *y, float *dst, }); } -static void im2col_f32_f16_sycl(const float *x, sycl::half *dst, int IW, int IH, +template +static void im2col_sycl(const float *x, T *dst, int IW, int IH, int OW, int OH, int KW, int KH, int IC, int offset_delta, int s0, int s1, int p0, int p1, int d0, int d1, @@ -11036,7 +11038,7 @@ static void im2col_f32_f16_sycl(const float *x, sycl::half *dst, int IW, int IH, sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - im2col_f32_f16(x, dst, offset_delta, IW, IH, OW, KW, KH, + im2col_kernel(x, dst, offset_delta, IW, IH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1, item_ct1); }); @@ -12424,7 +12426,7 @@ inline void ggml_sycl_op_im2col(const ggml_tensor *src0, GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; @@ -12447,8 +12449,11 @@ inline void ggml_sycl_op_im2col(const ggml_tensor *src0, const size_t delta_offset = src1->nb[is_2D ? 2 : 1] / 4; // nb is byte offset, src is type float32 - im2col_f32_f16_sycl(src1_dd, (sycl::half *)dst_dd, IW, IH, OW, OH, KW, KH, - IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + if (dst->type == GGML_TYPE_F16) { + im2col_sycl(src1_dd, (sycl::half *)dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } else { + im2col_sycl(src1_dd, (float *)dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } (void) src0; (void) src0_dd; From 6a66c5071a74a96c4f52cf1015a092acd18c3714 Mon Sep 17 00:00:00 2001 From: BADR Date: Sat, 3 Feb 2024 12:20:26 +0100 Subject: [PATCH 497/811] readme : add tenere in the ui tools list (#5284) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index af1f09fa0..4a9bdf314 100644 --- a/README.md +++ b/README.md @@ -143,6 +143,7 @@ as the main playground for developing new features for the [ggml](https://github - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) - [iohub/collama](https://github.com/iohub/coLLaMA) +- [pythops/tenere](https://github.com/pythops/tenere) --- From 1ec3332ade60aeb1494ace2211cf1a966db6d770 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sat, 3 Feb 2024 06:22:06 -0500 Subject: [PATCH 498/811] YaRN : store rope scaling type as int32_t in memory (#5285) * YaRN : store rope scaling type as int32_t in memory * llama : store mapped names as const char * --- common/common.h | 3 +-- llama.cpp | 24 ++++++++++++------------ llama.h | 2 +- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/common/common.h b/common/common.h index 24a99d728..62de25d6a 100644 --- a/common/common.h +++ b/common/common.h @@ -75,8 +75,7 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; 
// YaRN original context length - int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment - // pinging @cebtenzzre + int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // // sampling parameters struct llama_sampling_params sparams; diff --git a/llama.cpp b/llama.cpp index 6bf7f9efb..4787a92fe 100644 --- a/llama.cpp +++ b/llama.cpp @@ -208,7 +208,7 @@ enum llm_arch { LLM_ARCH_UNKNOWN, }; -static std::map LLM_ARCH_NAMES = { +static std::map LLM_ARCH_NAMES = { { LLM_ARCH_LLAMA, "llama" }, { LLM_ARCH_FALCON, "falcon" }, { LLM_ARCH_GPT2, "gpt2" }, @@ -285,7 +285,7 @@ enum llm_kv { LLM_KV_TOKENIZER_RWKV, }; -static std::map LLM_KV_NAMES = { +static std::map LLM_KV_NAMES = { { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" }, { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" }, { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" }, @@ -346,7 +346,7 @@ struct LLM_KV { llm_arch arch; std::string operator()(llm_kv kv) const { - return ::format(LLM_KV_NAMES[kv].c_str(), LLM_ARCH_NAMES[arch].c_str()); + return ::format(LLM_KV_NAMES[kv], LLM_ARCH_NAMES[arch]); } }; @@ -747,13 +747,13 @@ struct LLM_TN { // gguf helpers // -static std::map LLAMA_ROPE_SCALING_TYPES = { +static std::map LLAMA_ROPE_SCALING_TYPES = { { LLAMA_ROPE_SCALING_NONE, "none" }, { LLAMA_ROPE_SCALING_LINEAR, "linear" }, { LLAMA_ROPE_SCALING_YARN, "yarn" }, }; -static int8_t llama_rope_scaling_type_from_string(const std::string & name) { +static int32_t llama_rope_scaling_type_from_string(const std::string & name) { for (const auto & kv : LLAMA_ROPE_SCALING_TYPES) { if (kv.second == name) { return kv.first; @@ -1415,6 +1415,7 @@ static const size_t GiB = 1024*MiB; struct llama_hparams { bool vocab_only; + bool rope_finetuned; uint32_t n_vocab; uint32_t n_ctx_train; // context size the model was trained on uint32_t n_embd; @@ -1434,8 +1435,7 @@ struct llama_hparams { float rope_freq_base_train; float rope_freq_scale_train; uint32_t n_yarn_orig_ctx; - int8_t rope_scaling_type_train : 3; - bool rope_finetuned : 1; + int32_t rope_scaling_type_train; float f_clamp_kqv; float f_max_alibi_bias; @@ -2701,7 +2701,7 @@ struct llama_model_loader { // load LLaMA models // -static std::string llama_model_arch_name(llm_arch arch) { +static const char * llama_model_arch_name(llm_arch arch) { auto it = LLM_ARCH_NAMES.find(arch); if (it == LLM_ARCH_NAMES.end()) { return "unknown"; @@ -3310,11 +3310,11 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { const auto & hparams = model.hparams; const auto & vocab = model.vocab; - const auto rope_scaling_type = LLAMA_ROPE_SCALING_TYPES.at(hparams.rope_scaling_type_train); + const char * rope_scaling_type = LLAMA_ROPE_SCALING_TYPES.at(hparams.rope_scaling_type_train); // hparams LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(ml.fver)); - LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch).c_str()); + LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch)); LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, llama_model_vocab_type_name(vocab.type)); LLAMA_LOG_INFO("%s: n_vocab = %u\n", __func__, hparams.n_vocab); LLAMA_LOG_INFO("%s: n_merges = %u\n", __func__, (int) vocab.bpe_ranks.size()); @@ -3336,7 +3336,7 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, hparams.n_ff); LLAMA_LOG_INFO("%s: n_expert = %u\n", __func__, hparams.n_expert); LLAMA_LOG_INFO("%s: 
n_expert_used = %u\n", __func__, hparams.n_expert_used); - LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type.c_str()); + LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type); LLAMA_LOG_INFO("%s: freq_base_train = %.1f\n", __func__, hparams.rope_freq_base_train); LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); LLAMA_LOG_INFO("%s: n_yarn_orig_ctx = %u\n", __func__, hparams.n_yarn_orig_ctx); @@ -10735,7 +10735,7 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { return snprintf(buf, buf_size, "%s %s %s", - llama_model_arch_name(model->arch).c_str(), + llama_model_arch_name(model->arch), llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } diff --git a/llama.h b/llama.h index 9a60e9bfb..cec4158bc 100644 --- a/llama.h +++ b/llama.h @@ -213,7 +213,7 @@ extern "C" { uint32_t n_batch; // prompt processing maximum batch size uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing - int8_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` + int32_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` // ref: https://github.com/ggerganov/llama.cpp/pull/2054 float rope_freq_base; // RoPE base frequency, 0 = from model From 52bb63c7082c859c3f1dfc527227e6a95b299c7c Mon Sep 17 00:00:00 2001 From: Michael Klimenko Date: Sat, 3 Feb 2024 12:23:37 +0100 Subject: [PATCH 499/811] refactor : switch to emplace_back to avoid extra object (#5291) --- common/common.cpp | 8 ++--- examples/llama-bench/llama-bench.cpp | 34 +++++++++++----------- examples/main/main.cpp | 4 +-- examples/perplexity/perplexity.cpp | 8 ++--- examples/quantize-stats/quantize-stats.cpp | 4 +-- examples/quantize/quantize.cpp | 4 +-- examples/server/server.cpp | 8 ++--- tests/test-llama-grammar.cpp | 2 +- 8 files changed, 36 insertions(+), 36 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index ce739b15c..3302caa20 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -515,7 +515,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(argv[i], 1.0f)); + params.lora_adapter.emplace_back(argv[i], 1.0f); params.use_mmap = false; } else if (arg == "--lora-scaled") { if (++i >= argc) { @@ -527,7 +527,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(lora_adapter, std::stof(argv[i]))); + params.lora_adapter.emplace_back(lora_adapter, std::stof(argv[i])); params.use_mmap = false; } else if (arg == "--lora-base") { if (++i >= argc) { @@ -664,7 +664,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.antiprompt.push_back(argv[i]); + params.antiprompt.emplace_back(argv[i]); } else if (arg == "-ld" || arg == "--logdir") { if (++i >= argc) { invalid_param = true; @@ -880,7 +880,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } if (!params.kv_overrides.empty()) { - params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; } diff --git a/examples/llama-bench/llama-bench.cpp 
b/examples/llama-bench/llama-bench.cpp index e36c061a2..ddb0ba064 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -948,46 +948,46 @@ struct markdown_printer : public printer { void print_header(const cmd_params & params) override { // select fields to print - fields.push_back("model"); - fields.push_back("size"); - fields.push_back("params"); - fields.push_back("backend"); + fields.emplace_back("model"); + fields.emplace_back("size"); + fields.emplace_back("params"); + fields.emplace_back("backend"); bool is_cpu_backend = test::get_backend() == "CPU" || test::get_backend() == "BLAS"; if (!is_cpu_backend) { - fields.push_back("n_gpu_layers"); + fields.emplace_back("n_gpu_layers"); } if (params.n_threads.size() > 1 || params.n_threads != cmd_params_defaults.n_threads || is_cpu_backend) { - fields.push_back("n_threads"); + fields.emplace_back("n_threads"); } if (params.n_batch.size() > 1 || params.n_batch != cmd_params_defaults.n_batch) { - fields.push_back("n_batch"); + fields.emplace_back("n_batch"); } if (params.type_k.size() > 1 || params.type_k != cmd_params_defaults.type_k) { - fields.push_back("type_k"); + fields.emplace_back("type_k"); } if (params.type_v.size() > 1 || params.type_v != cmd_params_defaults.type_v) { - fields.push_back("type_v"); + fields.emplace_back("type_v"); } if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { - fields.push_back("main_gpu"); + fields.emplace_back("main_gpu"); } if (params.split_mode.size() > 1 || params.split_mode != cmd_params_defaults.split_mode) { - fields.push_back("split_mode"); + fields.emplace_back("split_mode"); } if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { - fields.push_back("mul_mat_q"); + fields.emplace_back("mul_mat_q"); } if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { - fields.push_back("no_kv_offload"); + fields.emplace_back("no_kv_offload"); } if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { - fields.push_back("tensor_split"); + fields.emplace_back("tensor_split"); } if (params.use_mmap.size() > 1 || params.use_mmap != cmd_params_defaults.use_mmap) { - fields.push_back("use_mmap"); + fields.emplace_back("use_mmap"); } - fields.push_back("test"); - fields.push_back("t/s"); + fields.emplace_back("test"); + fields.emplace_back("t/s"); fprintf(fout, "|"); for (const auto & field : fields) { diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1c6138d23..0ed4d79f9 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -352,12 +352,12 @@ int main(int argc, char ** argv) { // in instruct mode, we inject a prefix and a suffix to each input by the user if (params.instruct) { params.interactive_first = true; - params.antiprompt.push_back("### Instruction:\n\n"); + params.antiprompt.emplace_back("### Instruction:\n\n"); } // similar for chatml mode else if (params.chatml) { params.interactive_first = true; - params.antiprompt.push_back("<|im_start|>user\n"); + params.antiprompt.emplace_back("<|im_start|>user\n"); } // enable interactive mode if interactive start is specified diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 4b08145cd..b2c131d4c 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -881,7 +881,7 @@ static void hellaswag_score(llama_context * ctx, const gpt_params & params) { size_t li = 
hs_cur.common_prefix; for (int s = 0; s < 4; ++s) { for (size_t j = hs_cur.common_prefix; j < hs_cur.seq_tokens[s].size() - 1; j++) { - eval_pairs.push_back(std::make_pair(hs_cur.i_batch + li++, hs_cur.seq_tokens[s][j + 1])); + eval_pairs.emplace_back(hs_cur.i_batch + li++, hs_cur.seq_tokens[s][j + 1]); } ++li; } @@ -1159,13 +1159,13 @@ static void winogrande_score(llama_context * ctx, const gpt_params & params) { const int last_1st = task.seq_tokens[0].size() - n_base1 > 1 ? 1 : 0; size_t li = n_base1 - 1; for (size_t j = n_base1-1; j < task.seq_tokens[0].size()-1-last_1st; ++j) { - eval_pairs.push_back(std::make_pair(task.i_batch + li++, task.seq_tokens[0][j+1])); + eval_pairs.emplace_back(task.i_batch + li++, task.seq_tokens[0][j+1]); } const auto& n_base2 = skip_choice ? task.n_base2 : task.common_prefix; const int last_2nd = task.seq_tokens[1].size() - n_base2 > 1 ? 1 : 0; li = task.seq_tokens[0].size() - task.common_prefix + n_base2 - 1; for (size_t j = n_base2-1; j < task.seq_tokens[1].size()-1-last_2nd; ++j) { - eval_pairs.push_back(std::make_pair(task.i_batch + li++, task.seq_tokens[1][j+1])); + eval_pairs.emplace_back(task.i_batch + li++, task.seq_tokens[1][j+1]); } } compute_logprobs(batch_logits.data(), n_vocab, workers, eval_pairs, eval_results); @@ -1524,7 +1524,7 @@ static void multiple_choice_score(llama_context * ctx, const gpt_params & params size_t li = cur_task.common_prefix; for (int s = 0; s < int(cur_task.seq_tokens.size()); ++s) { for (size_t j = cur_task.common_prefix; j < cur_task.seq_tokens[s].size() - 1; j++) { - eval_pairs.push_back(std::make_pair(cur_task.i_batch + li++, cur_task.seq_tokens[s][j + 1])); + eval_pairs.emplace_back(cur_task.i_batch + li++, cur_task.seq_tokens[s][j + 1]); } ++li; } diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index 6d5f213dc..1d05f1391 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -257,13 +257,13 @@ int main(int argc, char ** argv) { invalid_param = true; break; } - params.include_layers.push_back(argv[i]); + params.include_layers.emplace_back(argv[i]); } else if (arg == "-L" || arg == "--exclude-layer") { if (++i >= argc) { invalid_param = true; break; } - params.exclude_layers.push_back(argv[i]); + params.exclude_layers.emplace_back(argv[i]); } else if (arg == "-t" || arg == "--type") { if (++i >= argc) { invalid_param = true; diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index a9673f0d4..85f403ffc 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -208,13 +208,13 @@ int main(int argc, char ** argv) { } } else if (strcmp(argv[arg_idx], "--include-weights") == 0) { if (arg_idx < argc-1) { - included_weights.push_back(argv[++arg_idx]); + included_weights.emplace_back(argv[++arg_idx]); } else { usage(argv[0]); } } else if (strcmp(argv[arg_idx], "--exclude-weights") == 0) { if (arg_idx < argc-1) { - excluded_weights.push_back(argv[++arg_idx]); + excluded_weights.emplace_back(argv[++arg_idx]); } else { usage(argv[0]); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index ea77125ea..a9f8cb369 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1884,7 +1884,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - sparams.api_keys.push_back(argv[i]); + sparams.api_keys.emplace_back(argv[i]); } else if (arg == "--api-key-file") { @@ -2160,7 +2160,7 @@ 
static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(argv[i], 1.0f)); + params.lora_adapter.emplace_back(argv[i], 1.0f); params.use_mmap = false; } else if (arg == "--lora-scaled") @@ -2176,7 +2176,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(lora_adapter, std::stof(argv[i]))); + params.lora_adapter.emplace_back(lora_adapter, std::stof(argv[i])); params.use_mmap = false; } else if (arg == "--lora-base") @@ -2318,7 +2318,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } if (!params.kv_overrides.empty()) { - params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; } diff --git a/tests/test-llama-grammar.cpp b/tests/test-llama-grammar.cpp index 78fc41117..16ebe753f 100644 --- a/tests/test-llama-grammar.cpp +++ b/tests/test-llama-grammar.cpp @@ -105,7 +105,7 @@ int main() for (auto rule : expected_rules) { - parsed_grammar.rules.push_back({}); + parsed_grammar.rules.emplace_back(); for (auto element : rule) { parsed_grammar.rules.back().push_back(element); From e920ed393d989ed35625ddaf182ebb52cda07fcd Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 3 Feb 2024 18:15:00 +0100 Subject: [PATCH 500/811] Vulkan Intel Fixes, Optimizations and Debugging Flags (#5301) * Fix Vulkan on Intel ARC Optimize matmul for Intel ARC Add Vulkan dequant test * Add Vulkan debug and validate flags to Make and CMakeLists.txt * Enable asynchronous transfers in Vulkan backend * Fix flake8 * Disable Vulkan async backend functions for now * Also add Vulkan run tests command to Makefile and CMakeLists.txt --- CMakeLists.txt | 20 + Makefile | 12 + ggml-vulkan-shaders.hpp | 10922 +++------------------------------- ggml-vulkan.cpp | 420 +- ggml_vk_generate_shaders.py | 213 +- 5 files changed, 1257 insertions(+), 10330 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1ee455b3a..c156c4824 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -100,6 +100,10 @@ option(LLAMA_HIPBLAS "llama: use hipBLAS" option(LLAMA_HIP_UMA "llama: use HIP unified memory architecture" OFF) option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_VULKAN "llama: use Vulkan" OFF) +option(LLAMA_VULKAN_CHECK_RESULTS "llama: run Vulkan op checks" OFF) +option(LLAMA_VULKAN_DEBUG "llama: enable Vulkan debug output" OFF) +option(LLAMA_VULKAN_VALIDATE "llama: enable Vulkan validation" OFF) +option(LLAMA_VULKAN_RUN_TESTS "llama: run Vulkan tests" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) @@ -431,6 +435,22 @@ if (LLAMA_VULKAN) add_compile_definitions(GGML_USE_VULKAN) + if (LLAMA_VULKAN_CHECK_RESULTS) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_CHECK_RESULTS) + endif() + + if (LLAMA_VULKAN_DEBUG) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_DEBUG) + endif() + + if (LLAMA_VULKAN_VALIDATE) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_VALIDATE) + endif() + + if (LLAMA_VULKAN_RUN_TESTS) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_RUN_TESTS) + endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-vulkan) else() message(WARNING "Vulkan not found") diff --git a/Makefile b/Makefile 
index bf9e085de..a55d15888 100644 --- a/Makefile +++ b/Makefile @@ -457,6 +457,18 @@ ifdef LLAMA_VULKAN_CHECK_RESULTS MK_CPPFLAGS += -DGGML_VULKAN_CHECK_RESULTS endif +ifdef LLAMA_VULKAN_DEBUG + MK_CPPFLAGS += -DGGML_VULKAN_DEBUG +endif + +ifdef LLAMA_VULKAN_VALIDATE + MK_CPPFLAGS += -DGGML_VULKAN_VALIDATE +endif + +ifdef LLAMA_VULKAN_RUN_TESTS + MK_CPPFLAGS += -DGGML_VULKAN_RUN_TESTS +endif + ggml-vulkan.o: ggml-vulkan.cpp ggml-vulkan.h $(CXX) $(CXXFLAGS) -c $< -o $@ endif # LLAMA_VULKAN diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index 195410c02..e5e7a8414 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -890,156 +890,6 @@ const uint64_t cpy_f32_f32_len = 2472; unsigned char dequant_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x81,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, 
-0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x5a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x80,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x23,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
-0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x52,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_f16_len = 1748; - -unsigned char dequant_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, @@ -1192,344 +1042,10 @@ unsigned char dequant_f16_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_fp32_len = 1816; +const uint64_t dequant_f16_len = 1816; unsigned char dequant_q2_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x0c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 
-0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x61,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x70,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x15,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x2a,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x29,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xfd,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfe,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x10,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x4e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x70,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x70,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x69,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
-0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x7f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0xb8,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x70,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x69,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x5d,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd9,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x69,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf7,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x29,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x03,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xfd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q2_K_len = 3956; - -unsigned char dequant_q2_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x13,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -1869,414 +1385,10 @@ unsigned char dequant_q2_K_fp32_data[] = { 0x04,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q2_K_fp32_len = 4056; +const uint64_t dequant_q2_K_len = 4056; unsigned char dequant_q3_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x3f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x71,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x71,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x75,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x77,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x04,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x05,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, 
-0x32,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x92,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xab,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0xc4,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0xaa,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x91,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x97,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xfb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfb,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x49,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x49,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x39,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, 
-0xfd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf5,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x8e,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x5c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xe1,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q3_K_len = 4792; - -unsigned char dequant_q3_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x42,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -2680,709 +1792,10 @@ unsigned 
char dequant_q3_K_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q3_K_fp32_len = 4828; +const uint64_t dequant_q3_K_len = 4828; unsigned char dequant_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x74,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x75,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x97,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x00,0x48,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, 
-0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe6,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xe3,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xff,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x12,0x01,0x00,0x00, -0x11,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x20,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe1,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x84,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xe6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb7,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc7,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xea,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf6,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x06,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, -0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xec,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x12,0x02,0x00,0x00, -0x0a,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x13,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x26,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x39,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xf0,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x85,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xf3,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xf4,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xab,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xf5,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0xc7,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xce,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xda,0x02,0x00,0x00, -0xd9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x98,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_0_len = 8332; - -unsigned char dequant_q4_0_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x19,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -4122,758 +2535,10 @@ unsigned char dequant_q4_0_fp32_data[] = { 0x9b,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q4_0_fp32_len = 8856; +const uint64_t dequant_q4_0_len = 8856; unsigned char dequant_q4_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x27,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x22,0x03,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x9c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, 
-0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x74,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x0f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x07,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x00,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
-0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x10,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x47,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
-0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x90,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, 
-0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x16,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0xa8,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0xa6,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xab,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, 
-0xd9,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe2,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xff,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xf6,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x0b,0x02,0x00,0x00, -0x0a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x34,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x34,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3d,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5d,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x86,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x22,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xae,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xaf,0x02,0x00,0x00,0xae,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xb8,0x02,0x00,0x00, -0xb7,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xce,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, -0xce,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe1,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0xee,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xef,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xf1,0x02,0x00,0x00,0xf0,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, -0xf1,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xf3,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0xf3,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf5,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xec,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xf8,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xf8,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfe,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x01,0x03,0x00,0x00, -0x00,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x83,0x00,0x00,0x00,0x26,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0xf8,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0a,0x03,0x00,0x00,0x09,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_1_len = 8924; - -unsigned char dequant_q4_1_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x59,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -5683,496 +3348,10 @@ unsigned char dequant_q4_1_fp32_data[] = { 0xa0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q4_1_fp32_len = 9704; +const uint64_t dequant_q4_1_len = 9704; unsigned char dequant_q4_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xa6,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x31,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x42,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, 
-0x45,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x4b,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x42,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x20,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x23,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x32,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x51,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x51,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
-0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 
-0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x87,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x7c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xb9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0xce,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xb5,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, 
-0x16,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x4d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x63,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x6b,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, 
-0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, -0x89,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x90,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x63,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x37,0x01,0x00,0x00, 
-0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x38,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x43,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x32,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_K_len = 5776; - -unsigned char dequant_q4_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xb1,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -6669,1134 +3848,10 @@ unsigned char dequant_q4_K_fp32_data[] = { 0x37,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q4_K_fp32_len = 5940; +const uint64_t dequant_q4_K_len = 5940; unsigned char dequant_q5_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc0,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x00,0x4c,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x04,0x00,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x63,0x04,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x04,0x00,0x00,0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x65,0x04,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x04,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x67,0x04,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x04,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x04,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x04,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x04,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x04,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x04,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x04,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x04,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x04,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x04,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x04,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x73,0x04,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x74,0x04,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x04,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x04,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x04,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x04,0x00,0x00,0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x75,0x00,0x00,0x00, 
-0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, 
-0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x62,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x63,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x08,0x01,0x00,0x00, -0x07,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x37,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, 
-0x1a,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x64,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x23,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x33,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x3c,0x01,0x00,0x00, -0x3b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x65,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0xa3,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x64,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, -0x66,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xac,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x67,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x68,0x04,0x00,0x00, 
-0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x63,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x68,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0xd4,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe4,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x68,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xf3,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf3,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x69,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, 
-0x6a,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x0f,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x0d,0x02,0x00,0x00,0x65,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x6a,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x6a,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6b,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x38,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x45,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, 
-0x4a,0x02,0x00,0x00,0x6c,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x66,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x6c,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x61,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x63,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6c,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x6d,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x76,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x83,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x88,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x6e,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x67,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x6e,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x93,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x8b,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x6e,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xaa,0x02,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xaa,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb1,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x6f,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xa1,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x70,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0xc6,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x69,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x70,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0xd0,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00, -0xd2,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xd4,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0xd7,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdb,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x70,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe7,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x71,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf0,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xfb,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xfc,0x02,0x00,0x00,0xfb,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xfc,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x00,0x03,0x00,0x00, -0xff,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0xfd,0x02,0x00,0x00,0x00,0x03,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x02,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x72,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x04,0x03,0x00,0x00,0x03,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x05,0x03,0x00,0x00, -0x04,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x07,0x03,0x00,0x00,0x01,0x03,0x00,0x00,0x6b,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0x07,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x72,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0e,0x03,0x00,0x00,0x0d,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x0e,0x03,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x05,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x11,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x0c,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x14,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x03,0x00,0x00,0x14,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x09,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x18,0x03,0x00,0x00,0x17,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x12,0x03,0x00,0x00, -0x18,0x03,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0xf9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x72,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x23,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x24,0x03,0x00,0x00,0x23,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x73,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x1b,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x2d,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x2b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x2d,0x03,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x36,0x03,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x38,0x03,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x39,0x03,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x3c,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x3d,0x03,0x00,0x00,0x3c,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, 
-0x3d,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3f,0x03,0x00,0x00,0x3e,0x03,0x00,0x00,0x74,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x40,0x03,0x00,0x00, -0x3f,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x41,0x03,0x00,0x00,0x40,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x41,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x3e,0x03,0x00,0x00, -0x6d,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x45,0x03,0x00,0x00,0x44,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x03,0x00,0x00, -0x45,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x74,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x48,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4a,0x03,0x00,0x00,0x49,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4b,0x03,0x00,0x00,0x4a,0x03,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x03,0x00,0x00, -0x4b,0x03,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x4c,0x03,0x00,0x00, -0x42,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x4e,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x50,0x03,0x00,0x00,0x49,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x51,0x03,0x00,0x00,0x50,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00,0x51,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x03,0x00,0x00, -0x52,0x03,0x00,0x00,0x46,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x54,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x4f,0x03,0x00,0x00,0x55,0x03,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x56,0x03,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x57,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x74,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x58,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x03,0x00,0x00, -0x60,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x75,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x58,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x6a,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6a,0x03,0x00,0x00,0x69,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x73,0x03,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x75,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x76,0x03,0x00,0x00,0x75,0x03,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x77,0x03,0x00,0x00,0x76,0x03,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x79,0x03,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7a,0x03,0x00,0x00,0x79,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x7b,0x03,0x00,0x00, 
-0x77,0x03,0x00,0x00,0x7a,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x7c,0x03,0x00,0x00,0x7b,0x03,0x00,0x00, -0x75,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x7d,0x03,0x00,0x00,0x7c,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x7e,0x03,0x00,0x00, -0x7d,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7f,0x03,0x00,0x00,0x7e,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x81,0x03,0x00,0x00, -0x7b,0x03,0x00,0x00,0x6f,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x82,0x03,0x00,0x00,0x81,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x83,0x03,0x00,0x00,0x82,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x85,0x03,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x86,0x03,0x00,0x00,0x85,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x87,0x03,0x00,0x00,0x86,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x03,0x00,0x00, -0x87,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x03,0x00,0x00,0x88,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x03,0x00,0x00, -0x89,0x03,0x00,0x00,0x7f,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x8c,0x03,0x00,0x00,0x8b,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x8d,0x03,0x00,0x00, -0x86,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8e,0x03,0x00,0x00,0x8d,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x03,0x00,0x00, -0x8e,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x03,0x00,0x00,0x8f,0x03,0x00,0x00,0x83,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x03,0x00,0x00, -0x91,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x93,0x03,0x00,0x00,0x8c,0x03,0x00,0x00,0x92,0x03,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x94,0x03,0x00,0x00, -0x93,0x03,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x95,0x03,0x00,0x00,0x94,0x03,0x00,0x00, -0x73,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x9d,0x03,0x00,0x00, -0x95,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x9e,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9e,0x03,0x00,0x00,0x9d,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x76,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa6,0x03,0x00,0x00,0x95,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xa7,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa5,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa7,0x03,0x00,0x00,0xa6,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb0,0x03,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb2,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb3,0x03,0x00,0x00,0xb2,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb4,0x03,0x00,0x00, -0xb3,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb7,0x03,0x00,0x00, -0xb6,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0xb8,0x03,0x00,0x00,0xb4,0x03,0x00,0x00,0xb7,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb9,0x03,0x00,0x00, -0xb8,0x03,0x00,0x00,0x62,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xba,0x03,0x00,0x00,0xb9,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xbb,0x03,0x00,0x00,0xba,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x03,0x00,0x00, -0xbb,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xbe,0x03,0x00,0x00,0xb8,0x03,0x00,0x00,0x71,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xbf,0x03,0x00,0x00, -0xbe,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x03,0x00,0x00,0xbf,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xc2,0x03,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x62,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0xc3,0x03,0x00,0x00,0xc2,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc4,0x03,0x00,0x00, -0xc3,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x03,0x00,0x00,0xc4,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x03,0x00,0x00,0xc5,0x03,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc8,0x03,0x00,0x00,0xc6,0x03,0x00,0x00,0xbc,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x03,0x00,0x00, -0xc8,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xca,0x03,0x00,0x00,0xc3,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcb,0x03,0x00,0x00, -0xca,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcc,0x03,0x00,0x00,0xcb,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xcc,0x03,0x00,0x00, -0xc0,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcf,0x03,0x00,0x00,0xce,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xd0,0x03,0x00,0x00,0xc9,0x03,0x00,0x00, -0xcf,0x03,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xd1,0x03,0x00,0x00,0xd0,0x03,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xd2,0x03,0x00,0x00, -0xd1,0x03,0x00,0x00,0xb0,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd8,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x62,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xda,0x03,0x00,0x00,0xd2,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xdb,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xd8,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xdb,0x03,0x00,0x00,0xda,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xe3,0x03,0x00,0x00,0xd2,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xe4,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe2,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xe4,0x03,0x00,0x00, -0xe3,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xed,0x03,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xef,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf0,0x03,0x00,0x00, -0xef,0x03,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf1,0x03,0x00,0x00,0xf0,0x03,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf3,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf4,0x03,0x00,0x00,0xf3,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0xf5,0x03,0x00,0x00,0xf1,0x03,0x00,0x00, -0xf4,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf6,0x03,0x00,0x00,0xf5,0x03,0x00,0x00,0x64,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xf7,0x03,0x00,0x00, -0xf6,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xf8,0x03,0x00,0x00,0xf7,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf9,0x03,0x00,0x00,0xf8,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xfb,0x03,0x00,0x00,0xf5,0x03,0x00,0x00, -0x73,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xfc,0x03,0x00,0x00,0xfb,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfd,0x03,0x00,0x00, -0xfc,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xff,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x64,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x00,0x04,0x00,0x00, -0xff,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x01,0x04,0x00,0x00,0x00,0x04,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x02,0x04,0x00,0x00,0x01,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x04,0x00,0x00, -0x02,0x04,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x04,0x00,0x00,0x03,0x04,0x00,0x00, -0xf9,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x06,0x04,0x00,0x00,0x05,0x04,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x07,0x04,0x00,0x00,0x00,0x04,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x08,0x04,0x00,0x00,0x07,0x04,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x08,0x04,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x04,0x00,0x00, -0x09,0x04,0x00,0x00,0xfd,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0c,0x04,0x00,0x00,0x0b,0x04,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x0d,0x04,0x00,0x00, -0x06,0x04,0x00,0x00,0x0c,0x04,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x0e,0x04,0x00,0x00,0x0d,0x04,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x0f,0x04,0x00,0x00,0x0e,0x04,0x00,0x00,0xed,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x04,0x00,0x00, -0xa9,0x00,0x00,0x00,0x64,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x17,0x04,0x00,0x00,0x0f,0x04,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x18,0x04,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x15,0x04,0x00,0x00,0x3e,0x00,0x03,0x00,0x18,0x04,0x00,0x00, -0x17,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x78,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x20,0x04,0x00,0x00, -0x0f,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x21,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1f,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x21,0x04,0x00,0x00,0x20,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2a,0x04,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x2c,0x04,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2d,0x04,0x00,0x00,0x2c,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x2e,0x04,0x00,0x00,0x2d,0x04,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x30,0x04,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x31,0x04,0x00,0x00,0x30,0x04,0x00,0x00, 
-0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x32,0x04,0x00,0x00, -0x2e,0x04,0x00,0x00,0x31,0x04,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x33,0x04,0x00,0x00,0x32,0x04,0x00,0x00, -0x88,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x34,0x04,0x00,0x00,0x33,0x04,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x35,0x04,0x00,0x00, -0x34,0x04,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x04,0x00,0x00,0x35,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x38,0x04,0x00,0x00, -0x32,0x04,0x00,0x00,0x75,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x39,0x04,0x00,0x00,0x38,0x04,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3a,0x04,0x00,0x00,0x39,0x04,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x3c,0x04,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x3d,0x04,0x00,0x00,0x3c,0x04,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x3e,0x04,0x00,0x00,0x3d,0x04,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3f,0x04,0x00,0x00, -0x3e,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x04,0x00,0x00,0x3f,0x04,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x04,0x00,0x00, -0x40,0x04,0x00,0x00,0x36,0x04,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x43,0x04,0x00,0x00,0x42,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x44,0x04,0x00,0x00, -0x3d,0x04,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x45,0x04,0x00,0x00,0x44,0x04,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x04,0x00,0x00, -0x45,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x04,0x00,0x00,0x46,0x04,0x00,0x00,0x3a,0x04,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x49,0x04,0x00,0x00, -0x48,0x04,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x4a,0x04,0x00,0x00,0x43,0x04,0x00,0x00,0x49,0x04,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x4b,0x04,0x00,0x00, -0x4a,0x04,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x4c,0x04,0x00,0x00,0x4b,0x04,0x00,0x00, -0x2a,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x54,0x04,0x00,0x00, -0x4c,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x55,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x52,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x04,0x00,0x00,0x54,0x04,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0xa9,0x00,0x00,0x00, -0x79,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5d,0x04,0x00,0x00,0x4c,0x04,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x5e,0x04,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5e,0x04,0x00,0x00,0x5d,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc1,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q5_0_len = 13428; - -unsigned char dequant_q5_0_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x9b,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -8960,1079 +5015,10 @@ unsigned char 
dequant_q5_0_fp32_data[] = { 0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_0_fp32_len = 13952; +const uint64_t dequant_q5_0_len = 13952; unsigned char dequant_q5_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x63,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x98,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x99,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x99,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x99,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9b,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x50,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x98,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4b,0x04,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x04,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x04,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x04,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x04,0x00,0x00,0x13,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x50,0x04,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x51,0x04,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x04,0x00,0x00,0x15,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x04,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x04,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x55,0x04,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x04,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x57,0x04,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x58,0x04,0x00,0x00,0x18,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x59,0x04,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5a,0x04,0x00,0x00,0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5b,0x04,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5c,0x04,0x00,0x00, -0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5d,0x04,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x04,0x00,0x00,0x1b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x04,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x04,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x04,0x00,0x00,0x1e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x04,0x00,0x00, -0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbb,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00, 
-0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x56,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x91,0x00,0x00,0x00, 
-0x87,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x4b,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x4c,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x37,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x4d,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x79,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x1a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x82,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x25,0x01,0x00,0x00, 
-0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x32,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x4e,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x4a,0x01,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x82,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x64,0x01,0x00,0x00, 
-0x46,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x63,0x01,0x00,0x00, -0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x65,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x4f,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x48,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x91,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x50,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xb3,0x01,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xb3,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x51,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x4c,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x79,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x51,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x82,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x51,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xe6,0x01,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe6,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xed,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x52,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x53,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0xfe,0x01,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x4e,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x53,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, -0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x11,0x02,0x00,0x00, -0x10,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xf8,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0xfa,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x53,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x54,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x34,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x55,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3b,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x3e,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, -0x4f,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x55,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x44,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x48,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x4b,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x55,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x56,0x04,0x00,0x00,0x51,0x00,0x05,0x00, 
-0x4a,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x67,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x70,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x57,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x76,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x50,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x79,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x57,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x81,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0x82,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x8e,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x90,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x57,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x58,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x9b,0x00,0x00,0x00, 
-0x2e,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x59,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x52,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0xb8,0x02,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x59,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0xba,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc9,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0xae,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xcd,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, -0xcc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd3,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x59,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd6,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x5a,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xdf,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, 
-0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xea,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xed,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x5b,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0xed,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xef,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xf4,0x02,0x00,0x00,0xf2,0x02,0x00,0x00, -0x54,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf4,0x02,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf5,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0xf8,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5b,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xfa,0x02,0x00,0x00,0xf9,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, -0xfb,0x02,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0xfc,0x02,0x00,0x00, -0xf0,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xff,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xf9,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0x00,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x01,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00, -0x02,0x03,0x00,0x00,0xf6,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x04,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0xff,0x02,0x00,0x00,0x05,0x03,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00, -0xe8,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x07,0x03,0x00,0x00,0x08,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0xa2,0x00,0x00,0x00, -0x5b,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x09,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x12,0x03,0x00,0x00,0x11,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x03,0x00,0x00, -0xa2,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x09,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x1b,0x03,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x1b,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x24,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x28,0x03,0x00,0x00, 
-0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x5d,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, -0x29,0x03,0x00,0x00,0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x2b,0x03,0x00,0x00,0x2a,0x03,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x2b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x30,0x03,0x00,0x00, -0x2e,0x03,0x00,0x00,0x56,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x31,0x03,0x00,0x00,0x30,0x03,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x31,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0x79,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x5d,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x35,0x03,0x00,0x00,0x34,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x36,0x03,0x00,0x00,0x35,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x03,0x00,0x00, -0x36,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x37,0x03,0x00,0x00,0x82,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x38,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x3b,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x3c,0x03,0x00,0x00, -0x35,0x03,0x00,0x00,0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x3d,0x03,0x00,0x00,0x3c,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x03,0x00,0x00, -0x3d,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x03,0x00,0x00,0x3e,0x03,0x00,0x00,0x32,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x41,0x03,0x00,0x00, -0x40,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x3b,0x03,0x00,0x00,0x41,0x03,0x00,0x00, -0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x43,0x03,0x00,0x00, -0x42,0x03,0x00,0x00,0x24,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x26,0x03,0x00,0x00, -0x26,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x45,0x03,0x00,0x00,0x43,0x03,0x00,0x00,0x44,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x03,0x00,0x00, -0xa2,0x00,0x00,0x00,0x5d,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0x45,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x4e,0x03,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x4b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x4e,0x03,0x00,0x00, -0x4d,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x03,0x00,0x00,0xa2,0x00,0x00,0x00,0x5e,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x45,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x57,0x03,0x00,0x00,0x56,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x62,0x03,0x00,0x00, -0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x64,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x65,0x03,0x00,0x00,0x64,0x03,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x66,0x03,0x00,0x00,0x65,0x03,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x67,0x03,0x00,0x00, -0x66,0x03,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x03,0x00,0x00,0x67,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6a,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6c,0x03,0x00,0x00,0x6a,0x03,0x00,0x00,0x58,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6d,0x03,0x00,0x00, -0x6c,0x03,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x03,0x00,0x00,0x6d,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00,0x70,0x03,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x71,0x03,0x00,0x00,0x70,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x72,0x03,0x00,0x00, -0x71,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x73,0x03,0x00,0x00,0x72,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x03,0x00,0x00,0x73,0x03,0x00,0x00, -0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x03,0x00,0x00,0x74,0x03,0x00,0x00,0x68,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x77,0x03,0x00,0x00, -0x76,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x78,0x03,0x00,0x00,0x71,0x03,0x00,0x00,0x66,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x79,0x03,0x00,0x00, -0x78,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x03,0x00,0x00,0x79,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x03,0x00,0x00,0x7a,0x03,0x00,0x00, -0x6e,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7d,0x03,0x00,0x00,0x7c,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x7e,0x03,0x00,0x00,0x77,0x03,0x00,0x00, -0x7d,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x7f,0x03,0x00,0x00,0x7e,0x03,0x00,0x00,0x60,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x80,0x03,0x00,0x00, -0x62,0x03,0x00,0x00,0x62,0x03,0x00,0x00,0x81,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x81,0x03,0x00,0x00,0x7f,0x03,0x00,0x00, -0x80,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x03,0x00,0x00,0xa2,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x89,0x03,0x00,0x00, -0x81,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x8a,0x03,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x87,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8a,0x03,0x00,0x00,0x89,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x03,0x00,0x00,0xa2,0x00,0x00,0x00, -0x5f,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x92,0x03,0x00,0x00,0x81,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x93,0x03,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x91,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x93,0x03,0x00,0x00,0x92,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x9c,0x03,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x9e,0x03,0x00,0x00,0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa0,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xa1,0x03,0x00,0x00, -0xa0,0x03,0x00,0x00,0x4b,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xa2,0x03,0x00,0x00,0xa1,0x03,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa3,0x03,0x00,0x00,0xa2,0x03,0x00,0x00,0x4e,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x03,0x00,0x00, -0xa3,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa6,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xa8,0x03,0x00,0x00,0xa6,0x03,0x00,0x00, -0x5a,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa9,0x03,0x00,0x00,0xa8,0x03,0x00,0x00,0x4e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x03,0x00,0x00, -0xa9,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00, -0xac,0x03,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4b,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xad,0x03,0x00,0x00, -0xac,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xae,0x03,0x00,0x00,0xad,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xaf,0x03,0x00,0x00,0xae,0x03,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x03,0x00,0x00, -0xaf,0x03,0x00,0x00,0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x03,0x00,0x00,0xb0,0x03,0x00,0x00, -0xa4,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xb3,0x03,0x00,0x00,0xb2,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xb4,0x03,0x00,0x00,0xad,0x03,0x00,0x00, -0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb5,0x03,0x00,0x00,0xb4,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0xb5,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x03,0x00,0x00, -0xb6,0x03,0x00,0x00,0xaa,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb9,0x03,0x00,0x00,0xb8,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xba,0x03,0x00,0x00, -0xb3,0x03,0x00,0x00,0xb9,0x03,0x00,0x00,0x8e,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xbb,0x03,0x00,0x00,0xba,0x03,0x00,0x00, -0x9c,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xbc,0x03,0x00,0x00,0x9e,0x03,0x00,0x00,0x9e,0x03,0x00,0x00, -0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xbd,0x03,0x00,0x00, -0xbb,0x03,0x00,0x00,0xbc,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x03,0x00,0x00,0xa2,0x00,0x00,0x00, -0x4b,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x03,0x00,0x00,0xbd,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xc6,0x03,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc3,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x03,0x00,0x00,0xc5,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x03,0x00,0x00, -0xa2,0x00,0x00,0x00,0x60,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xbd,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xcf,0x03,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xcd,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xcf,0x03,0x00,0x00, -0xce,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd8,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xda,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdc,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdd,0x03,0x00,0x00,0xdc,0x03,0x00,0x00,0x4d,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xde,0x03,0x00,0x00, -0xdd,0x03,0x00,0x00,0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xdf,0x03,0x00,0x00,0xde,0x03,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe0,0x03,0x00,0x00,0xdf,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xe2,0x03,0x00,0x00,0x62,0x00,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe4,0x03,0x00,0x00, -0xe2,0x03,0x00,0x00,0x5c,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe5,0x03,0x00,0x00,0xe4,0x03,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe6,0x03,0x00,0x00,0xe5,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0x79,0x00,0x00,0x00,0xe8,0x03,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x4d,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xe9,0x03,0x00,0x00,0xe8,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xea,0x03,0x00,0x00,0xe9,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x03,0x00,0x00, -0xea,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x03,0x00,0x00,0xeb,0x03,0x00,0x00,0x82,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x03,0x00,0x00, -0xec,0x03,0x00,0x00,0xe0,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xef,0x03,0x00,0x00,0xee,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xf0,0x03,0x00,0x00, -0xe9,0x03,0x00,0x00,0x66,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf1,0x03,0x00,0x00,0xf0,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf2,0x03,0x00,0x00, -0xf1,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf4,0x03,0x00,0x00,0xf2,0x03,0x00,0x00,0xe6,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf5,0x03,0x00,0x00, -0xf4,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xf6,0x03,0x00,0x00,0xef,0x03,0x00,0x00,0xf5,0x03,0x00,0x00, -0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0xf7,0x03,0x00,0x00, -0xf6,0x03,0x00,0x00,0xd8,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0xf8,0x03,0x00,0x00,0xda,0x03,0x00,0x00, -0xda,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0xf9,0x03,0x00,0x00,0xf7,0x03,0x00,0x00,0xf8,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x03,0x00,0x00, -0xa2,0x00,0x00,0x00,0x4d,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x01,0x04,0x00,0x00,0xf9,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x02,0x04,0x00,0x00,0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xff,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x02,0x04,0x00,0x00, -0x01,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x04,0x00,0x00,0xa2,0x00,0x00,0x00,0x61,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x0a,0x04,0x00,0x00, -0xf9,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x0b,0x04,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0b,0x04,0x00,0x00,0x0a,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x14,0x04,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x16,0x04,0x00,0x00, -0x5b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x04,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x04,0x00,0x00,0x18,0x04,0x00,0x00, -0x82,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1a,0x04,0x00,0x00,0x19,0x04,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1b,0x04,0x00,0x00, -0x1a,0x04,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1c,0x04,0x00,0x00,0x1b,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1e,0x04,0x00,0x00, -0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x20,0x04,0x00,0x00,0x1e,0x04,0x00,0x00,0x5e,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x21,0x04,0x00,0x00, 
-0x20,0x04,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x22,0x04,0x00,0x00,0x21,0x04,0x00,0x00, -0x41,0x00,0x08,0x00,0x79,0x00,0x00,0x00,0x24,0x04,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x25,0x04,0x00,0x00,0x24,0x04,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x26,0x04,0x00,0x00, -0x25,0x04,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x27,0x04,0x00,0x00,0x26,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x04,0x00,0x00,0x27,0x04,0x00,0x00, -0x82,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x04,0x00,0x00,0x28,0x04,0x00,0x00,0x1c,0x04,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2b,0x04,0x00,0x00, -0x2a,0x04,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x2c,0x04,0x00,0x00,0x25,0x04,0x00,0x00,0x66,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2d,0x04,0x00,0x00, -0x2c,0x04,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x04,0x00,0x00,0x2d,0x04,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x04,0x00,0x00,0x2e,0x04,0x00,0x00, -0x22,0x04,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x31,0x04,0x00,0x00,0x30,0x04,0x00,0x00,0x50,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x32,0x04,0x00,0x00,0x2b,0x04,0x00,0x00, -0x31,0x04,0x00,0x00,0x8e,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x33,0x04,0x00,0x00,0x32,0x04,0x00,0x00,0x14,0x04,0x00,0x00, -0x50,0x00,0x05,0x00,0x7c,0x00,0x00,0x00,0x34,0x04,0x00,0x00, -0x16,0x04,0x00,0x00,0x16,0x04,0x00,0x00,0x81,0x00,0x05,0x00, -0x7c,0x00,0x00,0x00,0x35,0x04,0x00,0x00,0x33,0x04,0x00,0x00, -0x34,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x04,0x00,0x00,0xa2,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3d,0x04,0x00,0x00, -0x35,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x3e,0x04,0x00,0x00,0x9b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3b,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3e,0x04,0x00,0x00,0x3d,0x04,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x04,0x00,0x00,0xa2,0x00,0x00,0x00, -0x62,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x46,0x04,0x00,0x00,0x35,0x04,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x47,0x04,0x00,0x00, -0x9b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x47,0x04,0x00,0x00,0x46,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q5_1_len = 12768; - -unsigned char dequant_q5_1_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x95,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -11163,505 +6149,10 @@ unsigned char dequant_q5_1_fp32_data[] = { 0xbe,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_1_fp32_len = 13548; +const uint64_t dequant_q5_1_len = 13548; unsigned char dequant_q5_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x99,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, 
-0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0f,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x12,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x12,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x82,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, 
-0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x42,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x45,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x4d,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x0f,0x01,0x00,0x00,0x42,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x40,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x83,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x53,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x78,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x81,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xbe,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbe,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xd3,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0xc8,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, 
-0xfb,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x5a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x95,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0x7f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, 
-0x35,0x01,0x00,0x00,0x95,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x0e,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x53,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x29,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x0a,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x89,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x92,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x89,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x83,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x01,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q5_K_len = 5888; - -unsigned char dequant_q5_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xa0,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -12162,366 +6653,10 @@ unsigned char dequant_q5_K_fp32_data[] = { 0x8a,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_K_fp32_len = 5988; +const uint64_t dequant_q5_K_len = 5988; unsigned char dequant_q6_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x0a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x62,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xff,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x62,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x01,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0xfd,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, 
-0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 
-0x6d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x73,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, 
-0xac,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x73,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x02,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x06,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q6_K_len = 4212; - -unsigned char dequant_q6_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -12881,647 +7016,10 @@ unsigned char dequant_q6_K_fp32_data[] = { 0x06,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q6_K_fp32_len = 4296; +const uint64_t dequant_q6_K_len = 4296; unsigned char 
dequant_q8_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xd2,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x51,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x49,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x13,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, 
-0x49,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1d,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x38,0x01,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x49,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x65,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x65,0x01,0x00,0x00, -0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, 
-0x89,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
-0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xd1,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf5,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x10,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x19,0x02,0x00,0x00, 
-0x18,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x10,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x32,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x34,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3d,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x68,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x76,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, 
-0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q8_0_len = 7592; - -unsigned char dequant_q8_0_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x23,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -14262,7 +7760,7 @@ unsigned char dequant_q8_0_fp32_data[] = { 0x95,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q8_0_fp32_len = 8868; +const uint64_t dequant_q8_0_len = 8868; unsigned char diag_mask_inf_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -14530,144 +8028,6 @@ unsigned char f32_to_f16_data[] = { }; const uint64_t f32_to_f16_len = 1596; -unsigned char f32_to_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x36,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x36,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x40,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x40,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x11,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x32,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x3e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x3f,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x41,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x29,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x27,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x28,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x29,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x31,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4a,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x31,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t f32_to_f16_fp32_len = 1596; - unsigned char gelu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, @@ -14798,500 +8158,6 @@ const uint64_t gelu_f32_len = 1484; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x58,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x72,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_len = 1892; - -unsigned char get_rows_f16_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, 
-0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_f32_len = 1940; - -unsigned char get_rows_f16_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, 
-0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_f16_f32_fp32_len = 1932; - -unsigned char get_rows_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, @@ -15455,21 +8321,19 @@ unsigned char get_rows_f16_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_fp32_len = 1948; +const uint64_t get_rows_f16_len = 1948; -unsigned char get_rows_q4_0_data[] = { +unsigned char get_rows_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15488,297 +8352,75 @@ unsigned char get_rows_q4_0_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x00,0x48,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, 
-0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_0_len = 2356; - -unsigned char get_rows_q4_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x9a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 
-0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x00,0x48,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x85,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x91,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, 
-0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, @@ -15800,7 +8442,7 @@ unsigned char get_rows_q4_0_f32_data[] = { 0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, 0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, @@ -15813,258 +8455,41 @@ unsigned char get_rows_q4_0_f32_data[] = { 0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, 0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x86,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, 
-0x8c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x85,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x91,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x91,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_0_f32_len = 2404; +0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -unsigned char get_rows_q4_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, 
-0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x1d,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x67,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x67,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, 
-0x7f,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x1c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_0_f32_fp32_len = 2356; +const uint64_t get_rows_f16_f32_len = 1932; -unsigned char get_rows_q4_0_fp32_data[] = { +unsigned char get_rows_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x98,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -16264,425 +8689,11 @@ unsigned char get_rows_q4_0_fp32_data[] = { 0x8f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_0_fp32_len = 2372; +const uint64_t get_rows_q4_0_len = 2372; -unsigned char get_rows_q4_1_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x92,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x92,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_1_len = 2408; - -unsigned char get_rows_q4_1_f32_data[] = { +unsigned char get_rows_q4_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 
-0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 
-0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x89,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x95,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x53,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8a,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x89,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_1_f32_len = 2456; - -unsigned char get_rows_q4_1_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -16690,7 +8701,7 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -16714,91 +8725,92 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, 
+0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, 0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, 0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, 
+0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x67,0x00,0x00,0x00, 0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x1d,0x00,0x03,0x00, +0x79,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x93,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -16819,7 +8831,7 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -16845,49 +8857,42 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x6c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x6c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x88,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x93,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x67,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8e,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, 
+0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_1_f32_fp32_len = 2424; +const uint64_t get_rows_q4_0_f32_len = 2356; -unsigned char get_rows_q4_1_fp32_data[] = { +unsigned char get_rows_q4_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x96,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -17093,503 +9098,11 @@ unsigned char get_rows_q4_1_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_1_fp32_len = 2440; +const uint64_t get_rows_q4_1_len = 2440; -unsigned char get_rows_q5_0_data[] = { +unsigned char get_rows_q4_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xa4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa4,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa6,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x00,0x4c,0x00,0x00, -0x1d,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, 
-0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x87,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q5_0_len = 2868; - -unsigned char get_rows_q5_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xa4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa4,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa6,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x00,0x4c,0x00,0x00, -0x1d,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbb,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, 
-0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, 
-0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x55,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x87,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xae,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xaf,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xae,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q5_0_f32_len = 2916; - -unsigned char get_rows_q5_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -17597,7 +9110,7 @@ unsigned char 
get_rows_q5_0_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -17617,106 +9130,95 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa3,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, 
+0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, 0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x84,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x88,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00, -0xa2,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, 
+0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x93,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -17737,7 +9239,7 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -17758,80 +9260,54 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00, 
-0x68,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x88,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x88,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xad,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xac,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, 
+0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x6c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x6c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x88,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x93,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_0_f32_fp32_len = 2868; +const uint64_t get_rows_q4_1_f32_len = 2424; -unsigned char get_rows_q5_0_fp32_data[] = { +unsigned char get_rows_q5_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xc3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -18074,485 +9550,11 @@ unsigned char get_rows_q5_0_fp32_data[] = { 0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_0_fp32_len = 2884; +const uint64_t get_rows_q5_0_len = 2884; -unsigned char get_rows_q5_1_data[] = { +unsigned char get_rows_q5_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xaf,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_len = 2764; - -unsigned char get_rows_q5_1_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 
-0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, 
-0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb5,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb6,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x50,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xaa,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa9,0x00,0x00,0x00, 
-0xb2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb5,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_f32_len = 2812; - -unsigned char get_rows_q5_1_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -18560,7 +9562,7 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -18580,101 +9582,106 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa3,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 
+0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x84,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x88,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00, +0xa2,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -18695,7 +9702,7 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -18716,77 +9723,80 
@@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_f32_fp32_len = 2780; +0x52,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, 
+0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x88,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x88,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xad,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xac,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -unsigned char get_rows_q5_1_fp32_data[] = { +}; +const uint64_t get_rows_q5_0_f32_len = 2868; + +unsigned char get_rows_q5_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -19022,61 +10032,63 @@ unsigned char get_rows_q5_1_fp32_data[] = { 0xb4,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_1_fp32_len = 2796; +const uint64_t get_rows_q5_1_len = 2796; -unsigned char get_rows_q8_0_data[] = { +unsigned char get_rows_q5_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x54,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 
+0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -19108,226 +10120,45 @@ unsigned char get_rows_q8_0_data[] = { 0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x62,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, 
-0x4f,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q8_0_len = 2232; - -unsigned char get_rows_q8_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x89,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x54,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x71,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x84,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -19348,7 +10179,7 @@ 
unsigned char get_rows_q8_0_f32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x87,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -19365,247 +10196,81 @@ unsigned char get_rows_q8_0_f32_data[] = { 0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x62,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x7b,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, 
+0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xa8,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q8_0_f32_len = 2280; +const uint64_t get_rows_q5_1_f32_len = 2780; -unsigned char get_rows_q8_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x8a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
-0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x52,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x88,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q8_0_f32_fp32_len = 2280; - -unsigned char get_rows_q8_0_fp32_data[] = { +unsigned char get_rows_q8_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x8b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -19799,7 +10464,202 @@ unsigned char get_rows_q8_0_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x89,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q8_0_fp32_len = 2296; +const uint64_t get_rows_q8_0_len = 2296; + +unsigned char get_rows_q8_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x8a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
+0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x51,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x55,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x56,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x5e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t get_rows_q8_0_f32_len = 2280; unsigned char matmul_f16_aligned_l_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index b1e0006bb..14fb89e09 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1,6 +1,6 @@ #include "ggml-vulkan.h" -#ifdef VK_RUN_TESTS +#ifdef GGML_VULKAN_RUN_TESTS #include #endif @@ -255,6 +255,7 @@ static size_t vk_staging_offset; static vk_buffer vk_sync_staging; static vk_context * vk_ctx; +static vk_context * vk_transfer_ctx; static bool vk_disable; @@ -264,7 +265,7 @@ size_t vk_output_tensor; #endif static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_pipeline(" << name << ", " << entrypoint << ", " << parameter_count << ", " << push_constant_size << ", (" << wg_denoms[0] << "," << wg_denoms[1] << "," << wg_denoms[2] << "), specialization_constants, " << align << ")" << std::endl; #endif GGML_ASSERT(parameter_count > 0); @@ -368,7 +369,7 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s } static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uint32_t n) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; #endif // Check if gc already contains pipeline before adding it @@ -413,14 +414,14 @@ static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uin } static void ggml_vk_pipeline_cleanup(vk_pipeline& pipeline) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pipeline_cleanup(" << pipeline.name << ")" << std::endl; #endif pipeline.descriptor_set_idx = 0; } static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_cmd_buffer()" << std::endl; #endif if (q.cmd_buffers.size() > q.cmd_buffer_idx) { @@ -442,7 +443,7 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { } static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_submission()" << std::endl; #endif vk_submission s; @@ -453,14 +454,14 @@ static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_sequence_1()" << std::endl; #endif return { ggml_vk_create_submission(q, std::move(wait_semaphores), std::move(signal_semaphores)) }; } static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_submit(" << ctx->seqs.size() << ", " << fence << ")" << std::endl; #endif if (ctx->seqs.empty()) { @@ -536,7 +537,7 @@ static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { } static uint32_t ggml_vk_find_queue_family_index(std::vector& queue_family_props, const vk::QueueFlags& required, const vk::QueueFlags& avoid, int32_t compute_index, uint32_t min_num_queues) { -#ifdef VK_DEBUG +#ifdef 
GGML_VULKAN_DEBUG std::cerr << "ggml_vk_find_queue_family_index()" << std::endl; #endif const uint32_t qfsize = queue_family_props.size(); @@ -578,7 +579,7 @@ static uint32_t ggml_vk_find_queue_family_index(std::vector= vk_gc.tl_semaphores.size()) { @@ -642,7 +643,7 @@ static vk::Event ggml_vk_create_event() { } static void ggml_vk_queue_cleanup(vk_queue& q) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_queue_cleanup()" << std::endl; #endif // Requires command buffers to be done @@ -652,7 +653,7 @@ static void ggml_vk_queue_cleanup(vk_queue& q) { } static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_flags) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; #endif GGML_ASSERT(size > 0); @@ -743,7 +744,7 @@ static void ggml_vk_destroy_buffer(vk_buffer& buf) { if (buf.size == 0) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_destroy_buffer(" << buf.size << ")" << std::endl; #endif @@ -757,7 +758,7 @@ static vk_subbuffer ggml_vk_subbuffer(vk_buffer& buf) { } static void ggml_vk_sync_buffers(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_sync_buffers()" << std::endl; #endif const std::vector mem_barriers{ { { vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite }, { vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite } } }; @@ -773,7 +774,7 @@ static void ggml_vk_sync_buffers(vk_context * ctx) { } static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vector&& events, vk::PipelineStageFlags src_stages, vk::PipelineStageFlags dst_stages) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_wait_events()" << std::endl; #endif if (events.empty()) { @@ -810,7 +811,7 @@ static bool ggml_vk_build_shader(ggml_type type) { } static void ggml_vk_load_shaders() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_load_shaders()" << std::endl; #endif @@ -849,36 +850,6 @@ static void ggml_vk_load_shaders() { vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - // Build dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", 
dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - - // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, 
sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); } else { vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); @@ -901,36 +872,6 @@ static void ggml_vk_load_shaders() { vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - // Build dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_fp32_len, f32_to_f16_fp32_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_fp32_len, dequant_f16_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_fp32_len, dequant_q4_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_fp32_len, dequant_q4_1_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_fp32_len, dequant_q5_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_fp32_len, dequant_q5_1_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_fp32_len, dequant_q8_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_fp32_len, dequant_q2_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_fp32_len, dequant_q3_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_fp32_len, dequant_q4_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_fp32_len, dequant_q5_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_fp32_len, dequant_q6_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - - // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_fp32_len, get_rows_f16_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 
1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_fp32_len, get_rows_q4_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_fp32_len, get_rows_q4_1_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_fp32_len, get_rows_q5_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_fp32_len, get_rows_q5_1_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_fp32_len, get_rows_q8_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_fp32_len, get_rows_f16_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_fp32_len, get_rows_q4_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_fp32_len, get_rows_q4_1_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_fp32_len, get_rows_q5_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_fp32_len, get_rows_q5_1_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_fp32_len, get_rows_q8_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); } vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); @@ -945,6 +886,36 @@ static void ggml_vk_load_shaders() { vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + // dequant shaders + vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); + + vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 
32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + + // get_rows + vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + + vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = 
ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_matmul_split_k_reduce = ggml_vk_create_pipeline("split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); vk_pipeline_mul_mat_vec_p021_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); @@ -983,7 +954,7 @@ static void ggml_vk_load_shaders() { } void ggml_vk_init() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_init()" << std::endl; #endif static bool initialized = false; @@ -999,17 +970,17 @@ void ggml_vk_init() { vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector layers = { -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE "VK_LAYER_KHRONOS_validation", #endif }; const std::vector extensions = { -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif }; vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { features_enable, @@ -1120,7 +1091,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions.push_back("VK_KHR_16bit_storage"); -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE device_extensions.push_back("VK_KHR_shader_non_semantic_info"); #endif @@ -1154,6 +1125,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; vk_fence = vk_device.device.createFence({}); vk_ctx = nullptr; + vk_transfer_ctx = nullptr; vk_disable = false; @@ -1166,7 +1138,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; } static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_to_fp16()" << std::endl; #endif switch (type) { @@ -1190,7 +1162,7 @@ static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { } static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_dequantize_mul_mat_vec()" << std::endl; #endif switch (type) { @@ -1219,7 +1191,7 @@ static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { static vk_buffer g_vk_buffer_pool[MAX_VK_BUFFERS]; static vk_buffer ggml_vk_pool_malloc(size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_malloc(" << size << ")" << std::endl; #endif int best_i = -1; @@ -1253,7 +1225,7 @@ static vk_buffer ggml_vk_pool_malloc(size_t size) { } static void ggml_vk_pool_free(vk_buffer& buffer) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_free(" << buffer.size << ")" << std::endl; #endif for (int i = 0; i < MAX_VK_BUFFERS; ++i) { @@ -1286,7 +1258,7 @@ static vk_buffer ggml_vk_create_buffer_temp(size_t size) { } static void * ggml_vk_host_malloc(size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif vk_buffer buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); @@ -1309,7 +1281,7 @@ static void ggml_vk_host_free(void* ptr) { if (ptr == nullptr) 
{ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_free(" << ptr << ")" << std::endl; #endif vk_buffer* buf = nullptr; @@ -1363,7 +1335,7 @@ static void ggml_vk_dispatch_pipeline(vk_context * ctx, vk_pipeline& pipeline, s const uint32_t wg0 = CEIL_DIV(elements[0], pipeline.wg_denoms[0]); const uint32_t wg1 = CEIL_DIV(elements[1], pipeline.wg_denoms[1]); const uint32_t wg2 = CEIL_DIV(elements[2], pipeline.wg_denoms[2]); -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_dispatch_pipeline(" << pipeline.name << ", (" << wg0 << "," << wg1 << "," << wg2 << "))" << std::endl; #endif std::vector descriptor_buffer_infos; @@ -1398,7 +1370,7 @@ static void ggml_vk_end_submission(vk_submission& s, std::vector w } static void ggml_vk_ctx_end(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_ctx_end(" << ctx << ", " << ctx->seqs.size() << ")" << std::endl; #endif if (ctx->s == nullptr) { @@ -1410,7 +1382,7 @@ static void ggml_vk_ctx_end(vk_context * ctx) { } static void ggml_vk_ctx_begin(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_ctx_begin(" << ctx << ")" << std::endl; #endif if (ctx->s != nullptr) { @@ -1441,7 +1413,7 @@ static void ensure_sync_staging_buffer(size_t size) { } static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_nc_async(" << tensor << ")" << std::endl; #endif GGML_ASSERT(!ggml_is_contiguous(tensor)); @@ -1548,7 +1520,7 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size } static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d_async(" << width << ", " << height << ")" << std::endl; #endif // Buffer is already mapped @@ -1582,7 +1554,7 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "STAGING" << std::endl; #endif @@ -1619,14 +1591,14 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size } static void ggml_vk_buffer_write_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_async(" << size << ")" << std::endl; #endif return ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, size, size, 1, sync_staging); } static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d(" << width << ", " << height << ")" << std::endl; #endif // Buffer is already mapped @@ -1653,14 +1625,14 @@ static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * } static void ggml_vk_buffer_write(vk_buffer* dst, size_t offset, const void * src, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write(" << size << ")" << std::endl; #endif ggml_vk_buffer_write_2d(dst, offset, src, 0, size, 1); } static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* 
src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read_2d_async(offset=" << offset << ", width=" << width << ", height=" << height << ")" << std::endl; #endif GGML_ASSERT(width > 0); @@ -1693,7 +1665,7 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "STAGING" << std::endl; #endif @@ -1722,7 +1694,7 @@ static void ggml_vk_buffer_read_async(vk_context * ctx, vk_buffer* src, size_t o } static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read(" << offset << ", " << size << ")" << std::endl; #endif if(src->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { @@ -1746,7 +1718,7 @@ static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_ } static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy_async(" << size << ")" << std::endl; #endif VkBufferCopy bc{ src_offset, dst_offset, size }; @@ -1755,7 +1727,7 @@ static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t } static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy(" << size << ")" << std::endl; #endif VkBufferCopy bc{ src_offset, dst_offset, size }; @@ -1771,7 +1743,7 @@ static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * } static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_memset(" << offset << ", " << c << ", " << size << ")" << std::endl; #endif vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); @@ -1785,7 +1757,7 @@ static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, siz } static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_h2d_tensor_2d(dst=" << dst << ", offset=" << offset << ", src=" << src << ", i3=" << i3 << ", i2=" << i2 << ", i1=" << i1 << ")" << std::endl; #endif const uint64_t ne0 = src->ne[0]; @@ -1815,7 +1787,7 @@ static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offs } static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offset, const ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_d2h_tensor_2d()" << std::endl; #endif const uint64_t ne0 = dst->ne[0]; @@ -1841,24 +1813,24 @@ static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offs } static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_split_k(" << m << ", " << n << ", " << k << ")"; #endif if (k > 128 && (m < 128 || n < 128) && m > 2 && n > 2) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " = 4" << std::endl; #endif return 4; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " = 1" << std::endl; #endif return 1; } static 
uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")" << std::endl; #endif if (m <= 32 || n <= 32) { @@ -1871,41 +1843,41 @@ static uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { } static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, int m, int n, bool aligned) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; #endif if (bit16_x && bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_s : &vk_pipeline_matmul_f16_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_m : &vk_pipeline_matmul_f16_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_l : &vk_pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_s : &vk_pipeline_matmul_f16_f32_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_m : &vk_pipeline_matmul_f16_f32_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_l : &vk_pipeline_matmul_f16_f32_l; @@ -1914,30 +1886,30 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, in GGML_ASSERT(false); } - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f32_aligned_s : &vk_pipeline_matmul_f32_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f32_aligned_m : &vk_pipeline_matmul_f32_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? 
&vk_pipeline_matmul_f32_aligned_l : &vk_pipeline_matmul_f32_l; } static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_matmul(a: (" << a.buffer.buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer.buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer.buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer.buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl; #endif + ggml_vk_sync_buffers(ctx); if (split_k == 1) { - ggml_vk_sync_buffers(ctx); const std::array pc = { m, n, k, stride_a, stride_b, stride_d, k, ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch }); return; @@ -1945,10 +1917,6 @@ static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer GGML_ASSERT(batch_stride_d == m * n); - // Synchronize the two submissions - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.fillBuffer(split_k_buffer.buffer.buffer, 0, split_k_buffer.size, 0); - ggml_vk_sync_buffers(ctx); const std::array pc1 = { m, n, k, stride_a, stride_b, stride_d, CEIL_DIV(k, split_k), ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; // Make sure enough workgroups get assigned for split k to work ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch }); @@ -1980,7 +1948,7 @@ static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_type from, ggml_type to) { } static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cpy_to_contiguous((" << tensor << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << "), "; std::cerr << "buffer in size=" << in.buffer.size << ", buffer out size=" << out.buffer.size << ")" << std::endl; #endif @@ -2002,7 +1970,7 @@ static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, } static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << 
"ggml_vk_mul_mat_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2186,7 +2154,7 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co } static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_vec_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2366,7 +2334,7 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 } static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_p021_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << 
dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2455,7 +2423,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor } static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_nc_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2561,7 +2529,7 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr } static void ggml_vk_mul_mat(vk_context * ctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat(" << src0 << ", " << src1 << ", " << dst << ")" << std::endl; #endif if (src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { @@ -2774,7 +2742,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * template static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op, const PC&& pc) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -3095,7 +3063,7 @@ static void ggml_vk_nop(vk_context * ctx, const ggml_tensor * src0, ggml_tensor } } -#ifdef VK_RUN_TESTS +#ifdef GGML_VULKAN_RUN_TESTS static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0, int ne1, int i0, int i1, int i2) { if (type != GGML_TYPE_F32 && type != GGML_TYPE_F16) { return; @@ -3129,7 +3097,7 @@ static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0 template static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_matmul(" << m << ", " 
<< n << ", " << k << ", " << batch << ", " << num_it << ", " << split_k << ", " << shader_size << ")" << std::endl; #endif const size_t x_ne = m * k * batch; @@ -3520,7 +3488,7 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) } static void ggml_vk_test_transfer(size_t ne, bool pinned) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_transfer(" << ne << ")" << std::endl; #endif // Check transfers are correct @@ -3600,10 +3568,103 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { free(y); } } + +static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_test_dequant(" << ne << ")" << std::endl; +#endif + const size_t x_sz = sizeof(float) * ne; + const size_t x_sz_f16 = sizeof(ggml_fp16_t) * ne; + const size_t qx_sz = ne * ggml_type_size(quant)/ggml_blck_size(quant); + float * x = (float *) malloc(x_sz); + void * qx = malloc(qx_sz); + vk_buffer qx_buf = ggml_vk_create_buffer_check(qx_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer x_buf = ggml_vk_create_buffer_check(x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); + ggml_fp16_t * x_chk = (ggml_fp16_t *) malloc(x_sz_f16); + + for (size_t i = 0; i < ne; i++) { + x[i] = rand() / (float)RAND_MAX; + } + + std::vector hist_cur(1 << 4, 0); + + vk_pipeline& p = vk_pipeline_dequant[quant]; + + switch(quant) { + case GGML_TYPE_Q4_0: + ggml_quantize_q4_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_1: + ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_0: + ggml_quantize_q5_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_1: + ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q8_0: + ggml_quantize_q8_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q2_K: + ggml_quantize_q2_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q3_K: + ggml_quantize_q3_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_K: + ggml_quantize_q4_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_K: + ggml_quantize_q5_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q6_K: + ggml_quantize_q6_K(x, qx, ne, ne, hist_cur.data()); + break; + default: + GGML_ASSERT(false); + } + + ggml_vk_pipeline_allocate_descriptor_sets(p, 1); + + ggml_vk_buffer_write(&qx_buf, 0, qx, qx_sz); + + vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); + ggml_vk_ctx_begin(ctx); + const std::vector pc = { 1, (int)ne, (int)ne, (int)ne }; + ggml_vk_sync_buffers(ctx); + ggml_vk_dispatch_pipeline(ctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); + ggml_vk_ctx_end(ctx); + + auto begin = std::chrono::high_resolution_clock::now(); + + ggml_vk_submit(ctx, vk_fence); + VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); + vk_device.device.resetFences({ vk_fence }); + + auto end = std::chrono::high_resolution_clock::now(); + + double ms_dequant = std::chrono::duration_cast(end-begin).count() / 1000.0; + ggml_vk_buffer_read(&x_buf, 0, x_chk, x_sz_f16); + + double avg_err = 0.0; + for (size_t i = 0; i < ne; i++) { + avg_err += std::fabs(x[i] - ggml_fp16_to_fp32(x_chk[i])); + } + + std::cerr << "TEST DEQUANT " << ggml_type_name(quant) << " time=" << ms_dequant << "ms avg_err=" << avg_err / ne << std::endl; + + ggml_vk_destroy_buffer(x_buf); + ggml_vk_destroy_buffer(qx_buf); + + free(x); + 
free(qx); + free(x_chk); +} #endif static ggml_tensor_extra_gpu * ggml_vk_tensor_create_extra(ggml_tensor * tensor) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_extra(" << tensor << " (" << tensor->name << ", " << ggml_op_name(tensor->op) << "))" << std::endl; #endif ggml_tensor_extra_gpu * extra = new ggml_tensor_extra_gpu; @@ -3627,7 +3688,7 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph } void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; #endif const bool any_on_device = node->backend == GGML_BACKEND_GPU @@ -3746,15 +3807,26 @@ void ggml_vk_preallocate_buffers() { if (vk_disable) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_preallocate_buffers()" << std::endl; std::cerr << "qx_size: " << vk_prealloc_size_qx << " qy_size: " << vk_prealloc_size_qy << " x_size: " << vk_prealloc_size_x << " y_size: " << vk_prealloc_size_y << " split_k_size: " << vk_prealloc_size_split_k << std::endl; #endif -#if defined(VK_RUN_TESTS) +#if defined(GGML_VULKAN_RUN_TESTS) vk_staging = ggml_vk_create_buffer_check(100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); ggml_vk_test_transfer(8192 * 1000, false); ggml_vk_test_transfer(8192 * 1000, true); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_1); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_1); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q8_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q2_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q3_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q6_K); + const std::vector vals { 8, 8, 8, 100, 46, 576, @@ -3845,7 +3917,7 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_build_graph(" << node << ", " << ggml_op_name(node->op) << ")" << std::endl; #endif vk_semaphore_idx = 0; @@ -4068,7 +4140,7 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) return true; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_compute_forward(" << tensor << ", name=" << tensor->name << ", op=" << ggml_op_name(tensor->op) << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << ", view_src=" << tensor->view_src << ", view_offs=" << tensor->view_offs << ")" << std::endl; #endif @@ -4111,7 +4183,7 @@ void ggml_vk_graph_cleanup() { if (vk_disable) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_graph_cleanup()" << std::endl; #endif for (auto& buffer : vk_gc.temp_buffers) { @@ -4150,7 +4222,7 @@ void ggml_vk_graph_cleanup() { } static void ggml_vk_cleanup() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cleanup()" << std::endl; #endif ggml_vk_destroy_buffer(vk_prealloc_x); @@ -4234,7 +4306,7 @@ GGML_CALL static void * ggml_backend_vk_buffer_get_base(ggml_backend_buffer_t bu } 
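
Note: the GGML_VULKAN_DEBUG and GGML_VULKAN_RUN_TESTS guards used in this backend are plain compile-time defines rather than runtime switches. A minimal sketch of a build that exercises them, assuming the defines are simply passed through the compiler flags (there may be no dedicated CMake cache option for them):

    # verbose per-op logging plus the built-in transfer/dequant/matmul self-tests (slow)
    mkdir -p build && cd build
    cmake .. -DLLAMA_VULKAN=ON \
        -DCMAKE_CXX_FLAGS="-DGGML_VULKAN_DEBUG -DGGML_VULKAN_RUN_TESTS"
    cmake --build . --config Release
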
GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_init_tensor(" << buffer << " (" << buffer->context << "), " << tensor << ")" << std::endl; #endif ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4254,7 +4326,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b } GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_set_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -4267,7 +4339,7 @@ GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu } GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_get_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -4323,7 +4395,7 @@ GGML_CALL static const char * ggml_backend_vk_buffer_type_name(ggml_backend_buff } GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_type_alloc_buffer(" << size << ")" << std::endl; #endif vk_buffer dev_buffer = ggml_vk_create_buffer_device(size); @@ -4467,7 +4539,7 @@ GGML_CALL static ggml_backend_buffer_type_t ggml_backend_vk_get_default_buffer_t } GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_set_tensor_async(" << size << ")" << std::endl; #endif GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); @@ -4475,19 +4547,19 @@ GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, g ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_write_async(vk_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + ggml_vk_buffer_write_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); UNUSED(backend); } GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_get_tensor_async(" << size << ")" << std::endl; #endif GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); @@ -4495,32 +4567,32 @@ GGML_CALL static void 
ggml_backend_vk_get_tensor_async(ggml_backend_t backend, c ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_read_async(vk_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + ggml_vk_buffer_read_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); UNUSED(backend); } GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_cpy_tensor_async()" << std::endl; #endif if ((dst->buffer->buft == ggml_backend_vk_buffer_type() || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_copy_async(vk_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + ggml_vk_buffer_copy_async(vk_transfer_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); return true; } @@ -4530,28 +4602,28 @@ GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, c } GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_synchronize()" << std::endl; #endif - if(vk_ctx == nullptr) { + if(vk_transfer_ctx == nullptr) { return; } - ggml_vk_ctx_end(vk_ctx); + ggml_vk_ctx_end(vk_transfer_ctx); - for (auto& cpy : vk_ctx->in_memcpys) { + for (auto& cpy : vk_transfer_ctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ggml_vk_submit(vk_ctx, vk_fence); + ggml_vk_submit(vk_transfer_ctx, vk_fence); VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); vk_device.device.resetFences({ vk_fence }); - for (auto& cpy : vk_ctx->out_memcpys) { + for (auto& cpy : vk_transfer_ctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - vk_ctx = nullptr; + vk_transfer_ctx = nullptr; UNUSED(backend); } diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index 67981a751..4abb0383f 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -157,19 +157,10 @@ struct block_q6_K # Dequant functions shader_f16_dequant_func = """ -#define DEQUANT_FUNC f16vec2 v = f16vec2(data_a[ib + 0], data_a[ib + 1]); -""" -shader_f16_dequant_func_compat = """ #define DEQUANT_FUNC vec2 v = vec2(data_a[ib + 0], data_a[ib + 1]); """ shader_q4_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2(vui & 0xF, vui >> 4); \ -v = (v - 8.0hf)*d; -""" -shader_q4_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const uint 
vui = uint(data_a[ib].qs[iqs]); \ vec2 v = vec2(vui & 0xF, vui >> 4); \ @@ -177,13 +168,6 @@ v = (v - 8.0f)*d; """ shader_q4_1_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const float16_t m = data_a[ib].m; \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2(vui & 0xF, vui >> 4); \ -v = v*d + m; -""" -shader_q4_1_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const float m = float(data_a[ib].m); \ const uint vui = uint(data_a[ib].qs[iqs]); \ @@ -192,14 +176,6 @@ v = v*d + m; """ shader_q5_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const uint uint_qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; \ -const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y); \ -v = (v - 16.0hf) * d; -""" -shader_q5_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const uint uint_qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; \ const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); \ @@ -209,14 +185,6 @@ v = (v - 16.0f) * d; """ shader_q5_1_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const float16_t m = data_a[ib].m; \ -const ivec2 qh = ivec2(((data_a[ib].qh >> iqs) << 4) & 0x10, (data_a[ib].qh >> (iqs + 12)) & 0x10); \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y); \ -v = v*d + m; -""" -shader_q5_1_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const float m = float(data_a[ib].m); \ const ivec2 qh = ivec2(((data_a[ib].qh >> iqs) << 4) & 0x10, (data_a[ib].qh >> (iqs + 12)) & 0x10); \ @@ -226,11 +194,6 @@ v = v*d + m; """ shader_q8_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -f16vec2 v = f16vec2(data_a[ib].qs[iqs], data_a[ib].qs[iqs + 1]); \ -v = v * d; -""" -shader_q8_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ vec2 v = vec2(int(data_a[ib].qs[iqs]), int(data_a[ib].qs[iqs + 1])); \ v = v * d; @@ -2110,7 +2073,7 @@ lock = asyncio.Lock() shader_fnames = [] -async def string_to_spv(name, code, defines, fp16): +async def string_to_spv(name, code, defines, fp16=True): f = NamedTemporaryFile(mode="w", delete=False) f.write(code) f.flush() @@ -2200,64 +2163,6 @@ async def main(): tasks.append(string_to_spv("matmul_f16_f32_aligned_m", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) tasks.append(string_to_spv("matmul_f16_f32_aligned_s", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - # Build dequant shaders - tasks.append(string_to_spv("f32_to_f16", f32_to_f16_src, {}, fp16)) - - for i in range(0, VK_NUM_TYPES): - stream.clear() - - stream.extend((dequant_head, shader_int8_ext, shader_float_type)) - - if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat if not fp16 else shader_f16_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat if not fp16 else shader_q4_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat if not fp16 else shader_q4_1_dequant_func, dequant_body)) - 
elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat if not fp16 else shader_q5_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat if not fp16 else shader_q5_1_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat if not fp16 else shader_q8_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q2_K: - stream.extend((shader_q2_K_defines, dequant_q2_K_body)) - elif i == GGML_TYPE_Q3_K: - stream.extend((shader_q3_K_defines, dequant_q3_K_body)) - elif i == GGML_TYPE_Q4_K: - stream.extend((shader_q4_K_defines, dequant_q4_K_body)) - elif i == GGML_TYPE_Q5_K: - stream.extend((shader_q5_K_defines, dequant_q5_K_body)) - elif i == GGML_TYPE_Q6_K: - stream.extend((shader_q6_K_defines, dequant_q6_K_body)) - else: - continue - - tasks.append(string_to_spv(f"dequant_{type_names[i]}", "".join(stream), {"D_TYPE": "float16_t"}, fp16)) - - # get_rows - for i in range(0, VK_NUM_TYPES): - stream.clear() - stream.extend((generic_head, shader_int8_ext, shader_float_type)) - - if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat if not fp16 else shader_f16_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat if not fp16 else shader_q4_0_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat if not fp16 else shader_q4_1_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat if not fp16 else shader_q5_0_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat if not fp16 else shader_q5_1_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat if not fp16 else shader_q8_0_dequant_func, get_rows_body)) - else: - continue - - tasks.append(string_to_spv(f"get_rows_{type_names[i]}", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float16_t"}, fp16)) - tasks.append(string_to_spv(f"get_rows_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float"}, fp16)) - # Shaders where precision is needed, so no fp16 version # mul mat vec @@ -2266,17 +2171,17 @@ async def main(): stream.extend((mul_mat_vec_head, shader_int8_ext, shader_f32)) if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_f16_defines, shader_f16_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, mul_mat_vec_body)) elif i == 
GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q2_K: stream.extend((shader_q2_K_defines, mul_mat_vec_q2_K_body)) elif i == GGML_TYPE_Q3_K: @@ -2290,43 +2195,101 @@ async def main(): else: continue - tasks.append(string_to_spv(f"mul_mat_vec_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float", "K_QUANTS_PER_ITERATION": K_QUANTS_PER_ITERATION}, fp16)) + tasks.append(string_to_spv(f"mul_mat_vec_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float", "K_QUANTS_PER_ITERATION": K_QUANTS_PER_ITERATION})) - tasks.append(string_to_spv("mul_mat_vec_p021_f16_f32", mul_mat_p021_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("mul_mat_vec_nc_f16_f32", mul_mat_nc_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, True)) + # Dequant shaders + for i in range(0, VK_NUM_TYPES): + stream.clear() + + stream.extend((dequant_head, shader_int8_ext, shader_f32)) + + if i == GGML_TYPE_F16: + stream.extend((shader_f16_defines, shader_f16_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q4_0: + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q4_1: + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q5_0: + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q5_1: + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q8_0: + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q2_K: + stream.extend((shader_q2_K_defines, dequant_q2_K_body)) + elif i == GGML_TYPE_Q3_K: + stream.extend((shader_q3_K_defines, dequant_q3_K_body)) + elif i == GGML_TYPE_Q4_K: + stream.extend((shader_q4_K_defines, dequant_q4_K_body)) + elif i == GGML_TYPE_Q5_K: + stream.extend((shader_q5_K_defines, dequant_q5_K_body)) + elif i == GGML_TYPE_Q6_K: + stream.extend((shader_q6_K_defines, dequant_q6_K_body)) + else: + continue + + tasks.append(string_to_spv(f"dequant_{type_names[i]}", "".join(stream), {"D_TYPE": "float16_t"})) + + tasks.append(string_to_spv("f32_to_f16", f32_to_f16_src, {})) + + # get_rows + for i in range(0, VK_NUM_TYPES): + stream.clear() + stream.extend((generic_head, shader_int8_ext, shader_f32)) + + if i == GGML_TYPE_F16: + stream.extend((shader_f16_defines, shader_f16_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q4_0: + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q4_1: + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q5_0: + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q5_1: + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q8_0: + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, get_rows_body)) + else: + continue + + tasks.append(string_to_spv(f"get_rows_{type_names[i]}", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv(f"get_rows_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float"})) + + tasks.append(string_to_spv("mul_mat_vec_p021_f16_f32", mul_mat_p021_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": 
"float"})) + tasks.append(string_to_spv("mul_mat_vec_nc_f16_f32", mul_mat_nc_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"})) # Norms - tasks.append(string_to_spv("norm_f32", f"{generic_head}\n{shader_f32}\n{norm_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rms_norm_f32", f"{generic_head}\n{shader_f32}\n{rms_norm_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("norm_f32", f"{generic_head}\n{shader_f32}\n{norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rms_norm_f32", f"{generic_head}\n{shader_f32}\n{rms_norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("cpy_f32_f32", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("cpy_f32_f16", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"}, True)) - tasks.append(string_to_spv("cpy_f16_f16", f"{cpy_src}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("cpy_f32_f32", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("cpy_f32_f16", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("cpy_f16_f16", f"{cpy_src}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - tasks.append(string_to_spv("add_f32", f"{generic_head}\n{shader_f32}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("add_f32", f"{generic_head}\n{shader_f32}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("split_k_reduce", mulmat_split_k_reduce_src, {}, True)) - tasks.append(string_to_spv("mul_f32", f"{generic_head}\n{shader_f32}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("split_k_reduce", mulmat_split_k_reduce_src, {})) + tasks.append(string_to_spv("mul_f32", f"{generic_head}\n{shader_f32}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("scale_f32", f"{generic_head}\n{shader_f32}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("scale_f32", f"{generic_head}\n{shader_f32}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("sqr_f32", f"{generic_head}\n{shader_f32}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("sqr_f32", f"{generic_head}\n{shader_f32}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("clamp_f32", f"{generic_head}\n{shader_f32}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("clamp_f32", f"{generic_head}\n{shader_f32}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("gelu_f32", f"{generic_head}\n{shader_f32}\n{gelu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("silu_f32", f"{generic_head}\n{shader_f32}\n{silu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("relu_f32", f"{generic_head}\n{shader_f32}\n{relu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("gelu_f32", f"{generic_head}\n{shader_f32}\n{gelu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("silu_f32", 
f"{generic_head}\n{shader_f32}\n{silu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("relu_f32", f"{generic_head}\n{shader_f32}\n{relu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("diag_mask_inf_f32", f"{diag_mask_inf_head}\n{shader_f32}\n{diag_mask_inf_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("diag_mask_inf_f32", f"{diag_mask_inf_head}\n{shader_f32}\n{diag_mask_inf_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("soft_max_f32", f"{generic_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("soft_max_f32", f"{generic_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) await asyncio.gather(*tasks) From 60ecf099eddfe70fec797ef6790572e452054add Mon Sep 17 00:00:00 2001 From: Martin Schwaighofer Date: Sun, 28 Jan 2024 12:59:43 +0100 Subject: [PATCH 501/811] add Vulkan support to Nix flake --- .devops/nix/package.nix | 21 +++++++++++++++++---- flake.nix | 1 + 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index a868a9a61..ad23f7dd7 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -13,18 +13,22 @@ cudaPackages, darwin, rocmPackages, + vulkan-headers, + vulkan-loader, clblast, useBlas ? builtins.all (x: !x) [ useCuda useMetalKit useOpenCL useRocm + useVulkan ], useCuda ? config.cudaSupport, useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL, useMpi ? false, # Increases the runtime closure size by ~700M useOpenCL ? false, useRocm ? config.rocmSupport, + useVulkan ? false, llamaVersion ? 
"0.0.0", # Arbitrary version, substituted by the flake }@inputs: @@ -48,7 +52,8 @@ let ++ lib.optionals useMetalKit [ "MetalKit" ] ++ lib.optionals useMpi [ "MPI" ] ++ lib.optionals useOpenCL [ "OpenCL" ] - ++ lib.optionals useRocm [ "ROCm" ]; + ++ lib.optionals useRocm [ "ROCm" ] + ++ lib.optionals useVulkan [ "Vulkan" ]; pnameSuffix = strings.optionalString (suffices != [ ]) @@ -108,6 +113,11 @@ let hipblas rocblas ]; + + vulkanBuildInputs = [ + vulkan-headers + vulkan-loader + ]; in effectiveStdenv.mkDerivation ( @@ -164,7 +174,8 @@ effectiveStdenv.mkDerivation ( ++ optionals useCuda cudaBuildInputs ++ optionals useMpi [ mpi ] ++ optionals useOpenCL [ clblast ] - ++ optionals useRocm rocmBuildInputs; + ++ optionals useRocm rocmBuildInputs + ++ optionals useVulkan vulkanBuildInputs; cmakeFlags = [ @@ -178,6 +189,7 @@ effectiveStdenv.mkDerivation ( (cmakeBool "LLAMA_HIPBLAS" useRocm) (cmakeBool "LLAMA_METAL" useMetalKit) (cmakeBool "LLAMA_MPI" useMpi) + (cmakeBool "LLAMA_VULKAN" useVulkan) ] ++ optionals useCuda [ ( @@ -218,6 +230,7 @@ effectiveStdenv.mkDerivation ( useMpi useOpenCL useRocm + useVulkan ; shell = mkShell { @@ -242,11 +255,11 @@ effectiveStdenv.mkDerivation ( # Configurations we don't want even the CI to evaluate. Results in the # "unsupported platform" messages. This is mostly a no-op, because # cudaPackages would've refused to evaluate anyway. - badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; + badPlatforms = optionals (useCuda || useOpenCL || useVulkan) lib.platforms.darwin; # Configurations that are known to result in build failures. Can be # overridden by importing Nixpkgs with `allowBroken = true`. - broken = (useMetalKit && !effectiveStdenv.isDarwin); + broken = (useMetalKit && !effectiveStdenv.isDarwin) || (useVulkan && effectiveStdenv.isDarwin); description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; homepage = "https://github.com/ggerganov/llama.cpp/"; diff --git a/flake.nix b/flake.nix index a776ba024..ad2f9b295 100644 --- a/flake.nix +++ b/flake.nix @@ -157,6 +157,7 @@ mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; + vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; From 3cc5ed353c07201d8d5b98b0a4713ab633da6d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 3 Feb 2024 20:14:59 +0100 Subject: [PATCH 502/811] make: fix nvcc optimization flags for host code (#5309) --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a55d15888..40b16e0ea 100644 --- a/Makefile +++ b/Makefile @@ -109,6 +109,7 @@ MK_NVCCFLAGS += -O3 else MK_CFLAGS += -O3 MK_CXXFLAGS += -O3 +MK_NVCCFLAGS += -O3 endif # clock_gettime came in POSIX.1b (1993) @@ -365,7 +366,7 @@ ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o - MK_NVCCFLAGS = -use_fast_math + MK_NVCCFLAGS += -use_fast_math ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif # 
JETSON_EOL_MODULE_DETECT From 3c0d25c4756742ebf15ad44700fabc0700c638bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 3 Feb 2024 20:15:13 +0100 Subject: [PATCH 503/811] make: add nvcc info print (#5310) --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 40b16e0ea..21d5e15ba 100644 --- a/Makefile +++ b/Makefile @@ -553,8 +553,11 @@ $(info I CFLAGS: $(CFLAGS)) $(info I CXXFLAGS: $(CXXFLAGS)) $(info I NVCCFLAGS: $(NVCCFLAGS)) $(info I LDFLAGS: $(LDFLAGS)) -$(info I CC: $(shell $(CC) --version | head -n 1)) -$(info I CXX: $(shell $(CXX) --version | head -n 1)) +$(info I CC: $(shell $(CC) --version | head -n 1)) +$(info I CXX: $(shell $(CXX) --version | head -n 1)) +ifdef LLAMA_CUBLAS +$(info I NVCC: $(shell $(NVCC) --version | tail -n 1)) +endif # LLAMA_CUBLAS $(info ) # From 277fad30c60ef3559dc2d01b19d05e659d40a824 Mon Sep 17 00:00:00 2001 From: Welby Seely Date: Sat, 3 Feb 2024 23:18:51 -0500 Subject: [PATCH 504/811] cmake : use set() for LLAMA_WIN_VER (#5298) option() is specifically for booleans. Fixes #5158 --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c156c4824..8c04e4c19 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -79,7 +79,7 @@ if (NOT MSVC) endif() if (WIN32) - option(LLAMA_WIN_VER "llama: Windows Version" 0x602) + set(LLAMA_WIN_VER "0x602" CACHE STRING "llama: Windows Version") endif() # 3rd party libs From 5ed26e1fc9fab4ce96ecf2d84183fe45bdcab0d4 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 4 Feb 2024 10:39:58 +0200 Subject: [PATCH 505/811] Adding some imatrix tools (#5302) * imatrix: adding --combine and --continue-from * imatrix: be able to start from a specific chunk --------- Co-authored-by: Iwan Kawrakow --- examples/imatrix/imatrix.cpp | 116 +++++++++++++++++++++++++++++++++-- 1 file changed, 112 insertions(+), 4 deletions(-) diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index ea06fcdbf..bc9f6fa68 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -36,6 +36,8 @@ public: void set_parameters(StatParams&& params) { m_params = std::move(params); } bool collect_imatrix(struct ggml_tensor * t, bool ask, void * user_data); void save_imatrix() const; + bool load_imatrix(const char * file_name, bool add); + static bool load_imatrix(const char * file_name, std::unordered_map& imatrix); private: std::unordered_map m_stats; StatParams m_params; @@ -189,6 +191,57 @@ void IMatrixCollector::save_imatrix(const char * fname) const { } } +bool IMatrixCollector::load_imatrix(const char * imatrix_file, std::unordered_map& imatrix_data) { + std::ifstream in(imatrix_file, std::ios::binary); + if (!in) { + printf("%s: failed to open %s\n",__func__,imatrix_file); + return false; + } + int n_entries; + in.read((char*)&n_entries, sizeof(n_entries)); + if (in.fail() || n_entries < 1) { + printf("%s: no data in file %s\n", __func__, imatrix_file); + return false; + } + for (int i = 0; i < n_entries; ++i) { + int len; in.read((char *)&len, sizeof(len)); + std::vector name_as_vec(len+1); + in.read((char *)name_as_vec.data(), len); + if (in.fail()) { + printf("%s: failed reading name for entry %d from %s\n",__func__,i+1,imatrix_file); + return false; + } + name_as_vec[len] = 0; + std::string name{name_as_vec.data()}; + auto& e = imatrix_data[std::move(name)]; + int ncall; + in.read((char*)&ncall, sizeof(ncall)); + int nval; 
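
Note: the useVulkan flag and the vulkan attribute added to the Nix flake above should make a Vulkan-enabled build reachable directly from the flake outputs. A minimal sketch, assuming a flakes-enabled Nix and that the attribute lands under packages as the diff suggests:

    # build llama.cpp with the Vulkan backend selected via the flake
    nix build .#vulkan
    ls ./result/bin   # inspect the installed binaries
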
+ in.read((char *)&nval, sizeof(nval)); + if (in.fail() || nval < 1) { + printf("%s: failed reading number of values for entry %d\n",__func__,i); + imatrix_data = {}; + return false; + } + e.values.resize(nval); + in.read((char*)e.values.data(), nval*sizeof(float)); + if (in.fail()) { + printf("%s: failed reading data for entry %d\n",__func__,i); + imatrix_data = {}; + return false; + } + e.ncall = ncall; + } + return true; +} + +bool IMatrixCollector::load_imatrix(const char * file_name, bool add) { + if (!add) { + m_stats.clear(); + } + return load_imatrix(file_name, m_stats); +} + static IMatrixCollector g_collector; static bool ik_collect_imatrix(struct ggml_tensor * t, bool ask, void * user_data) { @@ -269,7 +322,7 @@ static void process_logits( } } -static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool compute_ppl) { +static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool compute_ppl, int from_chunk) { const bool add_bos = llama_should_add_bos_token(llama_get_model(ctx)); const int n_ctx = llama_n_ctx(ctx); @@ -282,6 +335,15 @@ static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool auto tim2 = std::chrono::high_resolution_clock::now(); fprintf(stderr, "%s: tokenization took %g ms\n",__func__,1e-3*std::chrono::duration_cast(tim2-tim1).count()); + if (from_chunk > 0) { + if (size_t((from_chunk + 2)*n_ctx) >= tokens.size()) { + fprintf(stderr, "%s: there will be not enough tokens left after removing %d chunks\n", __func__, from_chunk); + return false; + } + fprintf(stderr, "%s: removing initial %d chunks (%d tokens)\n", __func__, from_chunk, from_chunk*n_ctx); + tokens.erase(tokens.begin(), tokens.begin() + from_chunk*n_ctx); + } + if (int(tokens.size()) < 2*n_ctx) { fprintf(stderr, "%s: you need at least %d tokens for a context of %d tokens\n",__func__,2*n_ctx, n_ctx); @@ -402,7 +464,10 @@ static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool int main(int argc, char ** argv) { StatParams sparams; + std::string prev_result_file; + std::string combine_files; bool compute_ppl = true; + int from_chunk = 0; std::vector args; args.push_back(argv[0]); int iarg = 1; @@ -423,6 +488,13 @@ int main(int argc, char ** argv) { compute_ppl = false; } else if (arg == "--keep-imatrix") { sparams.keep_every = std::stoi(argv[++iarg]); + } else if (arg == "--continue-from") { + prev_result_file = argv[++iarg]; + } else if (arg == "--combine") { + combine_files = argv[++iarg]; + } + else if (arg == "--from-chunk") { + from_chunk = std::stoi(argv[++iarg]); } else { args.push_back(argv[iarg]); } @@ -436,14 +508,50 @@ int main(int argc, char ** argv) { } } + g_collector.set_parameters(std::move(sparams)); + + if (!combine_files.empty()) { + std::vector files; + size_t pos = 0; + while (true) { + auto new_pos = combine_files.find(',', pos); + if (new_pos != std::string::npos) { + files.emplace_back(combine_files.substr(pos, new_pos - pos)); + pos = new_pos + 1; + } else { + files.emplace_back(combine_files.substr(pos)); + break; + } + } + if (files.size() < 2) { + fprintf(stderr, "You must provide at least two comma separated files to use --combine\n"); + return 1; + } + printf("Combining the following %d files\n", int(files.size())); + for (auto& file : files) { + printf(" %s\n", file.c_str()); + if (!g_collector.load_imatrix(file.c_str(), true)) { + fprintf(stderr, "Failed to load %s\n", file.c_str()); + return 1; + } + } + g_collector.save_imatrix(); + return 0; + } + + if (!prev_result_file.empty()) { 
+ if (!g_collector.load_imatrix(prev_result_file.c_str(), false)) { + fprintf(stderr, "=============== Failed to load %s\n", prev_result_file.c_str()); + return 1; + } + } + gpt_params params; params.n_batch = 512; if (!gpt_params_parse(args.size(), args.data(), params)) { return 1; } - g_collector.set_parameters(std::move(sparams)); - params.logits_all = true; params.n_batch = std::min(params.n_batch, params.n_ctx); @@ -495,7 +603,7 @@ int main(int argc, char ** argv) { fprintf(stderr, "%s\n", get_system_info(params).c_str()); } - bool OK = compute_imatrix(ctx, params, compute_ppl); + bool OK = compute_imatrix(ctx, params, compute_ppl, from_chunk); if (!OK) { return 1; } From 9392ebd49ea5ae236a55b47cbf6a13247e8a3b8c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 4 Feb 2024 00:17:24 +0000 Subject: [PATCH 506/811] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'flake-parts': 'github:hercules-ci/flake-parts/07f6395285469419cf9d078f59b5b49993198c00' (2024-01-11) → 'github:hercules-ci/flake-parts/b253292d9c0a5ead9bc98c4e9a26c6312e27d69f' (2024-02-01) • Updated input 'flake-parts/nixpkgs-lib': 'github:NixOS/nixpkgs/b0d36bd0a420ecee3bc916c91886caca87c894e9?dir=lib' (2023-12-30) → 'github:NixOS/nixpkgs/97b17f32362e475016f942bbdfda4a4a72a8a652?dir=lib' (2024-01-29) • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/ae5c332cbb5827f6b1f02572496b141021de335f' (2024-01-25) → 'github:NixOS/nixpkgs/b8b232ae7b8b144397fdb12d20f592e5e7c1a64d' (2024-01-31) --- flake.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/flake.lock b/flake.lock index 95e41f333..8cfc78273 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1704982712, - "narHash": "sha256-2Ptt+9h8dczgle2Oo6z5ni5rt/uLMG47UFTR1ry/wgg=", + "lastModified": 1706830856, + "narHash": "sha256-a0NYyp+h9hlb7ddVz4LUn1vT/PLwqfrWYcHMvFB1xYg=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "07f6395285469419cf9d078f59b5b49993198c00", + "rev": "b253292d9c0a5ead9bc98c4e9a26c6312e27d69f", "type": "github" }, "original": { @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1706191920, - "narHash": "sha256-eLihrZAPZX0R6RyM5fYAWeKVNuQPYjAkCUBr+JNvtdE=", + "lastModified": 1706732774, + "narHash": "sha256-hqJlyJk4MRpcItGYMF+3uHe8HvxNETWvlGtLuVpqLU0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "ae5c332cbb5827f6b1f02572496b141021de335f", + "rev": "b8b232ae7b8b144397fdb12d20f592e5e7c1a64d", "type": "github" }, "original": { @@ -37,11 +37,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - "lastModified": 1703961334, - "narHash": "sha256-M1mV/Cq+pgjk0rt6VxoyyD+O8cOUiai8t9Q6Yyq4noY=", + "lastModified": 1706550542, + "narHash": "sha256-UcsnCG6wx++23yeER4Hg18CXWbgNpqNXcHIo5/1Y+hc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b0d36bd0a420ecee3bc916c91886caca87c894e9", + "rev": "97b17f32362e475016f942bbdfda4a4a72a8a652", "type": "github" }, "original": { From 4833ac209da6a427de64f97e8f403dcdc5de6bc3 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Mon, 5 Feb 2024 07:08:24 +0000 Subject: [PATCH 507/811] [SYCL] Fix cpy with dims of 3 (#5289) * Fix cpy with dims of 3 * rm asserts --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 194 +++++++++++++++++++++++++++++--------------------- 1 file changed, 114 insertions(+), 
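
Note: the new imatrix options compose in the obvious way: --continue-from resumes accumulation from an earlier result file, --from-chunk skips chunks that were already processed, and --combine merges several partial files. A rough usage sketch; the binary path and the model, text, and imatrix file names are placeholders:

    # resume a previous run, skipping the first 100 chunks of the training text
    ./imatrix -m model.gguf -f train.txt --continue-from imatrix.dat --from-chunk 100

    # merge partial results into a single output file (no model is loaded in this mode)
    ./imatrix --combine first.imatrix,second.imatrix
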
80 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 51445b5e7..a03df4c65 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -7693,6 +7693,13 @@ static void cpy_1_f16_f16(const char * cxi, char * cdsti) { *dsti = *xi; } +static void cpy_1_f16_f32(const char * cxi, char * cdsti) { + const sycl::half *xi = (const sycl::half *)cxi; + float *dsti = (float *)cdsti; + + *dsti = *xi; +} + static void cpy_1_i16_i16(const char * cxi, char * cdsti) { const int16_t *xi = (const int16_t *)cxi; int16_t *dsti = (int16_t *)cdsti; @@ -7709,9 +7716,9 @@ static void cpy_1_i32_i32(const char * cxi, char * cdsti) { template static void cpy_f32_f16(const char * cx, char * cdst, const int ne, - const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, - const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, - const sycl::nd_item<3> &item_ct1) { + const int ne00, const int ne01, const int ne02, const int nb00, const int nb01, const int nb02, + const int nb03, const int ne10, const int ne11, const int ne12, const int nb10, const int nb11, + const int nb12, const int nb13, const sycl::nd_item<3> &item_ct1) { const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + item_ct1.get_local_id(2); @@ -7721,15 +7728,17 @@ static void cpy_f32_f16(const char * cx, char * cdst, const int ne, // determine indices i02/i12, i01/i11, i00/i10 as a function of index i of flattened tensor // then combine those indices with the corresponding byte offsets to get the total offsets - const int i02 = i / (ne00*ne01); - const int i01 = (i - i02*ne01*ne00) / ne00; - const int i00 = i - i02*ne01*ne00 - i01*ne00; - const int x_offset = i00*nb00 + i01*nb01 + i02*nb02; + const int i03 = i/(ne00 * ne01 * ne02); + const int i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); + const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; + const int i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; + const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; - const int i12 = i / (ne10*ne11); - const int i11 = (i - i12*ne10*ne11) / ne10; - const int i10 = i - i12*ne10*ne11 - i11*ne10; - const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12; + const int i13 = i/(ne10 * ne11 * ne12); + const int i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); + const int i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; + const int i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; + const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12 + i13 * nb13; cpy_1(cx + x_offset, cdst + dst_offset); } @@ -7823,9 +7832,9 @@ static void cpy_blck_f32_q4_1(const char * cxi, char * cdsti) { template static void cpy_f32_q(const char * cx, char * cdst, const int ne, - const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, - const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, - const sycl::nd_item<3> &item_ct1) { + const int ne00, const int ne01, const int ne02, const int nb00, const int nb01, const int nb02, + const int nb03, const int ne10, const int ne11, const int ne12, const int nb10, const int nb11, + const int nb12, const int nb13, const sycl::nd_item<3> &item_ct1) { const int i = (item_ct1.get_local_range(2) * item_ct1.get_group(2) + item_ct1.get_local_id(2)) * qk; @@ -7834,15 +7843,17 @@ static void cpy_f32_q(const char * cx, char * cdst, const int ne, return; } - const int i02 = i / (ne00*ne01); - const int i01 = (i - i02*ne01*ne00) / ne00; - const int i00 = (i - i02*ne01*ne00 - i01*ne00); - const int x_offset = i00*nb00 + i01*nb01 
+ i02*nb02; + const int i03 = i/(ne00 * ne01 * ne02); + const int i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); + const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; + const int i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; + const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; - const int i12 = i / (ne10*ne11); - const int i11 = (i - i12*ne10*ne11) / ne10; - const int i10 = (i - i12*ne10*ne11 - i11*ne10)/qk; - const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12; + const int i13 = i/(ne10 * ne11 * ne12); + const int i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); + const int i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; + const int i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; + const int dst_offset = (i10/qk)*nb10 + i11*nb11 + i12*nb12 + i13*nb13; cpy_blck(cx + x_offset, cdst + dst_offset); } @@ -10599,10 +10610,12 @@ static void ggml_mul_mat_vec_nc_f16_f32_sycl( static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10615,8 +10628,8 @@ static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10624,10 +10637,12 @@ static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10640,8 +10655,8 @@ static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10649,10 +10664,12 @@ static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_f32_q8_0_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, 
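
Note: the reworked indexing in these copy kernels is the four-dimensional generalisation of the previous three-dimensional decomposition. For a flattened element index i and extents ne00, ne01, ne02 (the outermost extent is implicit), the kernels recover

    i          = ((i03*ne02 + i02)*ne01 + i01)*ne00 + i00
    src offset = i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03   (in bytes)

and the destination offset uses i10..i13 with nb10..nb13 in the same way (with i10 additionally divided by the block size qk for the quantized destinations). For example, with ne00 = 4, ne01 = 3, ne02 = 2 and i = 19: i03 = 19/24 = 0, i02 = 19/12 = 1, i01 = (19 - 12)/4 = 1, i00 = 3, and indeed ((0*2 + 1)*3 + 1)*4 + 3 = 19.
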
const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK8_0 == 0); @@ -10661,17 +10678,20 @@ static void ggml_cpy_f32_q8_0_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f32_q4_0_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK4_0 == 0); @@ -10680,17 +10700,20 @@ static void ggml_cpy_f32_q4_0_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f32_q4_1_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK4_1 == 0); @@ -10699,17 +10722,20 @@ static void ggml_cpy_f32_q4_1_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10722,8 +10748,8 @@ static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10731,10 +10757,12 @@ static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, 
const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10747,8 +10775,8 @@ static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10756,10 +10784,12 @@ static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_i32_i32_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10772,8 +10802,8 @@ static void ggml_cpy_i32_i32_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -13910,19 +13940,23 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; - GGML_ASSERT(src0->ne[3] == 1); + const int64_t ne02 = src0->ne[2]; + const int64_t nb00 = src0->nb[0]; const int64_t nb01 = src0->nb[1]; const int64_t nb02 = src0->nb[2]; + const int64_t nb03 = src0->nb[3]; const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; - GGML_ASSERT(src1->ne[3] == 1); + const int64_t ne12 = src1->ne[2]; + const int64_t nb10 = src1->nb[0]; const int64_t nb11 = src1->nb[1]; const int64_t nb12 = src1->nb[2]; + const int64_t nb13 = src1->nb[3]; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; @@ -13934,21 +13968,21 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, char * src1_ddc = (char *) src1_extra->data_device[g_main_device_index]; if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32) { - ggml_cpy_f32_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F16) { - ggml_cpy_f32_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == 
GGML_TYPE_Q8_0) { - ggml_cpy_f32_q8_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_q8_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_0) { - ggml_cpy_f32_q4_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_q4_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_1) { - ggml_cpy_f32_q4_1_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_q4_1_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16) { - ggml_cpy_f16_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f16_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_I16 && src1->type == GGML_TYPE_I16) { - ggml_cpy_i16_i16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_i16_i16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_I32 && src1->type == GGML_TYPE_I32) { - ggml_cpy_i32_i32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_i32_i32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else { fprintf(stderr, "%s: unsupported type combination (%s to %s)\n", __func__, ggml_type_name(src0->type), ggml_type_name(src1->type)); From 5d55b0cd827bb0fcfedfa329a82bd5d6ef2c93ca Mon Sep 17 00:00:00 2001 From: chiranko <96988916+chiranko@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:41:38 +0800 Subject: [PATCH 508/811] readme : add CodeShell models to the supported models list (#5330) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 4a9bdf314..a6fe34629 100644 --- a/README.md +++ b/README.md @@ -107,6 +107,7 @@ as the main playground for developing new features for the [ggml](https://github - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) - [x] [GPT-2](https://huggingface.co/gpt2) +- [x] [CodeShell](https://github.com/WisdomShell/codeshell) **Multimodal models:** From 4be04c8965578edc09194fab769b4b922b8444f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=9D=D0=B8=D1=8F=D0=B7=20=D0=93=D0=B0=D1=80=D0=B8=D1=84?= =?UTF-8?q?=D0=B7=D1=8F=D0=BD=D0=BE=D0=B2?= <112617865+garrnizon@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:43:57 +0300 Subject: [PATCH 509/811] scripts : add non-interactive server-llm.sh (#5303) * Update server-llm.sh Add flag --non-interactive that allows run script without asking a permission * Update scripts/server-llm.sh --------- Co-authored-by: Georgi Gerganov --- scripts/server-llm.sh | 73 ++++++++++++++++++++++++------------------- 1 file changed, 40 
insertions(+), 33 deletions(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 0b83cdbbc..062b70496 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -47,6 +47,7 @@ if ! command -v make &> /dev/null; then fi # parse arguments +is_interactive=1 port=8888 repo="" wtype="" @@ -66,15 +67,16 @@ verbose=0 function print_usage { printf "Usage:\n" - printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" - printf " --port: port number, default is 8888\n" - printf " --repo: path to a repo containing GGUF model files\n" - printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" - printf " --backend: cpu, cuda, metal, opencl, depends on the OS\n" - printf " --gpu-id: gpu id, default is 0\n" - printf " --n-parallel: number of parallel requests, default is 8\n" - printf " --n-kv: KV cache size, default is 4096\n" - printf " --verbose: verbose output\n\n" + printf " ./server-llm.sh [-interactive] [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" + printf " --non-interactive: run without asking a permision to run\n" + printf " --port: port number, default is 8888\n" + printf " --repo: path to a repo containing GGUF model files\n" + printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" + printf " --backend: cpu, cuda, metal, opencl, depends on the OS\n" + printf " --gpu-id: gpu id, default is 0\n" + printf " --n-parallel: number of parallel requests, default is 8\n" + printf " --n-kv: KV cache size, default is 4096\n" + printf " --verbose: verbose output\n\n" printf "Example:\n\n" printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' } @@ -82,6 +84,10 @@ function print_usage { while [[ $# -gt 0 ]]; do key="$1" case $key in + --non-interactive) + is_interactive=0 + shift + ;; --port) port="$2" shift @@ -176,31 +182,32 @@ repos=( "https://huggingface.co/TheBloke/OpenHermes-2-Mistral-7B-GGUF" "https://huggingface.co/TheBloke/CausalLM-7B-GGUF" ) +if [ $is_interactive -eq 1 ]; then + printf "\n" + printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" + printf " Based on the options that follow, the script might download a model file\n" + printf " from the internet, which can be a few GBs in size. 
The script will also\n" + printf " build the latest llama.cpp source code from GitHub, which can be unstable.\n" + printf "\n" + printf " Upon success, an HTTP server will be started and it will serve the selected\n" + printf " model using llama.cpp for demonstration purposes.\n" + printf "\n" + printf " Please note:\n" + printf "\n" + printf " - All new data will be stored in the current folder\n" + printf " - The server will be listening on all network interfaces\n" + printf " - The server will run with default settings which are not always optimal\n" + printf " - Do not judge the quality of a model based on the results from this script\n" + printf " - Do not use this script to benchmark llama.cpp\n" + printf " - Do not use this script in production\n" + printf " - This script is only for demonstration purposes\n" + printf "\n" + printf " If you don't know what you are doing, please press Ctrl-C to abort now\n" + printf "\n" + printf " Press Enter to continue ...\n\n" -printf "\n" -printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" -printf " Based on the options that follow, the script might download a model file\n" -printf " from the internet, which can be a few GBs in size. The script will also\n" -printf " build the latest llama.cpp source code from GitHub, which can be unstable.\n" -printf "\n" -printf " Upon success, an HTTP server will be started and it will serve the selected\n" -printf " model using llama.cpp for demonstration purposes.\n" -printf "\n" -printf " Please note:\n" -printf "\n" -printf " - All new data will be stored in the current folder\n" -printf " - The server will be listening on all network interfaces\n" -printf " - The server will run with default settings which are not always optimal\n" -printf " - Do not judge the quality of a model based on the results from this script\n" -printf " - Do not use this script to benchmark llama.cpp\n" -printf " - Do not use this script in production\n" -printf " - This script is only for demonstration purposes\n" -printf "\n" -printf " If you don't know what you are doing, please press Ctrl-C to abort now\n" -printf "\n" -printf " Press Enter to continue ...\n\n" - -read + read +fi if [[ -z "$repo" ]]; then printf "[+] No repo provided from the command line\n" From 30679d438d5225b3aecf5cec6482cbc9f8f87ba5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 5 Feb 2024 09:48:03 +0200 Subject: [PATCH 510/811] scripts : fix typos, cleanup (#5303) --- scripts/server-llm.sh | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 062b70496..30bbac321 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -14,16 +14,17 @@ # - Might be unstable! 
# # Usage: -# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] +# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive] # -# --port: port number, default is 8888 -# --repo: path to a repo containing GGUF model files -# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input -# --backend: cpu, cuda, metal, opencl, depends on the OS -# --gpu-id: gpu id, default is 0 -# --n-parallel: number of parallel requests, default is 8 -# --n-kv: KV cache size, default is 4096 -# --verbose: verbose output +# --port: port number, default is 8888 +# --repo: path to a repo containing GGUF model files +# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input +# --backend: cpu, cuda, metal, opencl, depends on the OS +# --gpu-id: gpu id, default is 0 +# --n-parallel: number of parallel requests, default is 8 +# --n-kv: KV cache size, default is 4096 +# --verbose: verbose output +# --non-interactive: run without asking a permission to run # # Example: # @@ -67,8 +68,7 @@ verbose=0 function print_usage { printf "Usage:\n" - printf " ./server-llm.sh [-interactive] [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" - printf " --non-interactive: run without asking a permision to run\n" + printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive]\n\n" printf " --port: port number, default is 8888\n" printf " --repo: path to a repo containing GGUF model files\n" printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" @@ -77,6 +77,7 @@ function print_usage { printf " --n-parallel: number of parallel requests, default is 8\n" printf " --n-kv: KV cache size, default is 4096\n" printf " --verbose: verbose output\n\n" + printf " --non-interactive: run without asking a permission to run\n" printf "Example:\n\n" printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' } From e6f81775323f6f4e4a30abf022a6028fa86b79ac Mon Sep 17 00:00:00 2001 From: l3utterfly Date: Mon, 5 Feb 2024 17:00:47 +0900 Subject: [PATCH 511/811] common : add dynamic temperature parameters to main example cli (#5295) * added dynamic temp params in main * added help text --- common/common.cpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index 3302caa20..8c1a60583 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -399,6 +399,18 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } sparams.penalty_present = std::stof(argv[i]); + } else if (arg == "--dynatemp-range") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.dynatemp_range = std::stof(argv[i]); + } else if (arg == "--dynatemp-exp") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.dynatemp_exponent = std::stof(argv[i]); } else if (arg == "--mirostat") { if (++i >= argc) { invalid_param = true; @@ -942,6 +954,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --repeat-penalty N penalize repeat sequence of tokens (default: %.1f, 1.0 = disabled)\n", (double)sparams.penalty_repeat); printf(" --presence-penalty N repeat alpha presence penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_present); printf(" --frequency-penalty N repeat alpha frequency penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_freq); + 
printf(" --dynatemp-range N dynamic temperature range (default: %.1f, 0.0 = disabled)\n", (double)sparams.dynatemp_range); + printf(" --dynatemp-exp N dynamic temperature exponent (default: %.1f)\n", (double)sparams.dynatemp_exponent); printf(" --mirostat N use Mirostat sampling.\n"); printf(" Top K, Nucleus, Tail Free and Locally Typical samplers are ignored if used.\n"); printf(" (default: %d, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)\n", sparams.mirostat); From a2d60c9158435ae9a6f14632f07f1acf7a3becef Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Mon, 5 Feb 2024 08:10:22 +0000 Subject: [PATCH 512/811] server : allow to get default generation settings for completion (#5307) --- examples/server/README.md | 16 +++++++++++++++- examples/server/server.cpp | 7 ++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index fe934dab1..d8e7c313e 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -264,7 +264,21 @@ Notice that each `probs` is an array of length `n_probs`. It also accepts all the options of `/completion` except `stream` and `prompt`. -- **GET** `/props`: Return the required assistant name and anti-prompt to generate the prompt in case you have specified a system prompt for all slots. +- **GET** `/props`: Return current server settings. + +### Result JSON + +```json +{ + "assistant_name": "", + "user_name": "", + "default_generation_settings": { ... } +} +``` + +- `assistant_name` - the required assistant name to generate the prompt in case you have specified a system prompt for all slots. +- `user_name` - the required anti-prompt to generate the prompt in case you have specified a system prompt for all slots. +- `default_generation_settings` - the default generation settings for the `/completion` endpoint, has the same fields as the `generation_settings` response object from the `/completion` endpoint. - **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. 
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a9f8cb369..8000fee5c 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -334,6 +334,7 @@ struct llama_server_context // slots / clients std::vector slots; + json default_generation_settings_for_props; llama_server_queue queue_tasks; llama_server_response queue_results; @@ -430,6 +431,9 @@ struct llama_server_context slots.push_back(slot); } + default_generation_settings_for_props = get_formated_generation(slots.front()); + default_generation_settings_for_props["seed"] = -1; + batch = llama_batch_init(n_ctx, 0, params.n_parallel); // empty system prompt @@ -2614,7 +2618,8 @@ int main(int argc, char **argv) res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); json data = { { "user_name", llama.name_user.c_str() }, - { "assistant_name", llama.name_assistant.c_str() } + { "assistant_name", llama.name_assistant.c_str() }, + { "default_generation_settings", llama.default_generation_settings_for_props } }; res.set_content(data.dump(), "application/json; charset=utf-8"); }); From 6fdfa2ecc684000a25a4ad91823bc82a6652b645 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:46:06 +0200 Subject: [PATCH 513/811] iq2_xxs: tune quantization (#5320) We get slightly better PPL, and we cut quantization time in nearly half. The trick is to 1st quantize without forcing points onto the E8-lattice. We can then use a narrower search range around the block scale that we got that way. Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 58 ++++++--------------------------------------------- 1 file changed, 6 insertions(+), 52 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 8236385bc..014c0525a 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -9048,8 +9048,6 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict int8_t L[32]; int8_t Laux[32]; float waux[32]; - bool is_on_grid[4]; - bool is_on_grid_aux[4]; uint8_t block_signs[4]; uint32_t q2[2*(QK_K/32)]; @@ -9099,10 +9097,11 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict memset(L, 0, 32); continue; } + float scale = make_qp_quants(32, kMaxQ+1, xval, (uint8_t*)L, weight); + float eff_max = scale*kMaxQ; float best = 0; - float scale = max/(2*kMaxQ-1); - for (int is = -9; is <= 9; ++is) { - float id = (2*kMaxQ-1+is*0.1f)/max; + for (int is = -6; is <= 6; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/eff_max; float this_scale = 1/id; for (int k = 0; k < 4; ++k) { for (int i = 0; i < 8; ++i) { @@ -9112,9 +9111,7 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict uint16_t u = 0; for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); int grid_index = kmap_q2xs[u]; - is_on_grid_aux[k] = true; if (grid_index < 0) { - is_on_grid_aux[k] = false; const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); } @@ -9128,16 +9125,12 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict } if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { scale = sumqx/sumq2; best = scale*sumqx; - for (int i = 0; i < 32; ++i) L[i] = Laux[i]; - for (int k = 0; k < 4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + memcpy(L, Laux, 32); } } - int n_not_ongrid = 0; - for (int k = 0; k < 4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; - if (n_not_ongrid > 0 && scale > 0) { + if (scale > 0) { 
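The commit message of patch 513 above explains the speed-up: first find a rough block scale without forcing the points onto the E8 lattice, then search only a narrow window of candidate scales around that estimate (the `is` loop now runs from -6 to 6 instead of -9 to 9). A deliberately simplified sketch of that idea follows; it leaves out the grid and sign handling entirely, and the window width and step are arbitrary illustration values, so treat it as a toy model rather than the actual kernel.

```cpp
// Toy version of a narrowed block-scale search (not the real iq2_xxs code path).
#include <cmath>

// x: block values, w: importance weights, n: block size, max_q: largest quant level,
// rough_scale: a cheap first-pass estimate that the narrow search is centred on.
static float refine_scale(const float * x, const float * w, int n, int max_q, float rough_scale) {
    float best_err   = INFINITY;
    float best_scale = rough_scale;
    for (int is = -6; is <= 6; ++is) {                        // narrow window around the estimate
        const float scale = rough_scale * (1.0f + 0.05f*is);  // candidate scale
        float err = 0.0f;
        for (int i = 0; i < n; ++i) {
            int q = (int) std::lround(x[i] / scale);          // quantize with this candidate
            if (q >  max_q) q =  max_q;
            if (q < -max_q) q = -max_q;
            const float diff = scale*q - x[i];
            err += w[i] * diff * diff;                        // importance-weighted error
        }
        if (err < best_err) { best_err = err; best_scale = scale; }
    }
    return best_scale;
}
```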
float id = 1/scale; for (int k = 0; k < 4; ++k) { - if (is_on_grid[k]) continue; uint16_t u = 0; for (int i = 0; i < 8; ++i) { int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); @@ -9193,49 +9186,10 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict float d = max_scale/31; y[ibl].d = GGML_FP32_TO_FP16(d); float id = 1/d; - float sumqx = 0, sumq2 = 0; for (int ib = 0; ib < QK_K/32; ++ib) { int l = nearest_int(0.5f*(id*scales[ib]-1)); l = MAX(0, MIN(15, l)); q2[2*ib+1] |= ((uint32_t)l << 28); - const float * xb = xbl + 32*ib; - const float * qw = quant_weights + QK_K*ibl + 32*ib; - for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); - const uint8_t * aux8 = (const uint8_t *)(q2 + 2*ib); - const float db = d * (1 + 2*l); - uint32_t u = 0; - for (int k = 0; k < 4; ++k) { - const int8_t * signs = keven_signs_q2xs + 8*((q2[2*ib+1] >> 7*k) & 127); - const float * xk = xb + 8*k; - const float * wk = weight + 8*k; - const uint8_t * grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); - float best_mse = 0; int best_index = aux8[k]; - for (int j = 0; j < 8; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - best_mse += wk[j] * diff * diff; - } - for (int idx = 0; idx < 256; ++idx) { - grid = (const uint8_t *)(kgrid_q2xs + idx); - float mse = 0; - for (int j = 0; j < 8; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - mse += wk[j] * diff * diff; - } - if (mse < best_mse) { - best_mse = mse; best_index = idx; - } - } - u |= (best_index << 8*k); - grid = (const uint8_t *)(kgrid_q2xs + best_index); - //grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); - for (int j = 0; j < 8; ++j) { - float q = db * grid[j] * signs[j]; - sumqx += wk[j] * q * xk[j]; - sumq2 += wk[j] * q * q; - } - } - q2[2*ib] = u; - if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); } memcpy(y[ibl].qs, q2, QK_K/4); } From 7e1ae372f36d98fa66b1d778c5862904b4d80c88 Mon Sep 17 00:00:00 2001 From: Guoteng <32697156+SolenoidWGT@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:04:06 +0800 Subject: [PATCH 514/811] py : fix internlm2-hf convert to gguf (#5305) * py : fix internlm2-hf convert to gguf * ggml-ci --- convert-hf-to-gguf.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index a6ffd128b..5e343742d 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1416,8 +1416,32 @@ class InternLM2Model(Model): self.gguf_writer.add_add_space_prefix(add_prefix) special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + old_eos = special_vocab.special_token_ids["eos"] + if "chat" in os.path.basename(self.dir_model.absolute()): + # For the chat model, we replace the eos with '<|im_end|>'. 
+ special_vocab.special_token_ids["eos"] = self._try_get_sft_eos(tokenizer) + print(f"Replace eos:{old_eos} with a special token:{special_vocab.special_token_ids['eos']} \ +in chat mode so that the conversation can end normally.") + special_vocab.add_to_gguf(self.gguf_writer) + def _try_get_sft_eos(self, tokenizer): + unused_145_list = tokenizer.encode('[UNUSED_TOKEN_145]') + im_end_list = tokenizer.encode('<|im_end|>') + assert (len(unused_145_list) == 1) ^ (len(im_end_list) == 1) + if len(unused_145_list) == 1: + eos_token = unused_145_list[0] + if len(im_end_list) == 1: + eos_token = im_end_list[0] + return eos_token + + def _hf_permute_qk(self, weights, n_head: int, n_head_kv: int): + if n_head_kv is not None and n_head != n_head_kv: + n_head = n_head_kv + return (weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) + .swapaxes(1, 2) + .reshape(weights.shape)) + def set_gguf_parameters(self): self.gguf_writer.add_name("InternLM2") self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) @@ -1486,8 +1510,9 @@ class InternLM2Model(Model): qkv = data_torch qkv = rearrange(qkv.T, " o (g n i) ->o g n i", g=num_groups, n=q_per_kv + 2, i=head_dim) q, k, v = qkv[..., : q_per_kv, :], qkv[..., q_per_kv: q_per_kv + 1, :], qkv[..., q_per_kv + 1: q_per_kv + 2, :] - q = rearrange(q, " o g n i -> o (g n i)").T - k = rearrange(k, " o g n i -> o (g n i)").T + # The model weights of q and k equire additional reshape. + q = self._hf_permute_qk(rearrange(q, " o g n i -> o (g n i)").T, num_heads, num_heads) + k = self._hf_permute_qk(rearrange(k, " o g n i -> o (g n i)").T, num_heads, num_kv_heads) v = rearrange(v, " o g n i -> o (g n i)").T self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wq.weight", q) self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wk.weight", k) From 89503dcb5f764a5cc7093db1f395f5121876a2cc Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:32:27 +0200 Subject: [PATCH 515/811] iq3_xxs: quards for the no-imatrix situation (#5334) Co-authored-by: Iwan Kawrakow --- llama.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/llama.cpp b/llama.cpp index 4787a92fe..65e399adc 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9456,8 +9456,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && qs.model.hparams.n_gqa() >= 4) { new_type = GGML_TYPE_Q4_K; } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && qs.model.hparams.n_gqa() >= 4) { - new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { + new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? GGML_TYPE_Q3_K : GGML_TYPE_IQ3_XXS; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { new_type = qs.i_attention_wv < 2 ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; @@ -9496,9 +9496,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K; } - //else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { - // if (i_layer < n_layer/8) new_type = GGML_TYPE_Q5_K; - //} + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && !qs.has_imatrix) { + new_type = i_layer < n_layer/8 ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { new_type = i_layer < n_layer/16 ? 
GGML_TYPE_Q5_K : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K From abb61944a5f64dec62c893ed0db10790169b672a Mon Sep 17 00:00:00 2001 From: "Dr. Tom Murphy VII Ph.D" <499244+tom7@users.noreply.github.com> Date: Mon, 5 Feb 2024 06:13:57 -0500 Subject: [PATCH 516/811] ggml : avoid duplicating function calls using MIN/MAX macros (#5325) * Avoid duplicating function calls when using MIN/MAX macros. Since these copy "a" and "b" they ask the compiler to evaluate one of them twice. The compiler doesn't have a problem with removing the duplication in something like MAX(0, x + 2), but in some cases we're calling functions, and those calls just happen twice. By explicitly evaluating at the expression we get smaller and faster code without duplicate calls. See ggml_rope_yarn_corr_dims in Compiler Explorer: https://godbolt.org/z/Ee4KMrvKh Code behaves exactly the same. * Update ggml.c --------- Co-authored-by: Georgi Gerganov --- ggml.c | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/ggml.c b/ggml.c index ee994c875..b9ec0c981 100644 --- a/ggml.c +++ b/ggml.c @@ -2470,7 +2470,8 @@ size_t ggml_get_max_tensor_size(const struct ggml_context * ctx) { size_t max_size = 0; for (struct ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor != NULL; tensor = ggml_get_next_tensor(ctx, tensor)) { - max_size = MAX(max_size, ggml_nbytes(tensor)); + size_t bytes = ggml_nbytes(tensor); + max_size = MAX(max_size, bytes); } return max_size; @@ -11887,8 +11888,10 @@ GGML_CALL void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] ) { // start and end correction dims - dims[0] = MAX(0, floorf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_fast, freq_base))); - dims[1] = MIN(n_dims - 1, ceilf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_slow, freq_base))); + float start = floorf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_fast, freq_base)); + float end = ceilf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_slow, freq_base)); + dims[0] = MAX(0, start); + dims[1] = MIN(n_dims - 1, end); } static void ggml_compute_forward_rope_f32( From c6b395535a6874d749ef47c33eacd466cb252cd5 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:09:47 +0200 Subject: [PATCH 517/811] ggml : make use of ggml-quants.h possible in C++ code (#5338) * Make use of ggml-quants.h possible in C++ code * One cannot possibly be defining static_assert in a C++ compilation --------- Co-authored-by: Iwan Kawrakow --- ggml-impl.h | 2 + ggml-quants.h | 117 +++++++++++++++++++++++++++----------------------- 2 files changed, 65 insertions(+), 54 deletions(-) diff --git a/ggml-impl.h b/ggml-impl.h index 2c58075ac..19df66bce 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -19,6 +19,7 @@ extern "C" { // fall back to the _Static_assert C11 keyword. 
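Before the header changes of patch 517 continue, the reasoning in the commit message of patch 516 is worth seeing in isolation: a textbook `MIN`/`MAX` macro repeats its arguments, so an expensive call written inside it gets evaluated twice, which is why the patch hoists the `floorf(...)` and `ceilf(...)` calls into locals in `ggml_rope_yarn_corr_dims`. The stand-alone illustration below uses the usual textbook macro definition, assumed here for demonstration rather than copied from `ggml.c`.

```cpp
// Demonstrates why hoisting a call into a local avoids double evaluation.
#include <cstdio>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

static int calls = 0;

static float expensive(float x) {
    ++calls;                                          // count real invocations
    return x * x;
}

int main() {
    calls = 0;
    const float direct = MAX(0.0f, expensive(3.0f));  // the macro expansion calls expensive() twice
    std::printf("direct : result=%g calls=%d\n", direct, calls);

    calls = 0;
    const float tmp     = expensive(3.0f);            // evaluate once, up front
    const float hoisted = MAX(0.0f, tmp);             // the macro now only repeats a cheap local
    std::printf("hoisted: result=%g calls=%d\n", hoisted, calls);
    return 0;
}
```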
// if C99 - static_assert is noop // ref: https://stackoverflow.com/a/53923785/4039976 +#ifndef __cplusplus #ifndef static_assert #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) #define static_assert(cond, msg) _Static_assert(cond, msg) @@ -26,6 +27,7 @@ extern "C" { #define static_assert(cond, msg) struct global_scope_noop_trick #endif #endif +#endif // __FMA__ and __F16C__ are not defined in MSVC, however they are implied with AVX2/AVX512 #if defined(_MSC_VER) && (defined(__AVX2__) || defined(__AVX512F__)) diff --git a/ggml-quants.h b/ggml-quants.h index 5c9f63bd9..bfdf3c997 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,70 +191,74 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#ifdef __cplusplus +extern "C" { +#endif + // Quantization -void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); -void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k); -void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict y, int k); -void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict y, int k); -void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict y, int k); -void quantize_row_q8_1_reference(const float * restrict x, block_q8_1 * restrict y, int k); +void quantize_row_q4_0_reference(const float * GGML_RESTRICT x, block_q4_0 * GGML_RESTRICT y, int k); +void quantize_row_q4_1_reference(const float * GGML_RESTRICT x, block_q4_1 * GGML_RESTRICT y, int k); +void quantize_row_q5_0_reference(const float * GGML_RESTRICT x, block_q5_0 * GGML_RESTRICT y, int k); +void quantize_row_q5_1_reference(const float * GGML_RESTRICT x, block_q5_1 * GGML_RESTRICT y, int k); +void quantize_row_q8_0_reference(const float * GGML_RESTRICT x, block_q8_0 * GGML_RESTRICT y, int k); +void quantize_row_q8_1_reference(const float * GGML_RESTRICT x, block_q8_1 * GGML_RESTRICT y, int k); -void quantize_row_q2_K_reference(const float * restrict x, block_q2_K * restrict y, int k); -void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict y, int k); -void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y, int k); -void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k); -void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); -void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); -void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * restrict y, int k); +void quantize_row_q2_K_reference(const float * GGML_RESTRICT x, block_q2_K * GGML_RESTRICT y, int k); +void quantize_row_q3_K_reference(const float * GGML_RESTRICT x, block_q3_K * GGML_RESTRICT y, int k); +void quantize_row_q4_K_reference(const float * GGML_RESTRICT x, block_q4_K * GGML_RESTRICT y, int k); +void quantize_row_q5_K_reference(const float * GGML_RESTRICT x, block_q5_K * GGML_RESTRICT y, int k); +void quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGML_RESTRICT y, int k); +void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); +void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); -void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q4_1(const float * 
restrict x, void * restrict y, int k); -void quantize_row_q5_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q5_1(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_1(const float * restrict x, void * restrict y, int k); +void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); -void quantize_row_q2_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q3_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); -void quantize_row_iq3_xxs(const float * restrict x, void * restrict y, int k); +void quantize_row_q2_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q3_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q4_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization -void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); -void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int k); -void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int k); -void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int k); -void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int k); -//void dequantize_row_q8_1(const block_q8_1 * restrict x, float * restrict y, int k); +void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q4_1(const block_q4_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_0(const block_q5_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_1(const block_q5_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q8_0(const block_q8_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +//void dequantize_row_q8_1(const block_q8_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); -void dequantize_row_q2_K(const block_q2_K * restrict x, float * restrict y, int k); -void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int k); -void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int k); -void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int k); -void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k); 
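The mechanical `restrict` to `GGML_RESTRICT` rewrite running through this hunk, the `#ifndef __cplusplus` guard around the `static_assert` fallback, and the added `extern "C"` block are the three ingredients that make the header usable from C++, which is the stated goal of patch 517. Collected into one hypothetical header they look roughly as follows; `MY_RESTRICT` and `quantize_block` are illustrative stand-ins, since the patch shows only the uses of `GGML_RESTRICT`, not its definition.

```cpp
// Sketch of a C header that stays consumable from C++ translation units.

// 1) 'restrict' is a C99 keyword that C++ does not have, so hide it behind a macro.
#ifdef __cplusplus
#define MY_RESTRICT            // C++: drop it (a real project might map it to __restrict)
#else
#define MY_RESTRICT restrict   // C: keep the genuine keyword
#endif

// 2) C++11 already has static_assert as a keyword, so only provide a fallback for C.
#ifndef __cplusplus
#ifndef static_assert
#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L)
#define static_assert(cond, msg) _Static_assert(cond, msg)
#else
#define static_assert(cond, msg) struct global_scope_noop_trick
#endif
#endif
#endif

// 3) Give the declarations C linkage so C++ callers link against the C implementation.
#ifdef __cplusplus
extern "C" {
#endif

void quantize_block(const float * MY_RESTRICT x, void * MY_RESTRICT y, int k);

#ifdef __cplusplus
}
#endif
```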
-void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); -void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k); -void dequantize_row_iq2_xs (const block_iq2_xs * restrict x, float * restrict y, int k); -void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y, int k); +void dequantize_row_q2_K(const block_q2_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q3_K(const block_q3_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q4_K(const block_q4_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_K(const block_q5_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q6_K(const block_q6_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void 
ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); // // Quantization utilizing an importance matrix (a.k.a. "Activation aWare Quantization") @@ -276,3 +280,8 @@ void iq2xs_init_impl(int grid_size); void iq2xs_free_impl(int grid_size); void iq3xs_init_impl(int grid_size); void iq3xs_free_impl(int grid_size); + +#ifdef __cplusplus +} +#endif + From 78b00dda6c0d62c34f5371d47718defff6ed2b22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Mon, 5 Feb 2024 15:55:10 +0100 Subject: [PATCH 518/811] README: updated introduction (#5343) * README: updated introduction * readme : update --------- Co-authored-by: Georgi Gerganov --- README.md | 49 ++++++++++++++++++++++++++++++------------------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index a6fe34629..bb6c49338 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [Roadmap](https://github.com/users/ggerganov/projects/7) / [Project status](https://github.com/ggerganov/llama.cpp/discussions/3471) / [Manifesto](https://github.com/ggerganov/llama.cpp/discussions/205) / [ggml](https://github.com/ggerganov/ggml) -Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ +Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) in pure C/C++ ### Hot topics @@ -58,18 +58,20 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ## Description -The main goal of `llama.cpp` is to run the LLaMA model using 4-bit integer quantization on a MacBook +The main goal of `llama.cpp` is to enable LLM inference with minimal setup and state-of-the-art performance on a wide +variety of hardware - locally and in the cloud. -- Plain C/C++ implementation without dependencies -- Apple silicon first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks +- Plain C/C++ implementation without any dependencies +- Apple silicon is a first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks - AVX, AVX2 and AVX512 support for x86 architectures -- Mixed F16 / F32 precision -- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit and 8-bit integer quantization support -- CUDA, Metal, OpenCL, SYCL GPU backend support +- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use +- Custom CUDA kernels for running LLMs on NVIDIA GPUs (support for AMD GPUs via HIP) +- Vulkan, SYCL, and (partial) OpenCL backend support +- CPU+GPU hybrid inference to partially accelerate models larger than the total VRAM capacity -The original implementation of `llama.cpp` was [hacked in an evening](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022). -Since then, the project has improved significantly thanks to many contributions. 
This project is mainly for educational purposes and serves -as the main playground for developing new features for the [ggml](https://github.com/ggerganov/ggml) library. +Since its [inception](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022), the project has +improved significantly thanks to many contributions. It is the main playground for developing new features for the +[ggml](https://github.com/ggerganov/ggml) library. **Supported platforms:** @@ -77,11 +79,14 @@ as the main playground for developing new features for the [ggml](https://github - [X] Linux - [X] Windows (via CMake) - [X] Docker +- [X] FreeBSD **Supported models:** - [X] LLaMA 🦙 - [x] LLaMA 2 🦙🦙 +- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) +- [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [X] Falcon - [X] [Alpaca](https://github.com/ggerganov/llama.cpp#instruction-mode-with-alpaca) - [X] [GPT4All](https://github.com/ggerganov/llama.cpp#using-gpt4all) @@ -95,7 +100,6 @@ as the main playground for developing new features for the [ggml](https://github - [X] [Baichuan 1 & 2](https://huggingface.co/models?search=baichuan-inc/Baichuan) + [derivations](https://huggingface.co/hiyouga/baichuan-7b-sft) - [X] [Aquila 1 & 2](https://huggingface.co/models?search=BAAI/Aquila) - [X] [Starcoder models](https://github.com/ggerganov/llama.cpp/pull/3187) -- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) - [X] [Refact](https://huggingface.co/smallcloudai/Refact-1_6B-fim) - [X] [Persimmon 8B](https://github.com/ggerganov/llama.cpp/pull/3410) - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) @@ -104,15 +108,14 @@ as the main playground for developing new features for the [ggml](https://github - [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) -- [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) - [x] [GPT-2](https://huggingface.co/gpt2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) **Multimodal models:** -- [x] [Llava 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) -- [x] [Bakllava](https://huggingface.co/models?search=SkunkworksAI/Bakllava) +- [x] [LLaVA 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) +- [x] [BakLLaVA](https://huggingface.co/models?search=SkunkworksAI/Bakllava) - [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) @@ -137,14 +140,22 @@ as the main playground for developing new features for the [ggml](https://github **UI:** +Unless otherwise noted these projects are open-source with permissive licensing: + +- [iohub/collama](https://github.com/iohub/coLLaMA) +- [janhq/jan](https://github.com/janhq/jan) (AGPL) - [nat/openplayground](https://github.com/nat/openplayground) -- [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) -- [withcatai/catai](https://github.com/withcatai/catai) -- [semperai/amica](https://github.com/semperai/amica) +- [LMStudio](https://lmstudio.ai/) (proprietary) +- 
[LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) +- [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) +- [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) +- [ollama/ollama](https://github.com/ollama/ollama) +- [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) (AGPL) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) -- [iohub/collama](https://github.com/iohub/coLLaMA) -- [pythops/tenere](https://github.com/pythops/tenere) +- [pythops/tenere](https://github.com/pythops/tenere) (AGPL) +- [semperai/amica](https://github.com/semperai/amica) +- [withcatai/catai](https://github.com/withcatai/catai) --- From 098f6d737b65134cf220d12b9b706e8cfc5e4610 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Mon, 5 Feb 2024 19:33:00 +0100 Subject: [PATCH 519/811] make: Use ccache for faster compilation (#5318) * make: Use ccache for faster compilation --- CMakeLists.txt | 4 +- Makefile | 169 ++++++++++++++++++++++++++++++++++--------------- 2 files changed, 121 insertions(+), 52 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 8c04e4c19..427015be5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -809,9 +809,9 @@ if (LLAMA_CCACHE) if (LLAMA_CCACHE_FOUND) set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache) set(ENV{CCACHE_SLOPPINESS} time_macros) - message(STATUS "Using ccache") + message(STATUS "ccache found, compilation results will be cached. Disable with LLAMA_CCACHE=OFF.") else() - message(STATUS "Warning: ccache not found - consider installing it or use LLAMA_CCACHE=OFF") + message(STATUS "Warning: ccache not found - consider installing it for faster compilation or disable this warning with LLAMA_CCACHE=OFF") endif () endif() diff --git a/Makefile b/Makefile index 21d5e15ba..ba73f0637 100644 --- a/Makefile +++ b/Makefile @@ -112,6 +112,18 @@ MK_CXXFLAGS += -O3 MK_NVCCFLAGS += -O3 endif +ifndef LLAMA_NO_CCACHE +CCACHE := $(shell which ccache) +ifdef CCACHE +export CCACHE_SLOPPINESS = time_macros +$(info I ccache found, compilation results will be cached. Disable with LLAMA_NO_CCACHE.) +CC := $(CCACHE) $(CC) +CXX := $(CCACHE) $(CXX) +else +$(info I ccache not found. Consider installing it for faster compilation.) +endif # CCACHE +endif # LLAMA_NO_CCACHE + # clock_gettime came in POSIX.1b (1993) # CLOCK_MONOTONIC came in POSIX.1-2001 / SUSv3 as optional # posix_memalign came in POSIX.1-2001 / SUSv3 @@ -374,9 +386,9 @@ ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo endif # LLAMA_DEBUG ifdef LLAMA_CUDA_NVCC - NVCC = $(LLAMA_CUDA_NVCC) + NVCC = $(CCACHE) $(LLAMA_CUDA_NVCC) else - NVCC = nvcc + NVCC = $(CCACHE) nvcc endif #LLAMA_CUDA_NVCC ifdef CUDA_DOCKER_ARCH MK_NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH) @@ -483,7 +495,7 @@ ifdef LLAMA_HIPBLAS ROCM_PATH ?= /opt/rocm GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) endif - HIPCC ?= $(ROCM_PATH)/bin/hipcc + HIPCC ?= $(CCACHE) $(ROCM_PATH)/bin/hipcc LLAMA_CUDA_DMMV_X ?= 32 LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 @@ -607,97 +619,135 @@ libllama.a: llama.o ggml.o $(OBJS) $(COMMON_DEPS) clean: rm -vrf *.o tests/*.o *.so *.a *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + find examples pocs -type f -name "*.o" -delete # # Examples # +# $< is the first prerequisite, i.e. the source file. 
+# Explicitly compile this to an object file so that it can be cached with ccache. +# The source file is then filtered out from $^ (the list of all prerequisites) and the object file is added instead. + +# Helper function that replaces .c, .cpp, and .cu file endings with .o: +GET_OBJ_FILE = $(patsubst %.c,%.o,$(patsubst %.cpp,%.o,$(patsubst %.cu,%.o,$(1)))) + main: examples/main/main.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) @echo @echo '==== Run ./main -h for help. ====' @echo infill: examples/infill/infill.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) simple: examples/simple/simple.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tokenize: examples/tokenize/tokenize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) batched: examples/batched/batched.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml.o llama.o common.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) quantize: examples/quantize/quantize.cpp build-info.o ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ 
$(LDFLAGS) save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2) gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) libllava.a: examples/llava/llava.cpp examples/llava/llava.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h common/base64.hpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ -Wno-cast-qual llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c examples/llava/llava.cpp -o $(call GET_OBJ_FILE, examples/llava/llava.cpp) + $(CXX) $(CXXFLAGS) $(filter-out %.h $< examples/llava/clip.cpp examples/llava/llava.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) $(call GET_OBJ_FILE, examples/llava/llava.cpp) -o $@ $(LDFLAGS) baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h 
$<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) beam-search: examples/beam-search/beam-search.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) export-lora: examples/export-lora/export-lora.cpp ggml.o common/common.h $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) ifeq ($(UNAME_S),Darwin) swift: examples/batched.swift @@ -705,7 +755,7 @@ swift: examples/batched.swift endif common/build-info.cpp: $(wildcard .git/index) scripts/build-info.sh - @sh scripts/build-info.sh $(CC) > $@.tmp + @sh scripts/build-info.sh "$(CC)" > $@.tmp @if ! 
cmp -s $@.tmp $@; then \ mv $@.tmp $@; \ else \ @@ -722,7 +772,8 @@ build-info.o: common/build-info.cpp tests: $(TEST_TARGETS) benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.o ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) run-benchmark-matmult: benchmark-matmult ./$@ @@ -730,58 +781,76 @@ run-benchmark-matmult: benchmark-matmult .PHONY: run-benchmark-matmult swift vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) q8dot: pocs/vdot/q8dot.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-llama-grammar: tests/test-llama-grammar.cpp ggml.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-grammar-parser: tests/test-grammar-parser.cpp ggml.o llama.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-double-float: tests/test-double-float.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-grad0: tests/test-grad0.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-opt: tests/test-opt.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-quantize-fns: tests/test-quantize-fns.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp ggml.o 
llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-rope: tests/test-rope.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-c.o: tests/test-c.c llama.h $(CC) $(CFLAGS) -c $(filter-out %.h,$^) -o $@ tests/test-backend-ops: tests/test-backend-ops.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-model-load-cancel: tests/test-model-load-cancel.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-autorelease: tests/test-autorelease.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) From 906cff55c2848fda091d888a1585915ec0c9ea9e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 6 Feb 2024 07:47:22 +0200 Subject: [PATCH 520/811] py : handle byte tokens in `get_token_type` (#5341) * py : handle byte tokens in `get_token_type` * py : fix empty bytes arg --- convert.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/convert.py b/convert.py index 75c100118..4a2847a27 100755 --- a/convert.py +++ b/convert.py @@ -515,10 +515,14 @@ class HfVocab: # Yield token text, score, and type yield token_text, self.get_token_score(token_id), self.get_token_type( - token_id, self.special_ids # Reuse already stored special IDs + token_id, token_text, self.special_ids # Reuse already stored special IDs ) - def get_token_type(self, token_id: int, special_ids: set[int]) -> gguf.TokenType: + def get_token_type(self, token_id: int, token_text: bytes, special_ids: set[int]) -> gguf.TokenType: + # Special case for byte tokens + if re.fullmatch(br"<0x[0-9A-Fa-f]{2}>", token_text): + return gguf.TokenType.BYTE + # Determine token type based on whether it's a special token return gguf.TokenType.CONTROL if token_id in special_ids else gguf.TokenType.NORMAL @@ -530,7 +534,7 @@ class HfVocab: def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: for text in self.added_tokens_list: if text in self.specials: - toktype = self.get_token_type(self.specials[text], self.special_ids) + toktype = 
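Note: the convert.py change above classifies any token spelled `<0xNN>` (exactly two hex digits) as a BYTE token via `re.fullmatch`. For illustration only, the same check can be written as a standalone C++ snippet; `TokenType` and `is_byte_token` are placeholder names and not part of the converter.

```cpp
// Minimal sketch: classify a token string as BYTE if it matches "<0xNN>",
// mirroring the re.fullmatch(br"<0x[0-9A-Fa-f]{2}>", token_text) check above.
#include <iostream>
#include <regex>
#include <string>

enum class TokenType { Normal, Control, Byte };

static bool is_byte_token(const std::string & token_text) {
    static const std::regex byte_re("<0x[0-9A-Fa-f]{2}>");
    return std::regex_match(token_text, byte_re); // regex_match requires a full match
}

int main() {
    for (const std::string t : {"<0x0A>", "<0xff>", "hello", "<0x1>"}) {
        std::cout << t << " -> " << (is_byte_token(t) ? "BYTE" : "NORMAL/CONTROL") << "\n";
    }
    return 0;
}
```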
self.get_token_type(self.specials[text], b'', self.special_ids) score = self.get_token_score(self.specials[text]) else: toktype = gguf.TokenType.USER_DEFINED From 4ffc7a17d4e80c5f3f905139cb570ed9b6934fcb Mon Sep 17 00:00:00 2001 From: Niall Coates <1349685+Niall-@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:16:23 +0000 Subject: [PATCH 521/811] server : various fixes for the prompt field in /completion (#5300) server : fix deadlock when prompt array contains strings and numbers server : removed an unnecessary generation when generating multi-prompts server : removed an unnecessary assert --- examples/server/server.cpp | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8000fee5c..fc7e723a1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1163,13 +1163,30 @@ struct llama_server_context task.multitask_id = multitask_id; // when a completion task's prompt array is not a singleton, we split it into multiple requests - if (task.data.count("prompt") && task.data.at("prompt").size() > 1) - { - split_multiprompt_task(task_id, task); - } - // otherwise, it's a single-prompt task, we actually queue it - queue_tasks.post(task); + // if there's numbers in the prompt array it will be treated as an array of tokens + if (task.data.count("prompt") != 0 && task.data.at("prompt").size() > 1) { + bool numbers = false; + for (const auto& e : task.data.at("prompt")) { + if (e.is_number()) { + numbers = true; + break; + } + } + + // NOTE: split_multiprompt_task() does not handle a mix of strings and numbers, + // it will completely stall the server. I don't know where the bug for this is. + // + // if there are numbers, it needs to be treated like a single prompt, + // queue_tasks handles a mix of strings and numbers just fine. + if (numbers) { + queue_tasks.post(task); + } else { + split_multiprompt_task(task_id, task); + } + } else { + queue_tasks.post(task); + } } // for multiple images processing @@ -1251,7 +1268,10 @@ struct llama_server_context void split_multiprompt_task(int multitask_id, task_server& multiprompt_task) { int prompt_count = multiprompt_task.data.at("prompt").size(); - assert(prompt_count > 1); + if (prompt_count <= 1) { + send_error(multiprompt_task, "error while handling multiple prompts"); + return; + } // generate all the ID for subtask std::vector subtask_ids(prompt_count); From 31e790322133a4b1d0684527ea446e765e8a96cf Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Tue, 6 Feb 2024 04:20:00 -0500 Subject: [PATCH 522/811] server : add `dynatemp_range` and `dynatemp_exponent` (#5352) * server: added `dynatemp_range` and `dynatemp_exponent` * Update README.md --------- Co-authored-by: Michael Coppola --- examples/server/README.md | 4 ++++ examples/server/server.cpp | 46 +++++++++++++++++++++----------------- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index d8e7c313e..46d8f85ae 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -137,6 +137,10 @@ node index.js `temperature`: Adjust the randomness of the generated text (default: 0.8). + `dynatemp_range`: Dynamic temperature range (default: 0.0, 0.0 = disabled). + + `dynatemp_exponent`: Dynamic temperature exponent (default: 1.0). + `top_k`: Limit the next token selection to the K most probable tokens (default: 40). 
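Note: `dynatemp_range` and `dynatemp_exponent` expose an entropy-based dynamic temperature. As a rough sketch of the idea (assuming the mapping T = T_min + (T_max - T_min) * (H / H_max)^exponent with T_min/T_max = temp -/+ range; this is not the library's exact sampler code), the temperature rises toward T_max when the candidate distribution is flat and falls toward T_min when it is peaked:

```cpp
// Illustrative entropy-scaled temperature; assumed mapping, not the exact llama.cpp code.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

static double dynamic_temperature(const std::vector<double> & probs,
                                  double temp, double range, double exponent) {
    if (range <= 0.0 || probs.size() < 2) {
        return temp; // disabled, or nothing to measure
    }
    double entropy = 0.0;
    for (double p : probs) {
        if (p > 0.0) entropy -= p * std::log(p);
    }
    const double max_entropy = std::log((double) probs.size()); // uniform distribution
    const double t_min = std::max(0.0, temp - range);
    const double t_max = temp + range;
    return t_min + (t_max - t_min) * std::pow(entropy / max_entropy, exponent);
}

int main() {
    std::vector<double> peaked  = {0.97, 0.01, 0.01, 0.01};
    std::vector<double> uniform = {0.25, 0.25, 0.25, 0.25};
    std::printf("peaked : T = %.3f\n", dynamic_temperature(peaked,  0.8, 0.5, 1.0));
    std::printf("uniform: T = %.3f\n", dynamic_temperature(uniform, 0.8, 0.5, 1.0));
    return 0;
}
```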
`top_p`: Limit the next token selection to a subset of tokens with a cumulative probability above a threshold P (default: 0.95). diff --git a/examples/server/server.cpp b/examples/server/server.cpp index fc7e723a1..e48a1da75 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -524,27 +524,29 @@ struct llama_server_context slot->oaicompat_model = ""; } - slot->params.stream = json_value(data, "stream", false); - slot->params.cache_prompt = json_value(data, "cache_prompt", false); - slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); - slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); - slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); - slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p); - slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); - slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); - slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); - slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); - slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); - slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); - slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); - slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); - slot->sparams.mirostat_tau = json_value(data, "mirostat_tau", default_sparams.mirostat_tau); - slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); - slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); - slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); - slot->params.seed = json_value(data, "seed", default_params.seed); - slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); - slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + slot->params.stream = json_value(data, "stream", false); + slot->params.cache_prompt = json_value(data, "cache_prompt", false); + slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); + slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); + slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); + slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p); + slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); + slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); + slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); + slot->sparams.dynatemp_range = json_value(data, "dynatemp_range", default_sparams.dynatemp_range); + slot->sparams.dynatemp_exponent = json_value(data, "dynatemp_exponent", default_sparams.dynatemp_exponent); + slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); + slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); + slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); + slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); + slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); + slot->sparams.mirostat_tau = json_value(data, 
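Note: each `json_value(data, "key", default)` call in the block above reads an optional field from the request body and falls back to the current default when the key is missing. A minimal standalone version of such a helper, written against nlohmann::json (which the server already uses); the exact behaviour on type mismatches is an assumption here, not a transcription of the server's helper:

```cpp
// Sketch of a json_value-style helper: return body[key] if present and
// convertible, otherwise the supplied default.
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

template <typename T>
static T json_value(const json & body, const std::string & key, const T & default_value) {
    if (body.contains(key) && !body.at(key).is_null()) {
        try {
            return body.at(key).get<T>();
        } catch (const json::exception &) {
            return default_value; // wrong type: fall back rather than abort
        }
    }
    return default_value;
}

int main() {
    json data = json::parse(R"({"temperature": 0.7, "top_k": 50})");
    float temp  = json_value(data, "temperature", 0.8f);
    int   top_k = json_value(data, "top_k", 40);
    float top_p = json_value(data, "top_p", 0.95f); // absent -> default
    std::cout << temp << " " << top_k << " " << top_p << "\n";
    return 0;
}
```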
"mirostat_tau", default_sparams.mirostat_tau); + slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); + slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); + slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); + slot->params.seed = json_value(data, "seed", default_params.seed); + slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); + slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); // infill if (data.count("input_prefix") != 0) @@ -1002,6 +1004,8 @@ struct llama_server_context {"model", params.model_alias}, {"seed", slot.params.seed}, {"temperature", slot.sparams.temp}, + {"dynatemp_range", slot.sparams.dynatemp_range}, + {"dynatemp_exponent", slot.sparams.dynatemp_exponent}, {"top_k", slot.sparams.top_k}, {"top_p", slot.sparams.top_p}, {"min_p", slot.sparams.min_p}, From 8a79c591de9b7ff3242a94f68b7fb5a17ed8c2be Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Tue, 6 Feb 2024 04:20:59 -0500 Subject: [PATCH 523/811] server : include total "num_slots" in props endpoint (#5349) --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index e48a1da75..d86d7e04a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -432,6 +432,7 @@ struct llama_server_context } default_generation_settings_for_props = get_formated_generation(slots.front()); + default_generation_settings_for_props["num_slots"] = params.n_parallel; default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); From 2c516611f1d0f1e5e9754f8ea1cf97cb1b17bf2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 6 Feb 2024 14:44:06 +0100 Subject: [PATCH 524/811] CUDA: mul_mat_vec_q for batch sizes > 1 (#5351) --- ggml-cuda.cu | 240 +++++++++++++++++++++------------------------------ 1 file changed, 98 insertions(+), 142 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 3242a0b4a..95161b3f4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5310,41 +5310,50 @@ template static __global__ void #endif // __CUDA_ARCH__ >= CC_VOLTA } -template -static __global__ void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows) { +template +static __global__ void mul_mat_vec_q( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par) { + + const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par; + const int row = blockIdx.x*blockDim.y + threadIdx.y; - if (row >= nrows) { + if (row >= nrows_x) { return; } - const int blocks_per_row = ncols / qk; + const int blocks_per_row_x = ncols_x / qk; + const int blocks_per_col_y = nrows_y / QK8_1; const int blocks_per_warp = vdr * WARP_SIZE / qi; // partial sum for each thread - float tmp = 0.0f; + float tmp[ncols_y_template != 0 ? 
ncols_y_template : 8] = {0.0f}; const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row; i += blocks_per_warp) { - const int ibx = row*blocks_per_row + i; // x block index + for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row_x; i += blocks_per_warp) { + const int ibx = row*blocks_per_row_x + i; // x block index const int iby = i * (qk/QK8_1); // y block index that aligns with ibx const int iqs = vdr * (threadIdx.x % (qi/vdr)); // x block quant index when casting the quants to int - tmp += vec_dot_q_cuda(&x[ibx], &y[iby], iqs); +#pragma unroll + for (int j = 0; j < ncols_y; ++j) { + tmp[j] += vec_dot_q_cuda(&x[ibx], &y[j*blocks_per_col_y + iby], iqs); + } } // sum up partial sums and write back result #pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + for (int j = 0; j < ncols_y; ++j) { + tmp[j] = warp_reduce_sum(tmp[j]); - if (threadIdx.x == 0) { - dst[row] = tmp; + if (threadIdx.x == 0) { + dst[j*nrows_x + row] = tmp[j]; + } } } @@ -6816,121 +6825,56 @@ static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, floa <<>>(vx, y, dst, ncols, nrows); } -static void mul_mat_vec_q4_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK4_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} +template +static void mul_mat_vec_q_cuda( + const void * vx, const void * vy, float * dst, + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) { -static void mul_mat_vec_q4_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK4_1 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} + GGML_ASSERT(ncols_x % qk == 0); + GGML_ASSERT(ncols_y <= 8); -static void mul_mat_vec_q5_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK5_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q5_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK5_1 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q8_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK8_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, 
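Note: the batched kernel computes, for every row of the quantized matrix and every one of the `ncols_y` vectors in the batch, one dot product, with each thread holding a small per-column accumulator array and writing `dst[j*nrows_x + row]`. A plain C++ reference of the same result on unquantized floats (a scalar model of what the kernel computes, useful for checking the output layout; it is not CUDA code):

```cpp
// CPU reference for a batched matrix-vector product with the kernel's output
// layout: dst[j*nrows_x + row] = dot(x_row, y_col_j).
#include <cstdio>
#include <vector>

static void mul_mat_vec_ref(const std::vector<float> & x,   // nrows_x * ncols_x, row-major
                            const std::vector<float> & y,   // ncols_y * ncols_x, one vector per batch column
                            std::vector<float> & dst,       // ncols_y * nrows_x
                            int nrows_x, int ncols_x, int ncols_y) {
    for (int j = 0; j < ncols_y; ++j) {
        for (int row = 0; row < nrows_x; ++row) {
            float sum = 0.0f;
            for (int i = 0; i < ncols_x; ++i) {
                sum += x[row*ncols_x + i] * y[j*ncols_x + i];
            }
            dst[j*nrows_x + row] = sum;
        }
    }
}

int main() {
    const int nrows_x = 2, ncols_x = 4, ncols_y = 3;
    std::vector<float> x = {1,2,3,4,  5,6,7,8};
    std::vector<float> y(ncols_y*ncols_x, 1.0f);   // three all-ones vectors
    std::vector<float> dst(ncols_y*nrows_x, 0.0f);
    mul_mat_vec_ref(x, y, dst, nrows_x, ncols_x, ncols_y);
    for (float v : dst) std::printf("%g ", v);     // 10 26 10 26 10 26
    std::printf("\n");
    return 0;
}
```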
GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q2_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q3_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q4_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q5_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq2_xs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq3_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); + switch (ncols_y) { + case 1: + mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 2: + mul_mat_vec_q<2, qk, qi, block_q_t, vdr, 
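Note: the launcher below switches over the runtime batch width so that `ncols_y` becomes a compile-time template argument, letting the per-thread `tmp[ncols_y]` accumulators live in registers and the inner loop unroll fully. The same host-side pattern, reduced to a toy example in plain C++ (illustrative only, not part of the patch):

```cpp
// Toy version of "switch over a small runtime value to pick a template
// instantiation", the pattern used to give the kernel a compile-time ncols_y.
#include <cstdio>

template <int NCOLS>
static void process_batch(const float * y) {
    float acc[NCOLS] = {};              // fixed-size array -> can stay in registers
    for (int j = 0; j < NCOLS; ++j) {   // trip count known at compile time
        acc[j] += y[j];
    }
    std::printf("NCOLS=%d first=%g\n", NCOLS, acc[0]);
}

static void process_batch_dispatch(const float * y, int ncols) {
    switch (ncols) {
        case 1: process_batch<1>(y); break;
        case 2: process_batch<2>(y); break;
        case 3: process_batch<3>(y); break;
        case 4: process_batch<4>(y); break;
        default: std::printf("unsupported batch width %d\n", ncols); break;
    }
}

int main() {
    const float y[4] = {1.f, 2.f, 3.f, 4.f};
    process_batch_dispatch(y, 3);
    return 0;
}
```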
vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 3: + mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 4: + mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 5: + mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 6: + mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 7: + mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 8: + mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + default: + GGML_ASSERT(false); + // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + } } static void ggml_mul_mat_q4_0_q8_1_cuda( @@ -8578,50 +8522,61 @@ static void ggml_cuda_op_mul_mat_vec_q( const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, const int64_t src1_padded_row_size, cudaStream_t stream) { - GGML_ASSERT(ggml_nrows(src1) == 1); - const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; switch (src0->type) { case GGML_TYPE_Q4_0: - mul_mat_vec_q4_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q4_1: - mul_mat_vec_q4_1_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_0: - mul_mat_vec_q5_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_1: - mul_mat_vec_q5_1_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q8_0: - mul_mat_vec_q8_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q2_K: - mul_mat_vec_q2_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q3_K: - mul_mat_vec_q3_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q4_K: - mul_mat_vec_q4_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_K: - mul_mat_vec_q5_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q6_K: - 
mul_mat_vec_q6_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ2_XXS: - mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ2_XS: - mul_mat_vec_iq2_xs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ3_XXS: - mul_mat_vec_iq3_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; default: GGML_ASSERT(false); @@ -9945,17 +9900,18 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else - const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && ggml_nrows(src1) == 1; + const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); #endif // GGML_CUDA_FORCE_DMMV if (use_mul_mat_vec_q) { - // NOTE: this kernel does not support ggml_nrows(src1) > 1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (use_mul_mat_q) { + if (src1->ne[1] <= 8 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); + } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); From 2e9c0bd6b301155ce749e162527fc55e9fb5b832 Mon Sep 17 00:00:00 2001 From: BarfingLemurs <128182951+BarfingLemurs@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:06:48 -0500 Subject: [PATCH 525/811] readme : add phi, orion 14b, internlm2, and yi-VL to readme (#5362) --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bb6c49338..cc87ac797 100644 --- a/README.md +++ b/README.md @@ -105,11 +105,14 @@ improved significantly thanks to many contributions. It is the main playground f - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) - [X] [Bloom](https://github.com/ggerganov/llama.cpp/pull/3553) - [x] [Yi models](https://huggingface.co/models?search=01-ai/Yi) -- [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) +- [X] [StableLM models](https://huggingface.co/stabilityai) - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) +- [x] [Phi models](https://huggingface.co/models?search=microsoft/phi) - [x] [GPT-2](https://huggingface.co/gpt2) +- [x] [Orion 14B](https://github.com/ggerganov/llama.cpp/pull/5118) +- [x] [InternLM2](https://huggingface.co/models?search=internlm2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) **Multimodal models:** @@ -119,6 +122,7 @@ improved significantly thanks to many contributions. 
It is the main playground f - [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) +- [x] [Yi-VL](https://huggingface.co/models?search=Yi-VL) **Bindings:** From f57fadc009cbff741a1961cb7896c47d73978d2c Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 6 Feb 2024 17:28:02 +0200 Subject: [PATCH 526/811] Slight quantization improvement for Q4_K and Q5_K (#5361) * Q4_K: slightly better quantization * Q5_K: slightly better quantization --------- Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 75 +++++++++++++++++++++++---------------------------- 1 file changed, 33 insertions(+), 42 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 014c0525a..101d3e783 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2381,19 +2381,20 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri uint8_t L[QK_K]; uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; float weights[32]; - float mins[QK_K/32]; - float scales[QK_K/32]; + float sw[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; for (int i = 0; i < nb; i++) { float sum_x2 = 0; for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; - float sigma2 = sum_x2/QK_K; + float sigma2 = 2*sum_x2/QK_K; float av_x = sqrtf(sigma2); - float max_scale = 0; // as we are deducting the min, scales are always positive - float max_min = 0; for (int j = 0; j < QK_K/32; ++j) { if (quant_weights) { const float * qw = quant_weights + QK_K*i + 32*j; @@ -2401,25 +2402,17 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri } else { for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; scales[j] = make_qkx3_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); - //scales[j] = make_qkx2_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -1.f, 0.1f, 20, false); - float scale = scales[j]; - if (scale > max_scale) { - max_scale = scale; - } - float min = mins[j]; - if (min > max_min) { - max_min = min; - } } - float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; - float inv_min = max_min > 0 ? 
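Note: this Q4_K/Q5_K change replaces the simple "divide by the maximum" encoding of the per-sub-block scales and mins with an importance-weighted fit (`make_qp_quants`, weighted by each sub-block's summed weight `sw`). As background, a stripped-down sketch of the underlying scheme — one scale and one min per 32-value sub-block, values stored as 4-bit offsets. This is a deliberate simplification for illustration; it is not the k-quants algorithm itself, which also quantizes the per-block scale and min to 6 bits.

```cpp
// Simplified asymmetric 4-bit block quantization: x ~= d*q - m with q in [0,15],
// where m = -min(x). Illustration only.
#include <algorithm>
#include <cmath>
#include <cstdio>

constexpr int BLOCK = 32;

struct BlockQ4 {
    float d;                 // scale
    float m;                 // plays the role of the block minimum (x ~= d*q - m)
    unsigned char q[BLOCK];  // 4-bit codes
};

static BlockQ4 quantize_block(const float * x) {
    float vmin = *std::min_element(x, x + BLOCK);
    float vmax = *std::max_element(x, x + BLOCK);
    BlockQ4 b{};
    b.d = (vmax - vmin) / 15.0f;
    b.m = -vmin;
    const float id = b.d != 0.0f ? 1.0f / b.d : 0.0f;
    for (int i = 0; i < BLOCK; ++i) {
        int q = (int) std::lround((x[i] - vmin) * id);
        b.q[i] = (unsigned char) std::clamp(q, 0, 15);
    }
    return b;
}

int main() {
    float x[BLOCK];
    for (int i = 0; i < BLOCK; ++i) x[i] = std::sin(0.3f * i);
    BlockQ4 b = quantize_block(x);
    double err = 0.0;
    for (int i = 0; i < BLOCK; ++i) {
        float rec = b.d * b.q[i] - b.m;   // reconstruction
        err += (rec - x[i]) * (rec - x[i]);
    }
    std::printf("rms error: %g\n", std::sqrt(err / BLOCK));
    return 0;
}
```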
63.f/max_min : 0.f; + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); for (int j = 0; j < QK_K/32; ++j) { - uint8_t ls = nearest_int(inv_scale*scales[j]); - uint8_t lm = nearest_int(inv_min*mins[j]); - ls = MIN(63, ls); - lm = MIN(63, lm); + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; if (j < 4) { y[i].scales[j] = ls; y[i].scales[j+4] = lm; @@ -2429,8 +2422,8 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); - y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { @@ -2688,20 +2681,21 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri const int nb = n_per_row / QK_K; uint8_t L[QK_K]; - float mins[QK_K/32]; - float scales[QK_K/32]; - float weights[32]; uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float sw[QK_K/32]; + float weights[32]; for (int i = 0; i < nb; i++) { float sum_x2 = 0; for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; - float sigma2 = sum_x2/QK_K; + float sigma2 = 2*sum_x2/QK_K; float av_x = sqrtf(sigma2); - float max_scale = 0; // as we are deducting the min, scales are always positive - float max_min = 0; for (int j = 0; j < QK_K/32; ++j) { if (quant_weights) { const float * qw = quant_weights + QK_K*i + 32*j; @@ -2709,22 +2703,19 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri } else { for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; + scales[j] = make_qkx3_quants(32, 31, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); - float scale = scales[j]; - if (scale > max_scale) { - max_scale = scale; - } - float min = mins[j]; - if (min > max_min) { - max_min = min; - } } - float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; - float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); + for (int j = 0; j < QK_K/32; ++j) { - uint8_t ls = nearest_int(inv_scale*scales[j]); - uint8_t lm = nearest_int(inv_min*mins[j]); + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; ls = MIN(63, ls); lm = MIN(63, lm); if (j < 4) { @@ -2736,8 +2727,8 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); - y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { From b08f22c882a1443e6b97081f3ce718a4d1a741f8 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 6 Feb 2024 19:00:16 +0200 Subject: [PATCH 527/811] Update README.md (#5366) Add some links to quantization related PRs --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index cc87ac797..34f2021f9 100644 --- a/README.md +++ b/README.md @@ -736,9 +736,21 @@ Several quantization methods are supported. 
They differ in the resulting model d | 13B | bits/weight | 16.0 | 4.5 | 5.0 | 5.5 | 6.0 | 8.5 | - [k-quants](https://github.com/ggerganov/llama.cpp/pull/1684) -- recent k-quants improvements +- recent k-quants improvements and new i-quants - [#2707](https://github.com/ggerganov/llama.cpp/pull/2707) - [#2807](https://github.com/ggerganov/llama.cpp/pull/2807) + - [#4773 - 2-bit i-quants (inference)](https://github.com/ggerganov/llama.cpp/pull/4773) + - [#4856 - 2-bit i-quants (inference)](https://github.com/ggerganov/llama.cpp/pull/4856) + - [#4861 - importance matrix](https://github.com/ggerganov/llama.cpp/pull/4861) + - [#4872 - MoE models](https://github.com/ggerganov/llama.cpp/pull/4872) + - [#4897 - 2-bit quantization](https://github.com/ggerganov/llama.cpp/pull/4897) + - [#4930 - imatrix for all k-quants](https://github.com/ggerganov/llama.cpp/pull/4930) + - [#4951 - imatrix on the GPU](https://github.com/ggerganov/llama.cpp/pull/4957) + - [#4969 - imatrix for legacy quants](https://github.com/ggerganov/llama.cpp/pull/4969) + - [#4996 - k-qunats tuning](https://github.com/ggerganov/llama.cpp/pull/4996) + - [#5060 - Q3_K_XS](https://github.com/ggerganov/llama.cpp/pull/5060) + - [#5196 - 3-bit i-quants](https://github.com/ggerganov/llama.cpp/pull/5196) + - [quantization tuning](https://github.com/ggerganov/llama.cpp/pull/5320), [another one](https://github.com/ggerganov/llama.cpp/pull/5334), and [another one](https://github.com/ggerganov/llama.cpp/pull/5361) ### Perplexity (measuring model quality) From 17c97fb0620448b37516a3f53fea6c482b0a30a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 6 Feb 2024 18:43:06 +0100 Subject: [PATCH 528/811] CUDA: mul_mat_vec_q max. batch size 8 -> 4 (#5370) --- ggml-cuda.cu | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 95161b3f4..3b828375e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6831,7 +6831,7 @@ static void mul_mat_vec_q_cuda( const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) { GGML_ASSERT(ncols_x % qk == 0); - GGML_ASSERT(ncols_y <= 8); + GGML_ASSERT(ncols_y <= 4); const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; const dim3 block_nums(block_num_y, 1, 1); @@ -6853,22 +6853,22 @@ static void mul_mat_vec_q_cuda( mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); break; - case 5: - mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 6: - mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 7: - mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 8: - mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; + // case 5: + // mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 6: + // mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 7: + // mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 8: + // mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, 
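Note: the follow-up patch caps the batched `mul_mat_vec_q` path at 4 columns: every extra column adds another per-thread accumulator and another set of loads, so beyond a small batch the tiled `mul_mat_q` or cuBLAS paths win. The host-side choice then reduces to a predicate of roughly this shape; the constant and the exact fallback order are assumptions for illustration, not a transcription of ggml-cuda.

```cpp
// Rough model of the dispatch decision after this pair of patches.
// MIN_CC_DP4A_ASSUMED is an assumption (dp4a needs a sufficiently new GPU);
// the real code uses ggml-cuda's own constants and build flags.
#include <cstdint>
#include <cstdio>

enum class MatMulPath { MUL_MAT_VEC_Q, MUL_MAT_Q, CUBLAS };

static MatMulPath choose_path(int64_t n_batch_cols, int compute_capability,
                              bool src0_is_quantized, bool mul_mat_q_supported) {
    const int MIN_CC_DP4A_ASSUMED = 610; // assumed threshold for dp4a support
    const bool dp4a_ok = compute_capability >= MIN_CC_DP4A_ASSUMED && src0_is_quantized;
    if (dp4a_ok && n_batch_cols <= 4) {
        return MatMulPath::MUL_MAT_VEC_Q;   // small batch: dot-product kernel
    }
    if (mul_mat_q_supported) {
        return MatMulPath::MUL_MAT_Q;       // larger batch: tiled quantized matmul
    }
    return MatMulPath::CUBLAS;              // fall back to dequantize + cuBLAS
}

int main() {
    std::printf("%d\n", (int) choose_path(2,  860, true, true));  // -> MUL_MAT_VEC_Q
    std::printf("%d\n", (int) choose_path(16, 860, true, true));  // -> MUL_MAT_Q
    return 0;
}
```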
nrows_y, ncols_y); + // break; default: GGML_ASSERT(false); // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> @@ -9909,7 +9909,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (src1->ne[1] <= 8 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); From 213d1439fadefe182f69c5f7e8dd3b4b6572ebcb Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Tue, 6 Feb 2024 18:08:38 +0000 Subject: [PATCH 529/811] server : remove model.json endpoint (#5371) --- examples/server/completion.js.hpp | 448 +++++++++++++++------------ examples/server/public/completion.js | 3 +- examples/server/server.cpp | 11 - 3 files changed, 244 insertions(+), 218 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index fe5f81228..f5e696e17 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -236,214 +236,250 @@ unsigned char completion_js[] = { 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, - 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, - 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x73, + 0x28, 0x27, 0x73, 0x6c, 0x6f, 0x74, 0x20, 0x75, 0x6e, 0x61, 0x76, 0x61, + 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x27, 0x29, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x61, + 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x74, 0x6f, 0x20, 0x62, + 0x65, 0x20, 0x63, 0x61, 0x75, 0x67, 0x68, 0x74, 0x20, 0x62, 0x79, 0x20, + 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x20, 0x63, 0x61, 0x6c, + 0x6c, 0x65, 0x72, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x27, + 0x73, 0x6c, 0x6f, 0x74, 0x20, 0x75, 0x6e, 0x61, 0x76, 0x61, 0x69, 0x6c, + 0x61, 0x62, 0x6c, 0x65, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, + 
0x72, 0x3a, 0x20, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, - 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, - 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, - 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, - 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, - 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x20, - 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, - 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, - 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, - 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, - 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, - 
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, - 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, - 
0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, - 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, - 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, - 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, - 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, - 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, 0x74, 0x68, 0x61, 0x74, - 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, - 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, - 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, - 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, - 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, - 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, - 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, + 
0x72, 0x72, 0x6f, 0x72, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x61, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, + 0x75, 0x20, 0x63, 0x61, 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, + 0x69, 0x62, 0x65, 0x20, 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, + 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x6e, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, + 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, - 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, - 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, - 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, - 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, - 
0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, - 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, - 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, - 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, - 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, - 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, - 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, - 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, - 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, - 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, - 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, - 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, - 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, - 
0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, - 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, - 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, - 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, - 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, - 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, + 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, + 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, + 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 
0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a + 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, + 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, + 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, + 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, + 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, + 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, + 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, + 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, + 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x3a, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x7d, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, + 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x61, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, + 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, + 0x6f, 0x74, 0x20, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, + 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, + 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, + 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x2e, 
0x74, 0x68, 0x65, 0x6e, 0x28, + 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, + 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, + 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, + 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, + 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, + 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, + 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, + 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, + 0x20, 0x28, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, + 0x65, 0x6a, 0x65, 0x63, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, + 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, + 0x2a, 0x2a, 0x0a, 0x20, 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, + 0x63, 0x61, 0x74, 0x65, 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, + 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 
0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, + 0x61, 0x63, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, + 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, + 0x63, 0x6b, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, + 0x6e, 0x66, 0x6f, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x69, 0x73, 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, + 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, + 0x6f, 0x20, 0x6f, 0x6e, 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x20, + 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x66, 0x65, 0x74, 0x63, + 0x68, 0x28, 0x22, 0x2f, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x22, 0x29, 0x2e, + 0x74, 0x68, 0x65, 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, + 0x6a, 0x73, 0x6f, 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; -unsigned int completion_js_len = 5346; +unsigned int completion_js_len = 5782; diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index baaec1d60..ab38a7b40 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -195,7 +195,8 @@ export const llamaComplete = async (params, controller, callback) => { // Get the model info from the server. This is useful for getting the context window and so on. 
export const llamaModelInfo = async () => { if (!generation_settings) { - generation_settings = await fetch("/model.json").then(r => r.json()); + const props = await fetch("/props").then(r => r.json()); + generation_settings = props.default_generation_settings; } return generation_settings; } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d86d7e04a..9481ce6b1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -990,11 +990,6 @@ struct llama_server_context queue_results.send(res); } - json get_model_props() - { - return get_formated_generation(slots[0]); - } - json get_formated_generation(llama_client_slot &slot) { const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); @@ -2895,12 +2890,6 @@ int main(int argc, char **argv) } }); - svr.Get("/model.json", [&llama](const httplib::Request &, httplib::Response &res) - { - const json data = llama.get_model_props(); - return res.set_content(data.dump(), "application/json; charset=utf-8"); - }); - svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response &res) { return res.set_content("", "application/json; charset=utf-8"); }); From f68664ac241a6b5c233d8f1051eef20929b06008 Mon Sep 17 00:00:00 2001 From: Sang-Kil Park Date: Wed, 7 Feb 2024 13:28:00 +0900 Subject: [PATCH 530/811] convert : fix TypeError on GPT-2 vocab.json (#5288) --- convert.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/convert.py b/convert.py index 4a2847a27..323e8058d 100755 --- a/convert.py +++ b/convert.py @@ -334,9 +334,9 @@ class Params: class BpeVocab: def __init__(self, fname_tokenizer: Path, fname_added_tokens: Path | None) -> None: self.bpe_tokenizer = json.loads(open(str(fname_tokenizer), encoding="utf-8").read()) - try: + if isinstance(self.bpe_tokenizer.get('model'), dict): self.vocab = self.bpe_tokenizer["model"]["vocab"] - except KeyError: + else: self.vocab = self.bpe_tokenizer added_tokens: dict[str, int] if fname_added_tokens is not None: From f3e2b4fa3f81a410ecb7dec929c259ef8d8dbb7d Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Wed, 7 Feb 2024 01:15:19 -0500 Subject: [PATCH 531/811] server : update `/props` with "total_slots" value (#5373) * include total "num_slots" in default_generation_settings_for_props * cleanup total_slots return value in /props endpoint * update /props endpoint docs with total_slots * remove num_slots from default_generation_settings_for_props * update /props endpoint section --- examples/server/README.md | 4 +++- examples/server/server.cpp | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 46d8f85ae..1db7cdf21 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -276,13 +276,15 @@ Notice that each `probs` is an array of length `n_probs`. { "assistant_name": "", "user_name": "", - "default_generation_settings": { ... } + "default_generation_settings": { ... }, + "total_slots": 1 } ``` - `assistant_name` - the required assistant name to generate the prompt in case you have specified a system prompt for all slots. - `user_name` - the required anti-prompt to generate the prompt in case you have specified a system prompt for all slots. - `default_generation_settings` - the default generation settings for the `/completion` endpoint, has the same fields as the `generation_settings` response object from the `/completion` endpoint. 
+- `total_slots` - the total number of slots for process requests (defined by `--parallel` option) - **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 9481ce6b1..eceda30d0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -432,7 +432,6 @@ struct llama_server_context } default_generation_settings_for_props = get_formated_generation(slots.front()); - default_generation_settings_for_props["num_slots"] = params.n_parallel; default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); @@ -2639,7 +2638,8 @@ int main(int argc, char **argv) json data = { { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() }, - { "default_generation_settings", llama.default_generation_settings_for_props } + { "default_generation_settings", llama.default_generation_settings_for_props }, + { "total_slots", llama.params.n_parallel } }; res.set_content(data.dump(), "application/json; charset=utf-8"); }); From 316c7faf7740fa98ea68f1445f4505810f706b9e Mon Sep 17 00:00:00 2001 From: runfuture Date: Wed, 7 Feb 2024 14:15:56 +0800 Subject: [PATCH 532/811] llama : add MiniCPM support (#5346) * support minicpm arch. * fix tab/space typo. 
* convert minicpm model via convert-hf-gguf.py * try to make tokenizer work * fix bug for quantize minicpm * fix for flake8 lint * remove convert-minicpm.py * fix for editorconfig * correct minicpm model type (size) * constants expanded for minicpm * Minor change of the constant names for minicpm --- convert-hf-to-gguf.py | 49 ++++++++++ gguf-py/gguf/constants.py | 21 +++++ llama.cpp | 190 +++++++++++++++++++++++++++++++++++++- 3 files changed, 259 insertions(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 5e343742d..829d68368 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -22,6 +22,8 @@ if 'NO_LOCAL_GGUF' not in os.environ: sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf +from convert import HfVocab + # check for any of the given keys in the dictionary and return the value of the first key found def get_key_opts(d, keys): @@ -205,6 +207,8 @@ class Model: return OrionModel if model_architecture == "InternLM2ForCausalLM": return InternLM2Model + if model_architecture == "MiniCPMForCausalLM": + return MiniCPMModel return Model def _is_model_safetensors(self) -> bool: @@ -258,6 +262,8 @@ class Model: return gguf.MODEL_ARCH.ORION if arch == "InternLM2ForCausalLM": return gguf.MODEL_ARCH.INTERNLM2 + if arch == "MiniCPMForCausalLM": + return gguf.MODEL_ARCH.MINICPM raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -402,6 +408,31 @@ class Model: special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) special_vocab.add_to_gguf(self.gguf_writer) + def _set_vocab_hf(self): + path = self.dir_model + added_tokens_path = self.dir_model + vocab = HfVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + tokens = [] + scores = [] + toktypes = [] + + for text, score, toktype in vocab.all_tokens(): + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + assert len(tokens) == vocab.vocab_size + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + class GPTNeoXModel(Model): def set_gguf_parameters(self): @@ -1041,6 +1072,24 @@ class MixtralModel(Model): self._set_vocab_sentencepiece() +class MiniCPMModel(Model): + def set_gguf_parameters(self): + block_count = self.hparams["num_hidden_layers"] + self.gguf_writer.add_name("MiniCPM") + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_file_type(self.ftype) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) + + def set_vocab(self): + self._set_vocab_hf() + + class QwenModel(Model): @staticmethod def token_bytes_to_string(b): diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index ed8e26f83..1cfd41c0b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -104,6 +104,7 @@ class MODEL_ARCH(IntEnum): CODESHELL = auto() 
ORION = auto() INTERNLM2 = auto() + MINICPM = auto() class MODEL_TENSOR(IntEnum): @@ -156,6 +157,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.CODESHELL: "codeshell", MODEL_ARCH.ORION: "orion", MODEL_ARCH.INTERNLM2: "internlm2", + MODEL_ARCH.MINICPM: "minicpm", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -464,6 +466,25 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.MINICPM: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_GATE_INP, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_GATE_EXP, + MODEL_TENSOR.FFN_DOWN_EXP, + MODEL_TENSOR.FFN_UP_EXP, + ], # TODO } diff --git a/llama.cpp b/llama.cpp index 65e399adc..f3c5146d1 100644 --- a/llama.cpp +++ b/llama.cpp @@ -205,6 +205,7 @@ enum llm_arch { LLM_ARCH_CODESHELL, LLM_ARCH_ORION, LLM_ARCH_INTERNLM2, + LLM_ARCH_MINICPM, LLM_ARCH_UNKNOWN, }; @@ -228,6 +229,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_CODESHELL, "codeshell" }, { LLM_ARCH_ORION, "orion" }, { LLM_ARCH_INTERNLM2, "internlm2" }, + { LLM_ARCH_MINICPM, "minicpm" }, }; enum llm_kv { @@ -690,6 +692,29 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_MINICPM, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" }, + { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" }, + { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -1390,6 +1415,7 @@ enum e_model { MODEL_UNKNOWN, MODEL_0_5B, MODEL_1B, + MODEL_2B, MODEL_3B, MODEL_4B, MODEL_7B, @@ -2748,6 +2774,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { case MODEL_1B: return "1B"; + case MODEL_2B: return "2B"; case MODEL_3B: return "3B"; case MODEL_7B: return "7B"; case MODEL_8B: return "8B"; @@ -2887,6 +2914,13 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_MINICPM: + { + switch (hparams.n_layer) { + case 40: model.type = e_model::MODEL_2B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; case LLM_ARCH_FALCON: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3524,13 +3558,16 @@ static bool llm_load_tensors( switch (model.arch) { case LLM_ARCH_LLAMA: case LLM_ARCH_REFACT: + case LLM_ARCH_MINICPM: { model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); - model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + if (model.arch != LLM_ARCH_MINICPM){ + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } } for (int i = 0; i < n_layer; ++i) { @@ -6781,6 +6818,153 @@ struct llm_build_context { return gf; } + // ref: https://arxiv.org/abs/2203.03466 + // https://github.com/ggerganov/llama.cpp/issues/5276#issuecomment-1925774738 + // based on the original build_llama() function + struct ggml_cgraph * build_minicpm() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + const int64_t n_embd = hparams.n_embd; + //TODO: if the model varies, these parameters need to be read from the model + const int64_t n_embd_base = 256; + const float scale_embd = 12.0f; + const float scale_depth = 1.4f; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // scale the input embeddings + inpL = ggml_scale(ctx0, inpL, scale_embd); + cb(inpL, "inp_scaled", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + // scale_res - scale 
the hidden states for residual connection + const float scale_res = scale_depth/sqrtf(float(n_layer)); + cur = ggml_scale(ctx0, cur, scale_res); + cb(cur, "hidden_scaled", -1); + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + // scale the hidden states for residual connection + cur = ggml_scale(ctx0, cur, scale_res); + cb(cur, "hidden_scaled_ffn", -1); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head scaling + const float scale_lmhead = float(n_embd_base)/float(n_embd); + cur = ggml_scale(ctx0, cur, scale_lmhead); + cb(cur, "lmhead_scaling", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.tok_embd, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph( @@ -6943,6 +7127,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_internlm2(); } break; + case LLM_ARCH_MINICPM: + { + result = llm.build_minicpm(); + } break; default: GGML_ASSERT(false); } From 9a697d842bc0cfce8268ebd2ba703ffc1c904f98 Mon Sep 17 00:00:00 2001 From: Ben Williams Date: Tue, 6 Feb 2024 22:16:48 -0800 Subject: [PATCH 533/811] readme : update ui list (#5354) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 34f2021f9..672512d18 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [iohub/collama](https://github.com/iohub/coLLaMA) - [janhq/jan](https://github.com/janhq/jan) (AGPL) - [nat/openplayground](https://github.com/nat/openplayground) +- [Faraday](https://faraday.dev/) (proprietary) - [LMStudio](https://lmstudio.ai/) (proprietary) - [LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) - [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) From ed0bf32290ee5b30ffad5becd99cbecef74aedd7 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Wed, 7 Feb 2024 06:21:30 +0000 Subject: [PATCH 534/811] readme : modernize (#5379) * first cleanup, update everything to Llama 2 and remove outdated content * Delete SHA256SUMS * make build instructions generic * recommend Q4_K_M quantization method * Update README.md --- README.md | 127 +++++++++++++++-------------------------------------- SHA256SUMS | 40 ----------------- 2 files changed, 36 insertions(+), 131 deletions(-) delete mode 100644 SHA256SUMS diff --git a/README.md b/README.md index 672512d18..0509b0ba1 100644 --- a/README.md +++ b/README.md @@ -33,17 +33,14 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others)
  • Build
  • BLAS Build
- • Prepare Data & Run
+ • Prepare and Quantize
+ • Run the quantized model
  • Memory/Disk Requirements
  • Quantization
  • Interactive mode
  • Constrained output with grammars
- • Instruction mode with Alpaca
- • Using OpenLLaMA
- • Using GPT4All
- • Using Pygmalion 7B & Metharme 7B
- • Obtaining the Facebook LLaMA original model and Stanford Alpaca model data
- • Verifying the model files
+ • Instruct mode
+ • Obtaining and using the Facebook LLaMA 2 model
  • Seminal papers and background on the models
  • Perplexity (measuring model quality)
  • Android
  • @@ -83,20 +80,16 @@ improved significantly thanks to many contributions. It is the main playground f **Supported models:** +Typically finetunes of the base models below are supported as well. + - [X] LLaMA 🦙 - [x] LLaMA 2 🦙🦙 -- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) +- [X] [Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1) - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [X] Falcon -- [X] [Alpaca](https://github.com/ggerganov/llama.cpp#instruction-mode-with-alpaca) -- [X] [GPT4All](https://github.com/ggerganov/llama.cpp#using-gpt4all) - [X] [Chinese LLaMA / Alpaca](https://github.com/ymcui/Chinese-LLaMA-Alpaca) and [Chinese LLaMA-2 / Alpaca-2](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2) - [X] [Vigogne (French)](https://github.com/bofenghuang/vigogne) -- [X] [Vicuna](https://github.com/ggerganov/llama.cpp/discussions/643#discussioncomment-5533894) - [X] [Koala](https://bair.berkeley.edu/blog/2023/04/03/koala/) -- [X] [OpenBuddy 🐶 (Multilingual)](https://github.com/OpenBuddy/OpenBuddy) -- [X] [Pygmalion/Metharme](#using-pygmalion-7b--metharme-7b) -- [X] [WizardLM](https://github.com/nlpxucan/WizardLM) - [X] [Baichuan 1 & 2](https://huggingface.co/models?search=baichuan-inc/Baichuan) + [derivations](https://huggingface.co/hiyouga/baichuan-7b-sft) - [X] [Aquila 1 & 2](https://huggingface.co/models?search=BAAI/Aquila) - [X] [Starcoder models](https://github.com/ggerganov/llama.cpp/pull/3187) @@ -166,7 +159,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: Here is a typical run using LLaMA v2 13B on M2 Ultra: -```java +``` $ make -j && ./main -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e I llama.cpp build info: I UNAME_S: Darwin @@ -250,7 +243,7 @@ https://user-images.githubusercontent.com/1991296/224442907-7693d4be-acaa-4e01-8 ## Usage -Here are the end-to-end binary build and model conversion steps for the LLaMA-7B model. +Here are the end-to-end binary build and model conversion steps for most supported models. ### Get the Code @@ -635,7 +628,7 @@ Building the program with BLAS support may lead to some performance improvements **Without docker**: - Firstly, you need to make sure you installed [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html) + Firstly, you need to make sure you have installed [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html) For example, on Ubuntu 22.04 (jammy), use the command below: @@ -648,6 +641,8 @@ Building the program with BLAS support may lead to some performance improvements vulkaninfo ``` + Alternatively your package manager might be able to provide the appropiate libraries. For example for Ubuntu 22.04 you can install `libvulkan-dev` instead. + Then, build llama.cpp using the cmake command below: ```bash @@ -662,34 +657,42 @@ Building the program with BLAS support may lead to some performance improvements # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32 ``` -### Prepare Data & Run +### Prepare and Quantize + +To obtain the official LLaMA 2 weights please see the Obtaining and using the Facebook LLaMA 2 model section. There is also a large selection of pre-quantized `gguf` models available on Hugging Face. 
```bash -# obtain the original LLaMA model weights and place them in ./models +# obtain the official LLaMA model weights and place them in ./models ls ./models -65B 30B 13B 7B tokenizer_checklist.chk tokenizer.model +llama-2-7b tokenizer_checklist.chk tokenizer.model # [Optional] for models using BPE tokenizers ls ./models -65B 30B 13B 7B vocab.json + vocab.json +# [Optional] for PyTorch .bin models like Mistral-7B +ls ./models + # install Python dependencies python3 -m pip install -r requirements.txt -# convert the 7B model to ggml FP16 format -python3 convert.py models/7B/ +# convert the model to ggml FP16 format +python3 convert.py models/mymodel/ # [Optional] for models using BPE tokenizers -python convert.py models/7B/ --vocabtype bpe +python convert.py models/mymodel/ --vocabtype bpe -# quantize the model to 4-bits (using q4_0 method) -./quantize ./models/7B/ggml-model-f16.gguf ./models/7B/ggml-model-q4_0.gguf q4_0 +# quantize the model to 4-bits (using Q4_K_M method) +./quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M -# update the gguf filetype to current if older version is unsupported by another application -./quantize ./models/7B/ggml-model-q4_0.gguf ./models/7B/ggml-model-q4_0-v2.gguf COPY +# update the gguf filetype to current version if older version is now unsupported +./quantize ./models/mymodel/ggml-model-Q4_K_M.gguf ./models/mymodel/ggml-model-Q4_K_M-v2.gguf COPY +``` +### Run the quantized model -# run the inference -./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 +```bash +# start inference on a gguf model +./main -m ./models/mymodel/ggml-model-Q4_K_M.gguf -n 128 ``` When running the larger models, make sure you have enough disk space to store all the intermediate files. @@ -710,7 +713,7 @@ From the unzipped folder, open a terminal/cmd window here and place a pre-conver As the models are currently fully loaded into memory, you will need adequate disk space to save them and sufficient RAM to load them. At the moment, memory and disk requirements are the same. -| Model | Original size | Quantized size (4-bit) | +| Model | Original size | Quantized size (Q4_0) | |------:|--------------:|-----------------------:| | 7B | 13 GB | 3.9 GB | | 13B | 24 GB | 7.8 GB | @@ -826,9 +829,9 @@ The `grammars/` folder contains a handful of sample grammars. To write your own, For authoring more complex JSON grammars, you can also check out https://grammar.intrinsiclabs.ai/, a browser app that lets you write TypeScript interfaces which it compiles to GBNF grammars that you can save for local use. Note that the app is built and maintained by members of the community, please file any issues or FRs on [its repo](http://github.com/intrinsiclabsai/gbnfgen) and not this one. -### Instruction mode with Alpaca +### Instruct mode -1. First, download the `ggml` Alpaca model into the `./models` folder +1. First, download and place the `ggml` model into the `./models` folder 2. Run the `main` tool like this: ``` @@ -854,50 +857,6 @@ cadaver, cauliflower, cabbage (vegetable), catalpa (tree) and Cailleach. > ``` -### Using [OpenLLaMA](https://github.com/openlm-research/open_llama) - -OpenLLaMA is an openly licensed reproduction of Meta's original LLaMA model. It uses the same architecture and is a drop-in replacement for the original LLaMA weights. 
- -- Download the [3B](https://huggingface.co/openlm-research/open_llama_3b), [7B](https://huggingface.co/openlm-research/open_llama_7b), or [13B](https://huggingface.co/openlm-research/open_llama_13b) model from Hugging Face. -- Convert the model to ggml FP16 format using `python convert.py ` - -### Using [GPT4All](https://github.com/nomic-ai/gpt4all) - -*Note: these instructions are likely obsoleted by the GGUF update* - -- Obtain the `tokenizer.model` file from LLaMA model and put it to `models` -- Obtain the `added_tokens.json` file from Alpaca model and put it to `models` -- Obtain the `gpt4all-lora-quantized.bin` file from GPT4All model and put it to `models/gpt4all-7B` -- It is distributed in the old `ggml` format which is now obsoleted -- You have to convert it to the new format using `convert.py`: - -```bash -python3 convert.py models/gpt4all-7B/gpt4all-lora-quantized.bin -``` - -- You can now use the newly generated `models/gpt4all-7B/ggml-model-q4_0.bin` model in exactly the same way as all other models - -- The newer GPT4All-J model is not yet supported! - -### Using Pygmalion 7B & Metharme 7B - -- Obtain the [LLaMA weights](#obtaining-the-facebook-llama-original-model-and-stanford-alpaca-model-data) -- Obtain the [Pygmalion 7B](https://huggingface.co/PygmalionAI/pygmalion-7b/) or [Metharme 7B](https://huggingface.co/PygmalionAI/metharme-7b) XOR encoded weights -- Convert the LLaMA model with [the latest HF convert script](https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py) -- Merge the XOR files with the converted LLaMA weights by running the [xor_codec](https://huggingface.co/PygmalionAI/pygmalion-7b/blob/main/xor_codec.py) script -- Convert to `ggml` format using the `convert.py` script in this repo: -```bash -python3 convert.py pygmalion-7b/ --outtype q4_1 -``` -> The Pygmalion 7B & Metharme 7B weights are saved in [bfloat16](https://en.wikipedia.org/wiki/Bfloat16_floating-point_format) precision. If you wish to convert to `ggml` without quantizating, please specify the `--outtype` as `f32` instead of `f16`. - - -### Obtaining the Facebook LLaMA original model and Stanford Alpaca model data - -- **Under no circumstances should IPFS, magnet links, or any other links to model downloads be shared anywhere in this repository, including in issues, discussions, or pull requests. They will be immediately deleted.** -- The LLaMA models are officially distributed by Facebook and will **never** be provided through this repository. -- Refer to [Facebook's LLaMA repository](https://github.com/facebookresearch/llama/pull/73/files) if you need to request access to the model data. - ### Obtaining and using the Facebook LLaMA 2 model - Refer to [Facebook's LLaMA download page](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) if you want to access the model data. @@ -909,20 +868,6 @@ python3 convert.py pygmalion-7b/ --outtype q4_1 - [LLaMA 2 13B chat](https://huggingface.co/TheBloke/Llama-2-13B-chat-GGUF) - [LLaMA 2 70B chat](https://huggingface.co/TheBloke/Llama-2-70B-chat-GGUF) -### Verifying the model files - -Please verify the [sha256 checksums](SHA256SUMS) of all downloaded model files to confirm that you have the correct model data files before creating an issue relating to your model files. 
-- The following python script will verify if you have all possible latest files in your self-installed `./models` subdirectory: - -```bash -# run the verification script -./scripts/verify-checksum-models.py -``` - -- On linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory: - - On Linux: `sha256sum --ignore-missing -c SHA256SUMS` - - on macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS` - ### Seminal papers and background on the models If your issue is with model generation quality, then please at least scan the following links and papers to understand the limitations of LLaMA models. This is especially important when choosing an appropriate model size and appreciating both the significant and subtle differences between LLaMA models and ChatGPT: diff --git a/SHA256SUMS b/SHA256SUMS deleted file mode 100644 index ca4d5a4a5..000000000 --- a/SHA256SUMS +++ /dev/null @@ -1,40 +0,0 @@ -700df0d3013b703a806d2ae7f1bfb8e59814e3d06ae78be0c66368a50059f33d models/7B/consolidated.00.pth -666a4bb533b303bdaf89e1b6a3b6f93535d868de31d903afdc20983dc526c847 models/7B/ggml-model-f16.bin -ec2f2d1f0dfb73b72a4cbac7fa121abbe04c37ab327125a38248f930c0f09ddf models/7B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q5_1.bin -7e89e242ddc0dd6f060b43ca219ce8b3e8f08959a72cb3c0855df8bb04d46265 models/7B/params.json -745bf4e29a4dd6f411e72976d92b452da1b49168a4f41c951cfcc8051823cf08 models/13B/consolidated.00.pth -d5ccbcc465c71c0de439a5aeffebe8344c68a519bce70bc7f9f92654ee567085 models/13B/consolidated.01.pth -2b206e9b21fb1076f11cafc624e2af97c9e48ea09312a0962153acc20d45f808 models/13B/ggml-model-f16.bin -fad169e6f0f575402cf75945961cb4a8ecd824ba4da6be2af831f320c4348fa5 models/13B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q5_1.bin -4ab77bec4d4405ccb66a97b282574c89a94417e3c32e5f68f37e2876fc21322f models/13B/params.json -e23294a58552d8cdec5b7e8abb87993b97ea6eced4178ff2697c02472539d067 models/30B/consolidated.00.pth -4e077b7136c7ae2302e954860cf64930458d3076fcde9443f4d0e939e95903ff models/30B/consolidated.01.pth -24a87f01028cbd3a12de551dcedb712346c0b5cbdeff1454e0ddf2df9b675378 models/30B/consolidated.02.pth -1adfcef71420886119544949767f6a56cb6339b4d5fcde755d80fe68b49de93b models/30B/consolidated.03.pth -7e1b524061a9f4b27c22a12d6d2a5bf13b8ebbea73e99f218809351ed9cf7d37 models/30B/ggml-model-f16.bin -d2a441403944819492ec8c2002cc36fa38468149bfb4b7b4c52afc7bd9a7166d models/30B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q5_1.bin -2c07118ea98d69dbe7810d88520e30288fa994751b337f8fca02b171955f44cb models/30B/params.json -135c563f6b3938114458183afb01adc9a63bef3d8ff7cccc3977e5d3664ecafe models/65B/consolidated.00.pth 
-9a600b37b19d38c7e43809485f70d17d1dc12206c07efa83bc72bb498a568bde models/65B/consolidated.01.pth -e7babf7c5606f165a3756f527cb0fedc4f83e67ef1290391e52fb1cce5f26770 models/65B/consolidated.02.pth -73176ffb426b40482f2aa67ae1217ef79fbbd1fff5482bae5060cdc5a24ab70e models/65B/consolidated.03.pth -882e6431d0b08a8bc66261a0d3607da21cbaeafa96a24e7e59777632dbdac225 models/65B/consolidated.04.pth -a287c0dfe49081626567c7fe87f74cce5831f58e459b427b5e05567641f47b78 models/65B/consolidated.05.pth -72b4eba67a1a3b18cb67a85b70f8f1640caae9b40033ea943fb166bd80a7b36b models/65B/consolidated.06.pth -d27f5b0677d7ff129ceacd73fd461c4d06910ad7787cf217b249948c3f3bc638 models/65B/consolidated.07.pth -60758f2384d74e423dffddfd020ffed9d3bb186ebc54506f9c4a787d0f5367b0 models/65B/ggml-model-f16.bin -cde053439fa4910ae454407e2717cc46cc2c2b4995c00c93297a2b52e790fa92 models/65B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q5_1.bin -999ed1659b469ccc2a941714c0a9656fa571d17c9f7c8c7589817ca90edef51b models/65B/params.json -9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347 models/tokenizer.model From ee1628bdfea8b0079fed0140ac2f00ef1b465b57 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Wed, 7 Feb 2024 07:54:50 +0100 Subject: [PATCH 535/811] Basic Vulkan Multi-GPU implementation (#5321) * Initial Vulkan multi-gpu implementation Move most global variables into backend context * Add names to backend device functions * Add further missing cleanup code * Reduce code duplication in tensor split layer assignment * generalize LLAMA_SPLIT_LAYER for all backends, do not expose device count and memory in llama.h * Only do device info print in the beginning and initialize one backend for cpu assist Add missing cleanup code * Rework backend memory management to make sure devices and buffers get properly allocated and freed * Rename cpu assist free function --------- Co-authored-by: slaren --- common/common.cpp | 8 +- ggml-vulkan.cpp | 2639 ++++++++++++++++++++++++++------------------- ggml-vulkan.h | 23 +- ggml.c | 14 +- llama.cpp | 69 +- 5 files changed, 1587 insertions(+), 1166 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 8c1a60583..e0082a823 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -46,6 +46,10 @@ #define GGML_USE_CUBLAS_SYCL #endif +#if (defined(GGML_USE_CUBLAS) || defined(GGML_USE_SYCL)) || defined(GGML_USE_VULKAN) +#define GGML_USE_CUBLAS_SYCL_VULKAN +#endif + int32_t get_num_physical_cores() { #ifdef __linux__ // enumerate the set of thread siblings, num entries is num cores @@ -660,8 +664,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.tensor_split[i] = 0.0f; } } -#ifndef GGML_USE_CUBLAS_SYCL - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS/SYCL. Setting a tensor split has no effect.\n"); +#ifndef GGML_USE_CUBLAS_SYCL_VULKAN + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS/SYCL/Vulkan. 
Setting a tensor split has no effect.\n"); #endif // GGML_USE_CUBLAS_SYCL } else if (arg == "--no-mmap") { params.use_mmap = false; diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 14fb89e09..9e2846ee4 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -15,6 +15,7 @@ #include #include #include +#include #include "ggml.h" #include "ggml-backend-impl.h" @@ -37,6 +38,8 @@ #define GGML_VK_MAX_NODES 8192 +#define MAX_VK_BUFFERS 256 + #ifndef K_QUANTS_PER_ITERATION #define K_QUANTS_PER_ITERATION 1 #else @@ -53,15 +56,68 @@ static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUA } \ } while (0) -struct vk_buffer { +struct ggml_backend_vk_context; + +struct vk_queue { + uint32_t queue_family_index; + vk::Queue queue; + vk::CommandPool pool; + uint32_t cmd_buffer_idx; + std::vector cmd_buffers; + + vk::PipelineStageFlags stage_flags; +}; + +struct vk_device { + vk::PhysicalDevice physical_device; + vk::PhysicalDeviceProperties properties; + std::string name; + uint64_t max_memory_allocation_size; + bool fp16; + vk::Device device; + uint32_t vendor_id; + vk_queue compute_queue; + vk_queue transfer_queue; + bool single_queue; + uint32_t descriptor_set_mode; + uint32_t subgroup_size; + bool uma; + + ~vk_device() { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "destroy device " << name << std::endl; +#endif + device.destroy(); + } +}; + +struct vk_buffer_struct { vk::Buffer buffer; vk::DeviceMemory device_memory; vk::MemoryPropertyFlags memory_property_flags; void * ptr; size_t size = 0; - uint32_t qf_owner; + + ggml_backend_vk_context * ctx; + + std::shared_ptr device; + + ~vk_buffer_struct() { + if (size == 0) { + return; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "~vk_buffer_struct(" << buffer << ", " << size << ")" << std::endl; +#endif + + device->device.freeMemory(device_memory); + device->device.destroyBuffer(buffer); + } }; +typedef std::shared_ptr vk_buffer; +typedef std::weak_ptr vk_buffer_ref; + struct vk_subbuffer { vk_buffer buffer; uint64_t offset; @@ -70,6 +126,7 @@ struct vk_subbuffer { struct vk_pipeline { std::string name; + vk::ShaderModule shader_module; vk::DescriptorSetLayout dsl; std::vector descriptor_pools; std::vector descriptor_sets; @@ -82,16 +139,6 @@ struct vk_pipeline { uint32_t align; }; -struct vk_queue { - uint32_t queue_family_index; - vk::Queue queue; - vk::CommandPool pool; - uint32_t cmd_buffer_idx; - std::vector cmd_buffers; - - vk::PipelineStageFlags stage_flags; -}; - struct vk_semaphore { vk::Semaphore s; uint64_t value; @@ -105,20 +152,6 @@ struct vk_submission { typedef std::vector vk_sequence; -struct vk_device { - vk::PhysicalDevice physical_device; - vk::PhysicalDeviceProperties properties; - uint64_t max_memory_allocation_size; - bool fp16; - vk::Device device; - uint32_t vendor_id; - vk_queue compute_queue; - vk_queue transfer_queue; - uint32_t descriptor_set_mode; - uint32_t subgroup_size; - bool uma; -}; - struct vk_op_push_constants { uint32_t KX; uint32_t KY; @@ -190,13 +223,13 @@ struct ggml_tensor_extra_gpu { size_t ctx_idx; - vk_buffer buffer_gpu; + vk_buffer_ref buffer_gpu; uint64_t offset; void reset() { ready = false; ctx_idx = 0; - buffer_gpu.size = 0; + buffer_gpu.reset(); offset = 0; } }; @@ -210,69 +243,96 @@ struct ggml_vk_garbage_collector { std::vector contexts; }; -typedef void (*ggml_vk_func_t)(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst); +struct ggml_backend_vk_context { + std::string name; -vk::Instance vk_instance; -vk_device vk_device; -vk_pipeline 
vk_pipeline_matmul_f32_l, vk_pipeline_matmul_f32_m, vk_pipeline_matmul_f32_s; -vk_pipeline vk_pipeline_matmul_f32_aligned_l, vk_pipeline_matmul_f32_aligned_m, vk_pipeline_matmul_f32_aligned_s; -vk_pipeline vk_pipeline_matmul_f16_l, vk_pipeline_matmul_f16_m, vk_pipeline_matmul_f16_s; -vk_pipeline vk_pipeline_matmul_f16_aligned_l, vk_pipeline_matmul_f16_aligned_m, vk_pipeline_matmul_f16_aligned_s; -vk_pipeline vk_pipeline_matmul_f16_f32_l, vk_pipeline_matmul_f16_f32_m, vk_pipeline_matmul_f16_f32_s; -vk_pipeline vk_pipeline_matmul_f16_f32_aligned_l, vk_pipeline_matmul_f16_f32_aligned_m, vk_pipeline_matmul_f16_f32_aligned_s; -vk_pipeline vk_pipeline_matmul_split_k_reduce; -vk_pipeline vk_pipeline_dequant[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_mul_mat_vec_p021_f16_f32; -vk_pipeline vk_pipeline_mul_mat_vec_nc_f16_f32; -vk_pipeline vk_pipeline_get_rows[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_get_rows_f32[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_mul_f32; -vk_pipeline vk_pipeline_add_f32; -vk_pipeline vk_pipeline_scale_f32; -vk_pipeline vk_pipeline_sqr_f32; -vk_pipeline vk_pipeline_clamp_f32; -vk_pipeline vk_pipeline_cpy_f32_f32, vk_pipeline_cpy_f32_f16, vk_pipeline_cpy_f16_f16; -vk_pipeline vk_pipeline_norm_f32; -vk_pipeline vk_pipeline_rms_norm_f32; -vk_pipeline vk_pipeline_gelu_f32; -vk_pipeline vk_pipeline_silu_f32; -vk_pipeline vk_pipeline_relu_f32; -vk_pipeline vk_pipeline_diag_mask_inf_f32; -vk_pipeline vk_pipeline_soft_max_f32; -vk_pipeline vk_pipeline_rope_f32, vk_pipeline_rope_f16; -vk_pipeline vk_pipeline_rope_neox_f32, vk_pipeline_rope_neox_f16; + std::weak_ptr device; + vk_pipeline pipeline_matmul_f32_l, pipeline_matmul_f32_m, pipeline_matmul_f32_s; + vk_pipeline pipeline_matmul_f32_aligned_l, pipeline_matmul_f32_aligned_m, pipeline_matmul_f32_aligned_s; + vk_pipeline pipeline_matmul_f16_l, pipeline_matmul_f16_m, pipeline_matmul_f16_s; + vk_pipeline pipeline_matmul_f16_aligned_l, pipeline_matmul_f16_aligned_m, pipeline_matmul_f16_aligned_s; + vk_pipeline pipeline_matmul_f16_f32_l, pipeline_matmul_f16_f32_m, pipeline_matmul_f16_f32_s; + vk_pipeline pipeline_matmul_f16_f32_aligned_l, pipeline_matmul_f16_f32_aligned_m, pipeline_matmul_f16_f32_aligned_s; + vk_pipeline pipeline_matmul_split_k_reduce; + vk_pipeline pipeline_dequant[VK_NUM_TYPES]; + vk_pipeline pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; + vk_pipeline pipeline_mul_mat_vec_p021_f16_f32; + vk_pipeline pipeline_mul_mat_vec_nc_f16_f32; + vk_pipeline pipeline_get_rows[VK_NUM_TYPES]; + vk_pipeline pipeline_get_rows_f32[VK_NUM_TYPES]; + vk_pipeline pipeline_mul_f32; + vk_pipeline pipeline_add_f32; + vk_pipeline pipeline_scale_f32; + vk_pipeline pipeline_sqr_f32; + vk_pipeline pipeline_clamp_f32; + vk_pipeline pipeline_cpy_f32_f32, pipeline_cpy_f32_f16, pipeline_cpy_f16_f16; + vk_pipeline pipeline_norm_f32; + vk_pipeline pipeline_rms_norm_f32; + vk_pipeline pipeline_gelu_f32; + vk_pipeline pipeline_silu_f32; + vk_pipeline pipeline_relu_f32; + vk_pipeline pipeline_diag_mask_inf_f32; + vk_pipeline pipeline_soft_max_f32; + vk_pipeline pipeline_rope_f32, pipeline_rope_f16; + vk_pipeline pipeline_rope_neox_f32, pipeline_rope_neox_f16; -static size_t vk_semaphore_idx, vk_event_idx; -static ggml_vk_garbage_collector vk_gc; -static std::vector> vk_pinned_memory; -static size_t vk_prealloc_size_qx, vk_prealloc_size_qy, vk_prealloc_size_x, vk_prealloc_size_y, vk_prealloc_size_split_k; -static vk_buffer vk_prealloc_qx, vk_prealloc_qy, vk_prealloc_x, vk_prealloc_y, 
vk_prealloc_split_k; -static vk::Fence vk_fence; -static vk_buffer vk_staging; -static size_t vk_staging_size; -static size_t vk_staging_offset; -static vk_buffer vk_sync_staging; + size_t semaphore_idx, event_idx; + ggml_vk_garbage_collector gc; + std::vector> pinned_memory; + size_t prealloc_size_qx, prealloc_size_qy, prealloc_size_x, prealloc_size_y, prealloc_size_split_k; + vk_buffer prealloc_qx, prealloc_qy, prealloc_x, prealloc_y, prealloc_split_k; + vk::Fence fence; + vk_buffer staging; + size_t staging_size; + size_t staging_offset; + vk_buffer sync_staging; -static vk_context * vk_ctx; -static vk_context * vk_transfer_ctx; + vk_buffer buffer_pool[MAX_VK_BUFFERS]; -static bool vk_disable; + vk_context * compute_ctx; + vk_context * transfer_ctx; + + bool disable; + bool initialized; + + size_t idx; +}; + +struct vk_instance { + vk::Instance instance; + + std::vector device_indices; + + std::shared_ptr devices[GGML_VK_MAX_DEVICES]; + ggml_backend_t backends[GGML_VK_MAX_DEVICES]; + ggml_backend_vk_context contexts[GGML_VK_MAX_DEVICES]; + ggml_backend_buffer_type buffer_types[GGML_VK_MAX_DEVICES]; + bool initialized[GGML_VK_MAX_DEVICES]; +}; #ifdef GGML_VULKAN_CHECK_RESULTS -size_t vk_skip_checks; -size_t vk_output_tensor; +static size_t vk_skip_checks; +static size_t vk_output_tensor; + +static void ggml_vk_print_tensor(ggml_backend * ctx, const ggml_tensor * tensor, const char * name); +static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor); +static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor); #endif -static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { +typedef void (*ggml_vk_func_t)(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst); + +static bool vk_instance_initialized = false; +static vk_instance vk_instance; + +GGML_CALL static void ggml_backend_vk_free(ggml_backend_t backend); + +static void ggml_vk_create_pipeline(ggml_backend_vk_context * ctx, vk_pipeline& pipeline, const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_pipeline(" << name << ", " << entrypoint << ", " << parameter_count << ", " << push_constant_size << ", (" << wg_denoms[0] << "," << wg_denoms[1] << "," << wg_denoms[2] << "), specialization_constants, " << align << ")" << std::endl; #endif GGML_ASSERT(parameter_count > 0); GGML_ASSERT(wg_denoms[0] > 0 && wg_denoms[1] > 0 && wg_denoms[2] > 0); // NOLINT - vk_pipeline pipeline; - pipeline.name = name; pipeline.parameter_count = parameter_count; pipeline.push_constant_size = push_constant_size; @@ -280,7 +340,7 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s pipeline.align = align; vk::ShaderModuleCreateInfo shader_module_create_info({}, spv_size, reinterpret_cast(spv_data)); - vk::ShaderModule shader_module = vk_device.device.createShaderModule(shader_module_create_info); + pipeline.shader_module = ctx->device.lock()->device.createShaderModule(shader_module_create_info); std::vector 
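The new `vk_instance` struct above effectively turns the old single-device globals into a small per-device registry: `device_indices` maps a backend slot to a physical-device number, and `contexts`, `backends`, `buffer_types`, and `initialized` are parallel per-slot arrays that are filled lazily. A rough sketch of that lookup pattern, using hypothetical `Registry`/`Slot` names rather than anything from the patch:

```cpp
#include <array>
#include <cstddef>
#include <vector>

constexpr size_t MAX_DEVICES = 16;            // stands in for GGML_VK_MAX_DEVICES

struct Slot {                                 // one backend slot: context, backend, buffer type
    bool   initialized     = false;
    size_t physical_device = 0;
};

struct Registry {                             // rough analogue of vk_instance
    std::vector<size_t> device_indices;       // from GGML_VK_VISIBLE_DEVICES, or just {0}
    std::array<Slot, MAX_DEVICES> slots{};

    Slot & get(size_t idx) {
        Slot & s = slots[idx];
        if (!s.initialized) {                 // lazy init, like vk_instance.initialized[idx]
            s.physical_device = device_indices[idx];
            s.initialized     = true;
        }
        return s;
    }
};
```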
dsl_binding; std::vector dsl_binding_flags; @@ -301,17 +361,17 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s {}, dsl_binding); descriptor_set_layout_create_info.setPNext(&dslbfci); - pipeline.dsl = vk_device.device.createDescriptorSetLayout(descriptor_set_layout_create_info); + pipeline.dsl = ctx->device.lock()->device.createDescriptorSetLayout(descriptor_set_layout_create_info); // Check if device supports multiple descriptors per pool - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { const uint32_t alloc_count = 2; // Try allocating multiple sets from one pool // This fails on AMD for some reason, so add a fall back to allocating one pool per set vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, alloc_count, descriptor_pool_size); - vk::DescriptorPool pool = vk_device.device.createDescriptorPool(descriptor_pool_create_info); + vk::DescriptorPool pool = ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info); std::vector layouts(alloc_count); for (uint32_t i = 0; i < alloc_count; i++) { @@ -319,24 +379,24 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s } try { vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pool, alloc_count, layouts.data()); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); } catch(vk::OutOfPoolMemoryError const&) { - vk_device.descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; + ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; } - vk_device.device.destroyDescriptorPool(pool); + ctx->device.lock()->device.destroyDescriptorPool(pool); } - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 128, descriptor_pool_size); - pipeline.descriptor_pools.push_back(vk_device.device.createDescriptorPool(descriptor_pool_create_info)); + pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); } pipeline.descriptor_set_idx = 0; vk::PipelineLayoutCreateInfo pipeline_layout_create_info(vk::PipelineLayoutCreateFlags(), pipeline.dsl, pcr); - pipeline.layout = vk_device.device.createPipelineLayout(pipeline_layout_create_info); + pipeline.layout = ctx->device.lock()->device.createPipelineLayout(pipeline_layout_create_info); std::vector specialization_entries(specialization_constants.size()); @@ -356,41 +416,45 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s vk::PipelineShaderStageCreateInfo pipeline_shader_create_info( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eCompute, - shader_module, + pipeline.shader_module, entrypoint.c_str(), &specialization_info); vk::ComputePipelineCreateInfo compute_pipeline_create_info( vk::PipelineCreateFlags(), pipeline_shader_create_info, pipeline.layout); - pipeline.pipeline = vk_device.device.createComputePipeline(VK_NULL_HANDLE, 
compute_pipeline_create_info).value; + pipeline.pipeline = ctx->device.lock()->device.createComputePipeline(VK_NULL_HANDLE, compute_pipeline_create_info).value; - return pipeline; + ctx->gc.pipelines.push_back(&pipeline); } -static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uint32_t n) { +static void ggml_vk_destroy_pipeline(ggml_backend_vk_context * ctx, vk_pipeline * pipeline) { + for (auto& pool : pipeline->descriptor_pools) { + ctx->device.lock()->device.destroyDescriptorPool(pool); + } + pipeline->descriptor_pools.clear(); + pipeline->descriptor_sets.clear(); + pipeline->descriptor_set_idx = 0; + + ctx->device.lock()->device.destroyDescriptorSetLayout(pipeline->dsl); + + ctx->device.lock()->device.destroyPipelineLayout(pipeline->layout); + + ctx->device.lock()->device.destroyShaderModule(pipeline->shader_module); + + ctx->device.lock()->device.destroyPipeline(pipeline->pipeline); +} + +static void ggml_pipeline_allocate_descriptor_sets(ggml_backend_vk_context * ctx, vk_pipeline& pipeline, uint32_t n) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; + std::cerr << "ggml_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; #endif - // Check if gc already contains pipeline before adding it - bool gc_found = false; - for (auto * pl : vk_gc.pipelines) { - if (&pipeline == pl) { - gc_found = true; - break; - } - } - - if (!gc_found) { - vk_gc.pipelines.push_back(&pipeline); - } - if (pipeline.descriptor_sets.size() >= pipeline.descriptor_set_idx + n) { // Enough descriptors are available return; } - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { const uint32_t alloc_count = pipeline.descriptor_set_idx + n - pipeline.descriptor_sets.size(); std::vector layouts(alloc_count); @@ -398,29 +462,29 @@ static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uin layouts[i] = pipeline.dsl; } vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[0], alloc_count, layouts.data()); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); pipeline.descriptor_sets.insert(pipeline.descriptor_sets.end(), sets.begin(), sets.end()); } else { for (uint32_t i = pipeline.descriptor_sets.size(); i < pipeline.descriptor_set_idx + n; i++) { vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 1, descriptor_pool_size); - pipeline.descriptor_pools.push_back(vk_device.device.createDescriptorPool(descriptor_pool_create_info)); + pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[i], 1, &pipeline.dsl); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); pipeline.descriptor_sets.push_back(sets[0]); } } } -static void ggml_vk_pipeline_cleanup(vk_pipeline& pipeline) { +static void ggml_pipeline_cleanup(vk_pipeline& pipeline) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pipeline_cleanup(" 
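The descriptor-set handling above probes the driver once: it tries to allocate two sets from a single pool and, if that throws `vk::OutOfPoolMemoryError` (which reportedly happens on some AMD drivers), it switches `descriptor_set_mode` to one pool per set. The probe-and-fallback shape reduces to something like the following sketch, where `trial_allocate` is a hypothetical stand-in for the real `allocateDescriptorSets` call:

```cpp
#include <iostream>
#include <stdexcept>

enum class PoolMode { Single, Multi };

// hypothetical stand-in for the trial allocateDescriptorSets() call
static void trial_allocate(bool driver_supports_multi) {
    if (!driver_supports_multi) {
        throw std::runtime_error("out of pool memory"); // vk::OutOfPoolMemoryError in the real code
    }
}

static PoolMode detect_pool_mode(bool driver_supports_multi) {
    try {
        trial_allocate(driver_supports_multi);
        return PoolMode::Multi;   // one pool can serve many sets (128 per pool in the patch)
    } catch (const std::runtime_error &) {
        return PoolMode::Single;  // fall back to one descriptor pool per set
    }
}

int main() {
    std::cout << (detect_pool_mode(true)  == PoolMode::Multi)  << "\n"; // 1
    std::cout << (detect_pool_mode(false) == PoolMode::Single) << "\n"; // 1
    return 0;
}
```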
<< pipeline.name << ")" << std::endl; + std::cerr << "ggml_pipeline_cleanup(" << pipeline.name << ")" << std::endl; #endif pipeline.descriptor_set_idx = 0; } -static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { +static vk::CommandBuffer ggml_vk_create_cmd_buffer(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_cmd_buffer()" << std::endl; #endif @@ -433,7 +497,7 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { q.pool, vk::CommandBufferLevel::ePrimary, 1); - const std::vector cmd_buffers = vk_device.device.allocateCommandBuffers(command_buffer_alloc_info); + const std::vector cmd_buffers = ctx->device.lock()->device.allocateCommandBuffers(command_buffer_alloc_info); auto buf = cmd_buffers.front(); q.cmd_buffers.push_back(buf); @@ -442,24 +506,17 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { return buf; } -static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { +static vk_submission ggml_vk_create_submission(ggml_backend_vk_context * ctx, vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_submission()" << std::endl; #endif vk_submission s; - s.buffer = ggml_vk_create_cmd_buffer(q); + s.buffer = ggml_vk_create_cmd_buffer(ctx, q); s.wait_semaphores = std::move(wait_semaphores); s.signal_semaphores = std::move(signal_semaphores); return s; } -static vk_sequence ggml_vk_create_sequence_1(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_create_sequence_1()" << std::endl; -#endif - return { ggml_vk_create_submission(q, std::move(wait_semaphores), std::move(signal_semaphores)) }; -} - static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_submit(" << ctx->seqs.size() << ", " << fence << ")" << std::endl; @@ -578,89 +635,89 @@ static uint32_t ggml_vk_find_queue_family_index(std::vectordevice.lock()->device.createCommandPool(command_pool_create_info_compute); q.cmd_buffer_idx = 0; - q.queue = vk_device.device.getQueue(queue_family_index, queue_index); + q.queue = ctx->device.lock()->device.getQueue(queue_family_index, queue_index); q.stage_flags = stage_flags; - - return q; } -static vk_context * ggml_vk_create_context(vk_queue& q) { +static vk_context * ggml_vk_create_context(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_context()" << std::endl; #endif - vk_gc.contexts.emplace_back(); - vk_context * result = &vk_gc.contexts[vk_gc.contexts.size() - 1]; + ctx->gc.contexts.emplace_back(); + vk_context * result = &ctx->gc.contexts[ctx->gc.contexts.size() - 1]; memset((void *) result, 0, sizeof(vk_context)); - result->idx = vk_gc.contexts.size() - 1; + result->idx = ctx->gc.contexts.size() - 1; result->q = &q; return result; } -static vk_semaphore * ggml_vk_create_binary_semaphore() { +static vk_semaphore * ggml_vk_create_binary_semaphore(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_timeline_semaphore()" << std::endl; #endif vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eBinary, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = vk_device.device.createSemaphore(ci); - vk_gc.semaphores.push_back({ semaphore, 0 }); - return &vk_gc.semaphores[vk_gc.semaphores.size() - 1]; + vk::Semaphore semaphore = 
ctx->device.lock()->device.createSemaphore(ci); + ctx->gc.semaphores.push_back({ semaphore, 0 }); + return &ctx->gc.semaphores[ctx->gc.semaphores.size() - 1]; } -static vk_semaphore * ggml_vk_create_timeline_semaphore() { +static vk_semaphore * ggml_vk_create_timeline_semaphore(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_timeline_semaphore()" << std::endl; #endif - if (vk_semaphore_idx >= vk_gc.tl_semaphores.size()) { + if (ctx->semaphore_idx >= ctx->gc.tl_semaphores.size()) { vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eTimeline, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = vk_device.device.createSemaphore(ci); - vk_gc.tl_semaphores.push_back({ semaphore, 0 }); + vk::Semaphore semaphore = ctx->device.lock()->device.createSemaphore(ci); + ctx->gc.tl_semaphores.push_back({ semaphore, 0 }); } - return &vk_gc.tl_semaphores[vk_semaphore_idx++]; + return &ctx->gc.tl_semaphores[ctx->semaphore_idx++]; } -static vk::Event ggml_vk_create_event() { - if (vk_event_idx >= vk_gc.events.size()) { - vk_gc.events.push_back(vk_device.device.createEvent({})); +static vk::Event ggml_vk_create_event(ggml_backend_vk_context * ctx) { + if (ctx->event_idx >= ctx->gc.events.size()) { + ctx->gc.events.push_back(ctx->device.lock()->device.createEvent({})); } - return vk_gc.events[vk_event_idx++]; + return ctx->gc.events[ctx->event_idx++]; } -static void ggml_vk_queue_cleanup(vk_queue& q) { +static void ggml_vk_queue_cleanup(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_queue_cleanup()" << std::endl; #endif // Requires command buffers to be done - vk_device.device.resetCommandPool(q.pool); + ctx->device.lock()->device.resetCommandPool(q.pool); q.cmd_buffer_idx = 0; } -static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_flags) { +static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; #endif - GGML_ASSERT(size > 0); + vk_buffer buf = std::make_shared(); - vk_buffer buf; + if (size == 0) { + buf->size = 0; + return buf; + } - buf.size = size; + buf->size = size; vk::BufferCreateInfo buffer_create_info{ vk::BufferCreateFlags(), size, @@ -670,11 +727,11 @@ static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_ nullptr, }; - buf.buffer = vk_device.device.createBuffer(buffer_create_info); + buf->buffer = ctx->device.lock()->device.createBuffer(buffer_create_info); - vk::MemoryRequirements mem_req = vk_device.device.getBufferMemoryRequirements(buf.buffer); + vk::MemoryRequirements mem_req = ctx->device.lock()->device.getBufferMemoryRequirements(buf->buffer); - vk::PhysicalDeviceMemoryProperties mem_props = vk_device.physical_device.getMemoryProperties(); + vk::PhysicalDeviceMemoryProperties mem_props = ctx->device.lock()->physical_device.getMemoryProperties(); uint32_t memory_type_index = UINT32_MAX; @@ -691,30 +748,36 @@ static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_ } try { - buf.device_memory = vk_device.device.allocateMemory({ mem_req.size, memory_type_index }); + buf->device_memory = ctx->device.lock()->device.allocateMemory({ mem_req.size, memory_type_index }); } catch (const vk::SystemError& e) { // Out of Host/Device memory, clean up buffer - vk_device.device.destroyBuffer(buf.buffer); - buf.size = 0; + 
ctx->device.lock()->device.destroyBuffer(buf->buffer); + buf->size = 0; throw e; } - buf.memory_property_flags = req_flags; - buf.ptr = nullptr; + buf->memory_property_flags = req_flags; + buf->ptr = nullptr; if (req_flags & vk::MemoryPropertyFlagBits::eHostVisible) { - buf.ptr = vk_device.device.mapMemory(buf.device_memory, 0, VK_WHOLE_SIZE); + buf->ptr = ctx->device.lock()->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); } - vk_device.device.bindBufferMemory(buf.buffer, buf.device_memory, 0); + ctx->device.lock()->device.bindBufferMemory(buf->buffer, buf->device_memory, 0); - buf.qf_owner = VK_QUEUE_FAMILY_IGNORED; + buf->ctx = ctx; + + buf->device = ctx->device.lock(); + +#ifdef GGML_VULKAN_DEBUG + std::cerr << "Created buffer " << buf->buffer << std::endl; +#endif return buf; } -static vk_buffer ggml_vk_create_buffer_check(size_t size, vk::MemoryPropertyFlags req_flags) { +static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { try { - return ggml_vk_create_buffer(size, req_flags); + return ggml_vk_create_buffer(ctx, size, req_flags); } catch (const vk::SystemError& e) { std::cerr << "ggml_vulkan: Memory allocation of size " << size << " failed." << std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -722,14 +785,14 @@ static vk_buffer ggml_vk_create_buffer_check(size_t size, vk::MemoryPropertyFlag } } -static vk_buffer ggml_vk_create_buffer_device(size_t size) { +static vk_buffer ggml_vk_create_buffer_device(ggml_backend_vk_context * ctx, size_t size) { vk_buffer buf; try { - buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eDeviceLocal); + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); } catch (const vk::SystemError& e) { - if (vk_device.uma) { + if (ctx->device.lock()->uma) { // Fall back to host memory type - buf = ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + buf = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } else { std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." 
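`ggml_vk_create_buffer_device` above tries device-local memory first and, only on unified-memory devices (`uma`), retries with host-visible | host-coherent memory instead of reporting an allocation failure. A compilable sketch of that fallback order, with a hypothetical `alloc` helper standing in for `ggml_vk_create_buffer`:

```cpp
#include <cstddef>
#include <stdexcept>

enum MemFlags { DeviceLocal = 1, HostVisible = 2, HostCoherent = 4 };

struct Allocation { int flags; size_t size; };

// hypothetical allocator; throws on failure, like the vk::SystemError path in the patch
static Allocation alloc(size_t size, int flags, bool device_local_available) {
    if ((flags & DeviceLocal) && !device_local_available) {
        throw std::runtime_error("out of device memory");
    }
    return { flags, size };
}

static Allocation alloc_device_buffer(size_t size, bool uma, bool device_local_available) {
    try {
        return alloc(size, DeviceLocal, device_local_available);
    } catch (const std::runtime_error &) {
        if (!uma) {
            throw;                                                  // discrete GPU: surface the failure
        }
        return alloc(size, HostVisible | HostCoherent, true);       // UMA: host memory is acceptable
    }
}
```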
<< std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -741,16 +804,7 @@ static vk_buffer ggml_vk_create_buffer_device(size_t size) { } static void ggml_vk_destroy_buffer(vk_buffer& buf) { - if (buf.size == 0) { - return; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_destroy_buffer(" << buf.size << ")" << std::endl; -#endif - - buf.size = 0; - vk_device.device.freeMemory(buf.device_memory); - vk_device.device.destroyBuffer(buf.buffer); + buf.reset(); } static vk_subbuffer ggml_vk_subbuffer(vk_buffer& buf) { @@ -773,7 +827,7 @@ static void ggml_vk_sync_buffers(vk_context * ctx) { ); } -static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vector&& events, vk::PipelineStageFlags src_stages, vk::PipelineStageFlags dst_stages) { +static void ggml_vk_wait_events(vk_context * ctx, std::vector&& events) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_wait_events()" << std::endl; #endif @@ -781,10 +835,10 @@ static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vectors->buffer.waitEvents( events, - src_stages, - dst_stages, + ctx->q->stage_flags, + ctx->q->stage_flags, {}, {}, {} @@ -810,15 +864,15 @@ static bool ggml_vk_build_shader(ggml_type type) { } } -static void ggml_vk_load_shaders() { +static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_load_shaders()" << std::endl; + std::cerr << "ggml_vk_load_shaders(" << ctx->name << ")" << std::endl; #endif // mulmat - std::initializer_list warptile_l = { 128, 128, 128, 16, vk_device.subgroup_size * 2, 64, 2, 4, 4, vk_device.subgroup_size }; - std::initializer_list warptile_m = { 128, 64, 64, 16, vk_device.subgroup_size, 32, 2, 4, 2, vk_device.subgroup_size }; - std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, vk_device.subgroup_size }; + std::initializer_list warptile_l = { 128, 128, 128, 16, ctx->device.lock()->subgroup_size * 2, 64, 2, 4, 4, ctx->device.lock()->subgroup_size }; + std::initializer_list warptile_m = { 128, 64, 64, 16, ctx->device.lock()->subgroup_size, 32, 2, 4, 2, ctx->device.lock()->subgroup_size }; + std::initializer_list warptile_s = { ctx->device.lock()->subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, ctx->device.lock()->subgroup_size }; std::array l_wg_denoms = {128, 128, 1 }; std::array m_wg_denoms = { 64, 64, 1 }; @@ -828,145 +882,208 @@ static void ggml_vk_load_shaders() { uint32_t m_align = 64; uint32_t s_align = 32; - if (vk_device.fp16) { - vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f32_s = ggml_vk_create_pipeline("matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f32_aligned_l = ggml_vk_create_pipeline("matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f32_aligned_m = ggml_vk_create_pipeline("matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f32_aligned_s = ggml_vk_create_pipeline("matmul_f32_aligned_s", matmul_f32_aligned_s_len, 
matmul_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + if (ctx->device.lock()->fp16) { + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_len, matmul_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_l = ggml_vk_create_pipeline("matmul_f16_l", matmul_f16_l_len, matmul_f16_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_m = ggml_vk_create_pipeline("matmul_f16_m", matmul_f16_m_len, matmul_f16_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_s = ggml_vk_create_pipeline("matmul_f16_s", matmul_f16_s_len, matmul_f16_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_len, matmul_f16_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_len, matmul_f16_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_len, matmul_f16_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_len, matmul_f16_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_len, matmul_f16_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_len, matmul_f16_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_aligned_l = ggml_vk_create_pipeline("matmul_f16_aligned_l", matmul_f16_aligned_l_len, matmul_f16_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_aligned_m = ggml_vk_create_pipeline("matmul_f16_aligned_m", matmul_f16_aligned_m_len, matmul_f16_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_aligned_s = ggml_vk_create_pipeline("matmul_f16_aligned_s", matmul_f16_aligned_s_len, matmul_f16_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), 
s_wg_denoms, warptile_s, s_align); - - vk_pipeline_matmul_f16_f32_l = ggml_vk_create_pipeline("matmul_f16_f32_l", matmul_f16_f32_l_len, matmul_f16_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_f32_m = ggml_vk_create_pipeline("matmul_f16_f32_m", matmul_f16_f32_m_len, matmul_f16_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_f32_s = ggml_vk_create_pipeline("matmul_f16_f32_s", matmul_f16_f32_s_len, matmul_f16_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_len, matmul_f16_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_len, matmul_f16_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_len, matmul_f16_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); } else { - vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f32_s = ggml_vk_create_pipeline("matmul_f32_s", matmul_f32_s_fp32_len, matmul_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f32_aligned_l = ggml_vk_create_pipeline("matmul_f32_aligned_l", matmul_f32_aligned_l_fp32_len, matmul_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f32_aligned_m = ggml_vk_create_pipeline("matmul_f32_aligned_m", matmul_f32_aligned_m_fp32_len, matmul_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f32_aligned_s = 
ggml_vk_create_pipeline("matmul_f32_aligned_s", matmul_f32_aligned_s_fp32_len, matmul_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_fp32_len, matmul_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_fp32_len, matmul_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_fp32_len, matmul_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_fp32_len, matmul_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_l = ggml_vk_create_pipeline("matmul_f16_l", matmul_f16_l_fp32_len, matmul_f16_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_m = ggml_vk_create_pipeline("matmul_f16_m", matmul_f16_m_fp32_len, matmul_f16_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_s = ggml_vk_create_pipeline("matmul_f16_s", matmul_f16_s_fp32_len, matmul_f16_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_fp32_len, matmul_f16_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_fp32_len, matmul_f16_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_fp32_len, matmul_f16_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_fp32_len, matmul_f16_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_fp32_len, matmul_f16_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_fp32_len, matmul_f16_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_aligned_l = ggml_vk_create_pipeline("matmul_f16_aligned_l", matmul_f16_aligned_l_fp32_len, matmul_f16_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_aligned_m = ggml_vk_create_pipeline("matmul_f16_aligned_m", matmul_f16_aligned_m_fp32_len, matmul_f16_aligned_m_fp32_data, "main", 3, 14 * 
sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_aligned_s = ggml_vk_create_pipeline("matmul_f16_aligned_s", matmul_f16_aligned_s_fp32_len, matmul_f16_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - vk_pipeline_matmul_f16_f32_l = ggml_vk_create_pipeline("matmul_f16_f32_l", matmul_f16_f32_l_fp32_len, matmul_f16_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_f32_m = ggml_vk_create_pipeline("matmul_f16_f32_m", matmul_f16_f32_m_fp32_len, matmul_f16_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_f32_s = ggml_vk_create_pipeline("matmul_f16_f32_s", matmul_f16_f32_s_fp32_len, matmul_f16_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_fp32_len, matmul_f16_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_fp32_len, matmul_f16_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_fp32_len, matmul_f16_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); } - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0] = 
ggml_vk_create_pipeline("mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16 ], "mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0], "mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1], "mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K], 
"mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); // dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F32 ], "f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), { 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F16 ], "dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_0], "dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_1], "dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_0], "dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_1], "dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q8_0], "dequant_q8_0", dequant_q8_0_len, 
dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q2_K], "dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q3_K], "dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_K], "dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_K], "dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q6_K], "dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_F16 ], "get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_0], "get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_1], "get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_0], "get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_1], "get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q8_0], "get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, 
sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_F32 ], "get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_0], "get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_1], "get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_0], "get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_1], "get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q8_0], "get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_matmul_split_k_reduce = ggml_vk_create_pipeline("split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); - vk_pipeline_mul_mat_vec_p021_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - vk_pipeline_mul_mat_vec_nc_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, "mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, "mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - vk_pipeline_norm_f32 = ggml_vk_create_pipeline("norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_rms_norm_f32 = ggml_vk_create_pipeline("rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 
1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_norm_f32, "norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rms_norm_f32, "rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_cpy_f32_f32 = ggml_vk_create_pipeline("cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_cpy_f32_f16 = ggml_vk_create_pipeline("cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_cpy_f16_f16 = ggml_vk_create_pipeline("cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f32, "cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f16, "cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f16_f16, "cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_add_f32 = ggml_vk_create_pipeline("add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_add_f32, "add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_mul_f32 = ggml_vk_create_pipeline("mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_f32, "mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_scale_f32 = ggml_vk_create_pipeline("scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_scale_f32, "scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_sqr_f32 = ggml_vk_create_pipeline("sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_sqr_f32, "sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_clamp_f32 = ggml_vk_create_pipeline("clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_clamp_f32, "clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_gelu_f32 = ggml_vk_create_pipeline("gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_silu_f32 = ggml_vk_create_pipeline("silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_relu_f32 = ggml_vk_create_pipeline("relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_gelu_f32, "gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, 
ctx->pipeline_silu_f32, "silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_relu_f32, "relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_diag_mask_inf_f32 = ggml_vk_create_pipeline("diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_diag_mask_inf_f32, "diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_soft_max_f32 = ggml_vk_create_pipeline("soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_soft_max_f32, "soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_rope_f32 = ggml_vk_create_pipeline("rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_f16 = ggml_vk_create_pipeline("rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f32, "rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f16, "rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_neox_f32 = ggml_vk_create_pipeline("rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_neox_f16 = ggml_vk_create_pipeline("rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); } -void ggml_vk_init() { +static void ggml_vk_print_gpu_info(size_t idx) { + GGML_ASSERT(idx < vk_instance.device_indices.size()); + size_t dev_num = vk_instance.device_indices[idx]; #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_init()" << std::endl; + std::cerr << "ggml_vk_print_gpu_info(" << dev_num << ")" << std::endl; #endif - static bool initialized = false; + GGML_ASSERT(vk_instance.initialized); - if (initialized) { - return; + std::vector devices = vk_instance.instance.enumeratePhysicalDevices(); + + if (dev_num >= devices.size()) { + std::cerr << "ggml_vulkan: Device with index " << dev_num << " does not exist." << std::endl; + throw std::runtime_error("Device not found"); } - initialized = true; + vk::PhysicalDevice physical_device = devices[dev_num]; + std::vector ext_props = physical_device.enumerateDeviceExtensionProperties(); - const char* GGML_VULKAN_DEVICE = getenv("GGML_VULKAN_DEVICE"); - int dev_num = (GGML_VULKAN_DEVICE == NULL ? 
0 : atoi(GGML_VULKAN_DEVICE)); + vk::PhysicalDeviceProperties2 props2; + vk::PhysicalDeviceMaintenance3Properties props3; + vk::PhysicalDeviceSubgroupProperties subgroup_props; + props2.pNext = &props3; + props3.pNext = &subgroup_props; + physical_device.getProperties2(&props2); + + const size_t subgroup_size = subgroup_props.subgroupSize; + const bool uma = props2.properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; + + bool fp16_storage = false; + bool fp16_compute = false; + + for (auto properties : ext_props) { + if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { + fp16_storage = true; + } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { + fp16_compute = true; + } + } + + const char* GGML_VULKAN_DISABLE_F16 = getenv("GGML_VULKAN_DISABLE_F16"); + bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr; + + bool fp16 = !force_disable_f16 && fp16_storage && fp16_compute; + + vk::PhysicalDeviceFeatures device_features = physical_device.getFeatures(); + + VkPhysicalDeviceFeatures2 device_features2; + device_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + device_features2.pNext = nullptr; + device_features2.features = (VkPhysicalDeviceFeatures)device_features; + + VkPhysicalDeviceVulkan11Features vk11_features; + vk11_features.pNext = nullptr; + vk11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; + device_features2.pNext = &vk11_features; + + VkPhysicalDeviceVulkan12Features vk12_features; + vk12_features.pNext = nullptr; + vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; + vk11_features.pNext = &vk12_features; + + vkGetPhysicalDeviceFeatures2(physical_device, &device_features2); + + fp16 = fp16 && vk12_features.shaderFloat16; + + std::string device_name = props2.properties.deviceName.data(); + std::cerr << GGML_VK_NAME << idx << ": " << device_name << " | uma: " << uma << " | fp16: " << fp16 << " | warp size: " << subgroup_size << std::endl; + + if (props2.properties.deviceType == vk::PhysicalDeviceType::eCpu) { + std::cerr << "ggml_vulkan: Warning: Device type is CPU. This is probably not the device you want." 
<< std::endl; + } +} + +void ggml_vk_instance_init() { + if (vk_instance_initialized) { + return; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_instance_init()" << std::endl; +#endif vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector layers = { @@ -989,12 +1106,55 @@ void ggml_vk_init() { validation_features.setPNext(nullptr); instance_create_info.setPNext(&validation_features); -std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; + std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; #endif - vk_instance = vk::createInstance(instance_create_info); + vk_instance.instance = vk::createInstance(instance_create_info); - vk_device.physical_device = vk_instance.enumeratePhysicalDevices()[dev_num]; - std::vector ext_props = vk_device.physical_device.enumerateDeviceExtensionProperties(); + memset(vk_instance.initialized, 0, sizeof(bool) * GGML_VK_MAX_DEVICES); + + size_t num_available_devices = vk_instance.instance.enumeratePhysicalDevices().size(); + + // Emulate behavior of CUDA_VISIBLE_DEVICES for Vulkan + char * devices_env = getenv("GGML_VK_VISIBLE_DEVICES"); + if (devices_env != nullptr) { + std::string devices(devices_env); + std::replace(devices.begin(), devices.end(), ',', ' '); + + std::stringstream ss(devices); + size_t tmp; + while (ss >> tmp) { + if(tmp >= num_available_devices) { + std::cerr << "ggml_vulkan: Invalid device index " << tmp << " in GGML_VK_VISIBLE_DEVICES." << std::endl; + throw std::runtime_error("Invalid Vulkan device index"); + } + vk_instance.device_indices.push_back(tmp); + } + } else { + vk_instance.device_indices.push_back(0); + } + + vk_instance_initialized = true; +} + +void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { + GGML_ASSERT(idx < vk_instance.device_indices.size()); + size_t dev_num = vk_instance.device_indices[idx]; +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_init(" << ctx->name << ", " << dev_num << ")" << std::endl; +#endif + ggml_vk_instance_init(); + + std::vector devices = vk_instance.instance.enumeratePhysicalDevices(); + + if (dev_num >= devices.size()) { + std::cerr << "ggml_vulkan: Device with index " << dev_num << " does not exist." 
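// A minimal sketch of the GGML_VK_VISIBLE_DEVICES handling introduced in ggml_vk_instance_init
// above: a comma-separated list of device indices is parsed, validated against the number of
// enumerated physical devices, and falls back to device 0 when unset. parse_visible_devices is
// an illustrative name; only the standard library is assumed.
#include <algorithm>
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

static std::vector<size_t> parse_visible_devices(size_t num_available_devices) {
    std::vector<size_t> indices;
    const char * env = std::getenv("GGML_VK_VISIBLE_DEVICES");
    if (env == nullptr) {
        indices.push_back(0);            // default: use only the first device
        return indices;
    }
    std::string devices(env);
    std::replace(devices.begin(), devices.end(), ',', ' ');
    std::stringstream ss(devices);
    size_t idx;
    while (ss >> idx) {
        if (idx >= num_available_devices) {
            throw std::runtime_error("invalid Vulkan device index");
        }
        indices.push_back(idx);
    }
    return indices;
}

int main() {
    for (size_t idx : parse_visible_devices(/*num_available_devices=*/2)) {
        std::cout << "using device " << idx << "\n";
    }
}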
<< std::endl; + throw std::runtime_error("Device not found"); + } + + vk_instance.devices[idx] = std::make_shared(); + ctx->device = vk_instance.devices[idx]; + ctx->device.lock()->physical_device = devices[dev_num]; + std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); bool maintenance4_support = false; @@ -1014,18 +1174,18 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; if (maintenance4_support) { subgroup_props.pNext = &props4; } - vk_device.physical_device.getProperties2(&props2); - vk_device.properties = props2.properties; + ctx->device.lock()->physical_device.getProperties2(&props2); + ctx->device.lock()->properties = props2.properties; if (maintenance4_support) { - vk_device.max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize); + ctx->device.lock()->max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize); } else { - vk_device.max_memory_allocation_size = props3.maxMemoryAllocationSize; + ctx->device.lock()->max_memory_allocation_size = props3.maxMemoryAllocationSize; } - vk_device.vendor_id = vk_device.properties.vendorID; - vk_device.subgroup_size = subgroup_props.subgroupSize; - vk_device.uma = vk_device.properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; + ctx->device.lock()->vendor_id = ctx->device.lock()->properties.vendorID; + ctx->device.lock()->subgroup_size = subgroup_props.subgroupSize; + ctx->device.lock()->uma = ctx->device.lock()->properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; bool fp16_storage = false; bool fp16_compute = false; @@ -1039,31 +1199,31 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; } const char* GGML_VULKAN_DISABLE_F16 = getenv("GGML_VULKAN_DISABLE_F16"); - bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != NULL; + bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr; - vk_device.fp16 = !force_disable_f16 && fp16_storage && fp16_compute; + ctx->device.lock()->fp16 = !force_disable_f16 && fp16_storage && fp16_compute; - std::vector queue_family_props = vk_device.physical_device.getQueueFamilyProperties(); + std::vector queue_family_props = ctx->device.lock()->physical_device.getQueueFamilyProperties(); // Try to find a non-graphics compute queue and transfer-focused queues const uint32_t compute_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eCompute, vk::QueueFlagBits::eGraphics, -1, 1); const uint32_t transfer_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eTransfer, vk::QueueFlagBits::eCompute | vk::QueueFlagBits::eGraphics, compute_queue_family_index, 1); const float priorities[] = { 1.0f, 1.0f }; - const bool single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1; + ctx->device.lock()->single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1; std::vector device_queue_create_infos; if (compute_queue_family_index != transfer_queue_family_index) { device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), transfer_queue_family_index, 1, priorities + 1}); - } else if(!single_queue) { + } else if(!ctx->device.lock()->single_queue) { 
device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 2, priorities}); } else { device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); } vk::DeviceCreateInfo device_create_info; std::vector device_extensions; - vk::PhysicalDeviceFeatures device_features = vk_device.physical_device.getFeatures(); + vk::PhysicalDeviceFeatures device_features = ctx->device.lock()->physical_device.getFeatures(); VkPhysicalDeviceFeatures2 device_features2; device_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; @@ -1080,13 +1240,13 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; vk11_features.pNext = &vk12_features; - vkGetPhysicalDeviceFeatures2(vk_device.physical_device, &device_features2); + vkGetPhysicalDeviceFeatures2(ctx->device.lock()->physical_device, &device_features2); - vk_device.fp16 = vk_device.fp16 && vk12_features.shaderFloat16; + ctx->device.lock()->fp16 = ctx->device.lock()->fp16 && vk12_features.shaderFloat16; if (!vk11_features.storageBuffer16BitAccess) { - std::cerr << "ggml_vulkan: device does not support 16-bit storage" << std::endl; - GGML_ASSERT(false); + std::cerr << "ggml_vulkan: device " << GGML_VK_NAME << idx << " does not support 16-bit storage." << std::endl; + throw std::runtime_error("Unsupported device"); } device_extensions.push_back("VK_KHR_16bit_storage"); @@ -1095,10 +1255,11 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions.push_back("VK_KHR_shader_non_semantic_info"); #endif - if (vk_device.fp16) { + if (ctx->device.lock()->fp16) { device_extensions.push_back("VK_KHR_shader_float16_int8"); } - std::cerr << "ggml_vulkan: Using " << vk_device.properties.deviceName << " | uma: " << vk_device.uma << " | fp16: " << vk_device.fp16 << " | warp size: " << vk_device.subgroup_size << std::endl; + ctx->device.lock()->name = ctx->device.lock()->properties.deviceName.data(); + device_create_info = { vk::DeviceCreateFlags(), device_queue_create_infos, @@ -1106,28 +1267,32 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions }; device_create_info.setPNext(&device_features2); - vk_device.device = vk_device.physical_device.createDevice(device_create_info); + ctx->device.lock()->device = ctx->device.lock()->physical_device.createDevice(device_create_info); - vk_device.descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; + ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; // Shaders - ggml_vk_load_shaders(); + ggml_vk_load_shaders(ctx); // Queues - vk_device.compute_queue = ggml_vk_create_queue(compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); - if (!single_queue) { + ggml_vk_create_queue(ctx, ctx->device.lock()->compute_queue, compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); + if (!ctx->device.lock()->single_queue) { const uint32_t transfer_queue_index = compute_queue_family_index == transfer_queue_family_index ? 
1 : 0; - vk_device.transfer_queue = ggml_vk_create_queue(transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); + ggml_vk_create_queue(ctx, ctx->device.lock()->transfer_queue, transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); } else { - vk_device.transfer_queue = vk_device.compute_queue; + // TODO: Use pointer or reference to avoid copy + ctx->device.lock()->transfer_queue = ctx->device.lock()->compute_queue; } - vk_fence = vk_device.device.createFence({}); + ctx->fence = ctx->device.lock()->device.createFence({}); - vk_ctx = nullptr; - vk_transfer_ctx = nullptr; + ctx->compute_ctx = nullptr; + ctx->transfer_ctx = nullptr; - vk_disable = false; + ctx->disable = false; + ctx->initialized = true; + + ctx->idx = idx; #ifdef GGML_VULKAN_CHECK_RESULTS const char* skip_checks = getenv("GGML_VULKAN_SKIP_CHECKS"); @@ -1137,7 +1302,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; #endif } -static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { +static vk_pipeline* ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_to_fp16()" << std::endl; #endif @@ -1158,10 +1323,10 @@ static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { return nullptr; } - return &vk_pipeline_dequant[type]; + return &ctx->pipeline_dequant[type]; } -static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { +static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_dequantize_mul_mat_vec()" << std::endl; #endif @@ -1182,15 +1347,10 @@ static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { return nullptr; } - return &vk_pipeline_dequant_mul_mat_vec_f32[type]; + return &ctx->pipeline_dequant_mul_mat_vec_f32[type]; } -// buffer pool for vulkan -#define MAX_VK_BUFFERS 256 - -static vk_buffer g_vk_buffer_pool[MAX_VK_BUFFERS]; - -static vk_buffer ggml_vk_pool_malloc(size_t size) { +static vk_buffer ggml_vk_pool_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_malloc(" << size << ")" << std::endl; #endif @@ -1199,98 +1359,95 @@ static vk_buffer ggml_vk_pool_malloc(size_t size) { int worst_i = -1; size_t worst_size = 0; //largest unused buffer seen so far for (int i = 0; i < MAX_VK_BUFFERS; ++i) { - vk_buffer &b = g_vk_buffer_pool[i]; - if (b.size > 0 && b.size >= size && b.size < best_size) { + vk_buffer &b = ctx->buffer_pool[i]; + if (b != nullptr && b->size >= size && b->size < best_size) { best_i = i; - best_size = b.size; + best_size = b->size; } - if (b.size > 0 && b.size > worst_size) { + if (b != nullptr && b->size > worst_size) { worst_i = i; - worst_size = b.size; + worst_size = b->size; } } if(best_i != -1) { //found the smallest buffer that fits our needs - vk_buffer b = g_vk_buffer_pool[best_i]; - g_vk_buffer_pool[best_i].size = 0; + vk_buffer b = ctx->buffer_pool[best_i]; + ctx->buffer_pool[best_i].reset(); return b; } if(worst_i != -1) { //no buffer that fits our needs, resize largest one to save memory - vk_buffer& b = g_vk_buffer_pool[worst_i]; + vk_buffer& b = ctx->buffer_pool[worst_i]; ggml_vk_destroy_buffer(b); } - return ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eDeviceLocal); + return ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); } -static void ggml_vk_pool_free(vk_buffer& buffer) { +static void 
ggml_vk_pool_free(ggml_backend_vk_context * ctx, vk_buffer& buffer) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pool_free(" << buffer.size << ")" << std::endl; + std::cerr << "ggml_vk_pool_free(" << buffer->size << ")" << std::endl; #endif for (int i = 0; i < MAX_VK_BUFFERS; ++i) { - vk_buffer& b = g_vk_buffer_pool[i]; - if (b.size == 0) { + vk_buffer& b = ctx->buffer_pool[i]; + if (b == nullptr) { b = buffer; - // Set owning queue family index to ignored to avoid synchronization on next use - b.qf_owner = VK_QUEUE_FAMILY_IGNORED; return; } } - fprintf(stderr, "WARNING: vk buffer pool full, increase MAX_VK_BUFFERS\n"); + std::cerr << "ggml_vulkan: WARNING: vk buffer pool full, increase MAX_VK_BUFFERS" << std::endl; ggml_vk_destroy_buffer(buffer); } // Returns an available temporary buffer that may only be used temporarily, it will be reused -static vk_buffer ggml_vk_create_buffer_temp(size_t size) { +static vk_buffer ggml_vk_create_buffer_temp(ggml_backend_vk_context * ctx, size_t size) { // Try to find existing temp buffer with enough capacity - for (auto& buffer : vk_gc.temp_buffers) { - if (buffer.size >= size) { + for (auto& buffer : ctx->gc.temp_buffers) { + if (buffer->size >= size) { return buffer; } } // Otherwise create new buffer - vk_buffer buf = ggml_vk_pool_malloc(size); - vk_gc.temp_buffers.push_back(buf); + vk_buffer buf = ggml_vk_pool_malloc(ctx, size); + ctx->gc.temp_buffers.push_back(buf); return buf; } -static void * ggml_vk_host_malloc(size_t size) { +static void * ggml_vk_host_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif - vk_buffer buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + vk_buffer buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); - if(!(buf.memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { + if(!(buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory\n", size/1024.0/1024.0); - buf.size = 0; - vk_device.device.freeMemory(buf.device_memory); - vk_device.device.destroyBuffer(buf.buffer); + ctx->device.lock()->device.freeMemory(buf->device_memory); + ctx->device.lock()->device.destroyBuffer(buf->buffer); return nullptr; } - vk_pinned_memory.push_back(std::make_tuple(buf.ptr, size, buf)); + ctx->pinned_memory.push_back(std::make_tuple(buf->ptr, size, buf)); - return buf.ptr; + return buf->ptr; } -static void ggml_vk_host_free(void* ptr) { +static void ggml_vk_host_free(ggml_backend_vk_context * ctx, void* ptr) { if (ptr == nullptr) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_free(" << ptr << ")" << std::endl; #endif - vk_buffer* buf = nullptr; + vk_buffer buf; size_t index; - for (size_t i = 0; i < vk_pinned_memory.size(); i++) { - const uint8_t* addr = (const uint8_t*) std::get<0>(vk_pinned_memory[i]); - const uint8_t* endr = addr + std::get<1>(vk_pinned_memory[i]); + for (size_t i = 0; i < ctx->pinned_memory.size(); i++) { + const uint8_t* addr = (const uint8_t*) std::get<0>(ctx->pinned_memory[i]); + const uint8_t* endr = addr + std::get<1>(ctx->pinned_memory[i]); if (ptr >= addr && ptr < endr) { - buf = &std::get<2>(vk_pinned_memory[i]); + buf = std::get<2>(ctx->pinned_memory[i]); index = i; 
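// A minimal sketch of the best-fit buffer-pool strategy used by ggml_vk_pool_malloc and
// ggml_vk_pool_free above: reuse the smallest pooled buffer that still fits the request,
// otherwise drop the largest pooled buffer (to cap memory use) and allocate fresh; freed
// buffers are parked in the first empty slot. pool_buffer, pool_malloc and pool_free are
// illustrative names standing in for vk_buffer and the real helpers.
#include <array>
#include <cstddef>
#include <limits>
#include <memory>

struct pool_buffer { size_t size; /* device memory handle lives here in the real code */ };
using buffer_ref = std::shared_ptr<pool_buffer>;

static constexpr int MAX_BUFFERS = 256;

static buffer_ref pool_malloc(std::array<buffer_ref, MAX_BUFFERS> & pool, size_t size) {
    int best_i = -1, worst_i = -1;
    size_t best_size  = std::numeric_limits<size_t>::max();
    size_t worst_size = 0;
    for (int i = 0; i < MAX_BUFFERS; ++i) {
        const buffer_ref & b = pool[i];
        if (b && b->size >= size && b->size < best_size) { best_i = i; best_size = b->size; }
        if (b && b->size > worst_size)                   { worst_i = i; worst_size = b->size; }
    }
    if (best_i != -1) {                  // smallest pooled buffer that fits: hand it out
        buffer_ref b = pool[best_i];
        pool[best_i].reset();
        return b;
    }
    if (worst_i != -1) {                 // nothing fits: free the largest before allocating anew
        pool[worst_i].reset();
    }
    return std::make_shared<pool_buffer>(pool_buffer{size});   // fresh device-local allocation
}

static void pool_free(std::array<buffer_ref, MAX_BUFFERS> & pool, buffer_ref buffer) {
    for (buffer_ref & b : pool) {
        if (!b) { b = std::move(buffer); return; }   // park it for later reuse
    }
    // pool full: the buffer is simply destroyed here, matching the patch's warning path
}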
break; } @@ -1300,28 +1457,28 @@ static void ggml_vk_host_free(void* ptr) { return; } - ggml_vk_destroy_buffer(*buf); + ggml_vk_destroy_buffer(buf); - vk_pinned_memory.erase(vk_pinned_memory.begin() + index); + ctx->pinned_memory.erase(ctx->pinned_memory.begin() + index); } -static void ggml_vk_host_get(const void * ptr, vk_buffer *& buf, size_t& buf_offset) { +static void ggml_vk_host_get(ggml_backend_vk_context * ctx, const void * ptr, vk_buffer& buf, size_t& buf_offset) { buf = nullptr; buf_offset = 0; - for (size_t i = 0; i < vk_pinned_memory.size(); i++) { - const uint8_t* addr = (const uint8_t*) std::get<0>(vk_pinned_memory[i]); - const uint8_t* endr = addr + std::get<1>(vk_pinned_memory[i]); + for (size_t i = 0; i < ctx->pinned_memory.size(); i++) { + const uint8_t* addr = (const uint8_t*) std::get<0>(ctx->pinned_memory[i]); + const uint8_t* endr = addr + std::get<1>(ctx->pinned_memory[i]); if (ptr >= addr && ptr < endr) { - buf = &std::get<2>(vk_pinned_memory[i]); + buf = std::get<2>(ctx->pinned_memory[i]); buf_offset = ((const uint8_t *)ptr) - addr; break; } } } -static vk_submission ggml_vk_begin_submission(vk_queue& q, bool one_time = true) { +static vk_submission ggml_vk_begin_submission(ggml_backend_vk_context * ctx, vk_queue& q, bool one_time = true) { vk_submission s; - s.buffer = ggml_vk_create_cmd_buffer(q); + s.buffer = ggml_vk_create_cmd_buffer(ctx, q); if (one_time) { s.buffer.begin({ vk::CommandBufferUsageFlagBits::eOneTimeSubmit }); } else { @@ -1331,7 +1488,7 @@ static vk_submission ggml_vk_begin_submission(vk_queue& q, bool one_time = true) return s; } -static void ggml_vk_dispatch_pipeline(vk_context * ctx, vk_pipeline& pipeline, std::vector&& buffers, size_t push_constant_size, const void* push_constants, std::array elements) { +static void ggml_vk_dispatch_pipeline(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline& pipeline, std::vector&& buffers, size_t push_constant_size, const void* push_constants, std::array elements) { const uint32_t wg0 = CEIL_DIV(elements[0], pipeline.wg_denoms[0]); const uint32_t wg1 = CEIL_DIV(elements[1], pipeline.wg_denoms[1]); const uint32_t wg2 = CEIL_DIV(elements[2], pipeline.wg_denoms[2]); @@ -1344,22 +1501,22 @@ static void ggml_vk_dispatch_pipeline(vk_context * ctx, vk_pipeline& pipeline, s GGML_ASSERT(buffers.size() == pipeline.parameter_count); vk::DescriptorSet& descriptor_set = pipeline.descriptor_sets[pipeline.descriptor_set_idx++]; for (uint32_t i = 0; i < pipeline.parameter_count; i++) { - descriptor_buffer_infos.push_back({buffers[i].buffer.buffer, buffers[i].offset, buffers[i].size}); + descriptor_buffer_infos.push_back({buffers[i].buffer->buffer, buffers[i].offset, buffers[i].size}); } for (uint32_t i = 0; i < pipeline.parameter_count; i++) { write_descriptor_sets.push_back({descriptor_set, i, 0, 1, vk::DescriptorType::eStorageBuffer, nullptr, &descriptor_buffer_infos[i]}); } - vk_device.device.updateDescriptorSets(write_descriptor_sets, {}); + ctx->device.lock()->device.updateDescriptorSets(write_descriptor_sets, {}); - ctx->s->buffer.pushConstants(pipeline.layout, vk::ShaderStageFlagBits::eCompute, 0, push_constant_size, push_constants); - ctx->s->buffer.bindPipeline(vk::PipelineBindPoint::eCompute, pipeline.pipeline); - ctx->s->buffer.bindDescriptorSets(vk::PipelineBindPoint::eCompute, + subctx->s->buffer.pushConstants(pipeline.layout, vk::ShaderStageFlagBits::eCompute, 0, push_constant_size, push_constants); + subctx->s->buffer.bindPipeline(vk::PipelineBindPoint::eCompute, pipeline.pipeline); + 
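// A minimal sketch of the pinned-memory lookup performed by ggml_vk_host_get above: host
// allocations made through the backend are recorded as (base pointer, size, buffer) tuples,
// and an arbitrary pointer is mapped back to the owning buffer plus a byte offset, so pinned
// memory can be used directly as a staging source. Names are illustrative.
#include <cstddef>
#include <cstdint>
#include <tuple>
#include <vector>

struct host_buffer { /* Vulkan buffer handle + mapped pointer in the real code */ };

using pinned_entry = std::tuple<void *, size_t, host_buffer *>;

static host_buffer * host_get(const std::vector<pinned_entry> & pinned,
                              const void * ptr, size_t & buf_offset) {
    buf_offset = 0;
    for (const pinned_entry & e : pinned) {
        const uint8_t * addr = static_cast<const uint8_t *>(std::get<0>(e));
        const uint8_t * endr = addr + std::get<1>(e);
        const uint8_t * p    = static_cast<const uint8_t *>(ptr);
        if (p >= addr && p < endr) {     // ptr lives inside this pinned allocation
            buf_offset = static_cast<size_t>(p - addr);
            return std::get<2>(e);
        }
    }
    return nullptr;                      // not pinned: caller falls back to a staging copy
}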
subctx->s->buffer.bindDescriptorSets(vk::PipelineBindPoint::eCompute, pipeline.layout, 0, { descriptor_set }, {}); - ctx->s->buffer.dispatch(wg0, wg1, wg2); + subctx->s->buffer.dispatch(wg0, wg1, wg2); } static void ggml_vk_end_submission(vk_submission& s, std::vector wait_semaphores, std::vector signal_semaphores) { @@ -1381,16 +1538,16 @@ static void ggml_vk_ctx_end(vk_context * ctx) { ctx->s = nullptr; } -static void ggml_vk_ctx_begin(vk_context * ctx) { +static void ggml_vk_ctx_begin(ggml_backend_vk_context * ctx, vk_context * subctx) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_ctx_begin(" << ctx << ")" << std::endl; #endif - if (ctx->s != nullptr) { - ggml_vk_ctx_end(ctx); + if (subctx->s != nullptr) { + ggml_vk_ctx_end(subctx); } - ctx->seqs.push_back({ ggml_vk_begin_submission(*ctx->q) }); - ctx->s = ctx->seqs[ctx->seqs.size() - 1].data(); + subctx->seqs.push_back({ ggml_vk_begin_submission(ctx, *subctx->q) }); + subctx->s = subctx->seqs[subctx->seqs.size() - 1].data(); } static size_t ggml_vk_align_size(size_t width, size_t align) { @@ -1405,14 +1562,14 @@ static void deferred_memcpy(void * dst, const void * src, size_t size, std::vect } } -static void ensure_sync_staging_buffer(size_t size) { - if (vk_sync_staging.size < size) { - ggml_vk_destroy_buffer(vk_sync_staging); - vk_sync_staging = ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); +static void ggml_vk_ensure_sync_staging_buffer(ggml_backend_vk_context * ctx, size_t size) { + if (ctx->sync_staging == nullptr || ctx->sync_staging->size < size) { + ggml_vk_destroy_buffer(ctx->sync_staging); + ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); } } -static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) { +static void ggml_vk_buffer_write_nc_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_nc_async(" << tensor << ")" << std::endl; #endif @@ -1423,9 +1580,9 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size GGML_ASSERT(false); } // Check if src is pinned memory - vk_buffer * buf = nullptr; + vk_buffer buf; size_t buf_offset; - ggml_vk_host_get(tensor->data, buf, buf_offset); + ggml_vk_host_get(ctx, tensor->data, buf, buf_offset); const uint64_t ne0 = tensor->ne[0]; const uint64_t ne1 = tensor->ne[1]; @@ -1471,21 +1628,21 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); return; } // Staging buffer required - vk_buffer * staging = &vk_staging; - size_t staging_offset = vk_staging_offset; + vk_buffer staging = ctx->staging; + size_t staging_offset = ctx->staging_offset; const size_t copy_size = ts*ne/bs; - if (vk_staging.size < vk_staging_offset + copy_size) { + if (ctx->staging->size < ctx->staging_offset + copy_size) { if (sync_staging) { // Create temporary larger buffer - ensure_sync_staging_buffer(copy_size); + ggml_vk_ensure_sync_staging_buffer(ctx, copy_size); - staging = 
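// A minimal sketch of the dispatch-size computation used by ggml_vk_dispatch_pipeline above:
// the workgroup count per axis is the element count divided by the pipeline's workgroup
// denominator, rounded up. CEIL_DIV mirrors the expression in the patch; the surrounding
// program is illustrative only.
#include <array>
#include <cstdint>
#include <iostream>

static constexpr uint32_t CEIL_DIV(uint32_t a, uint32_t b) { return (a + b - 1) / b; }

int main() {
    const std::array<uint32_t, 3> elements  = { 4097, 32, 1 };   // e.g. rows, cols, batch
    const std::array<uint32_t, 3> wg_denoms = {  512,  1, 1 };   // taken from the pipeline definition
    std::cout << "dispatch("
              << CEIL_DIV(elements[0], wg_denoms[0]) << ", "     // 9 groups cover 4097 elements
              << CEIL_DIV(elements[1], wg_denoms[1]) << ", "
              << CEIL_DIV(elements[2], wg_denoms[2]) << ")\n";
}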
&vk_sync_staging; + staging = ctx->sync_staging; staging_offset = 0; } else { GGML_ASSERT(false); @@ -1494,23 +1651,23 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size VkBufferCopy buf_copy{ staging_offset, offset, copy_size }; - ggml_vk_sync_buffers(ctx); - vkCmdCopyBuffer(ctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); + ggml_vk_sync_buffers(subctx); + vkCmdCopyBuffer(subctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); for (uint64_t i3 = 0; i3 < ne3; i3++) { for (uint64_t i2 = 0; i2 < ne2; i2++) { // Find longest contiguous slice if (ne1*nb1 == dstnb2) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2, dstnb2, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2, dstnb2, &subctx->in_memcpys); } else { for (uint64_t i1 = 0; i1 < ne1; i1++) { if (ne0*nb0/bs == dstnb1) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2 + i1*nb1, dstnb1, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2 + i1*nb1, dstnb1, &subctx->in_memcpys); } else { const uint64_t s_off = buf_offset + i3*nb3 + i2*nb2 + i1*nb1; const uint64_t d_off = staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1; for (uint64_t i0 = 0; i0 < ne0; i0++) { - deferred_memcpy((uint8_t *)staging->ptr + d_off + i0*dstnb0, (const uint8_t *) tensor->data + s_off + i0*nb0, dstnb0, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + d_off + i0*dstnb0, (const uint8_t *) tensor->data + s_off + i0*nb0, dstnb0, &subctx->in_memcpys); } } } @@ -1519,19 +1676,22 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size } } -static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { +static void ggml_vk_buffer_write_2d_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d_async(" << width << ", " << height << ")" << std::endl; #endif + // Make sure ctx owns the buffer + GGML_ASSERT(dst->ctx == ctx); + // Buffer is already mapped if(dst->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { std::cerr << "ggml_vulkan: buffer_write_async dst buffer is host_visible. Use synchronous write." 
<< std::endl; GGML_ASSERT(false); } // Check if src is pinned memory - vk_buffer * buf = nullptr; + vk_buffer buf = nullptr; size_t buf_offset; - ggml_vk_host_get(src, buf, buf_offset); + ggml_vk_host_get(ctx, src, buf, buf_offset); if (buf != nullptr) { // Memory is pinned, use as staging buffer @@ -1550,8 +1710,8 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); return; } #ifdef GGML_VULKAN_DEBUG @@ -1559,14 +1719,14 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size #endif // Staging buffer required - vk_buffer * staging = &vk_staging; - size_t staging_offset = vk_staging_offset; + vk_buffer staging = ctx->staging; + size_t staging_offset = ctx->staging_offset; const size_t copy_size = width*height; - if (vk_staging.size < vk_staging_offset + copy_size) { + if (ctx->staging == nullptr || ctx->staging->size < ctx->staging_offset + copy_size) { if (sync_staging) { - ensure_sync_staging_buffer(copy_size); + ggml_vk_ensure_sync_staging_buffer(ctx, copy_size); - staging = &vk_sync_staging; + staging = ctx->sync_staging; staging_offset = 0; } else { GGML_ASSERT(false); @@ -1578,26 +1738,26 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size offset, copy_size}; - ggml_vk_sync_buffers(ctx); - vkCmdCopyBuffer(ctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); + ggml_vk_sync_buffers(subctx); + vkCmdCopyBuffer(subctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); if (width == spitch) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset, src, width * height, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset, src, width * height, &subctx->in_memcpys); } else { for (size_t i = 0; i < height; i++) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i * width, (const uint8_t *) src + i * spitch, width, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i * width, (const uint8_t *) src + i * spitch, width, &subctx->in_memcpys); } } } -static void ggml_vk_buffer_write_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { +static void ggml_vk_buffer_write_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_async(" << size << ")" << std::endl; #endif - return ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, size, size, 1, sync_staging); + return ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, src, size, size, 1, sync_staging); } -static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { +static void ggml_vk_buffer_write_2d(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d(" << width << ", " << height << ")" << std::endl; #endif @@ -1609,39 +1769,42 @@ static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * memcpy((uint8_t *)dst->ptr + offset + i * width, (const uint8_t *) src + i * spitch, width); } } else { - vk_context * ctx = 
ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, spitch, width, height, true); - ggml_vk_ctx_end(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, src, spitch, width, height, true); + ggml_vk_ctx_end(subctx); - for (auto& cpy : ctx->in_memcpys) { + for (auto& cpy : subctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); } } -static void ggml_vk_buffer_write(vk_buffer* dst, size_t offset, const void * src, size_t size) { +static void ggml_vk_buffer_write(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, const void * src, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write(" << size << ")" << std::endl; #endif - ggml_vk_buffer_write_2d(dst, offset, src, 0, size, 1); + ggml_vk_buffer_write_2d(ctx, dst, offset, src, 0, size, 1); } -static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) { +static void ggml_vk_buffer_read_2d_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read_2d_async(offset=" << offset << ", width=" << width << ", height=" << height << ")" << std::endl; #endif GGML_ASSERT(width > 0); GGML_ASSERT(height > 0); - GGML_ASSERT(src->size > 0); + GGML_ASSERT(src != nullptr); + // Make sure ctx owns the buffer + GGML_ASSERT(src->ctx == ctx); + // Check if dst is pinned memory - vk_buffer * buf = nullptr; + vk_buffer buf = nullptr; size_t buf_offset; - ggml_vk_host_get(dst, buf, buf_offset); + ggml_vk_host_get(ctx, dst, buf, buf_offset); std::vector slices(1); if (width == spitch && width == dpitch) { @@ -1660,8 +1823,8 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_ if (buf != nullptr) { // Memory is pinned, use as staging buffer - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(src->buffer, buf->buffer, slices); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(src->buffer, buf->buffer, slices); return; } @@ -1670,30 +1833,30 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_ #endif // Fall back to staging buffer - vk_buffer * staging = &vk_staging; + vk_buffer staging = ctx->staging; const size_t copy_size = dpitch * height; - if (vk_staging.size < vk_staging_offset + copy_size) { + if (ctx->staging == nullptr || ctx->staging->size < ctx->staging_offset + copy_size) { if (sync_staging) { // Create temporary larger buffer - ensure_sync_staging_buffer(copy_size); + ggml_vk_ensure_sync_staging_buffer(ctx, copy_size); - staging = &vk_sync_staging; + staging = ctx->sync_staging; } else { GGML_ASSERT(false); } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(src->buffer, staging->buffer, slices); + 
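// A minimal sketch of the deferred-memcpy pattern the staging paths above rely on: while a
// command buffer is being recorded, host-side copies into (or out of) the staging buffer are
// only queued as (dst, src, n) entries; in_memcpys are replayed just before submission and
// out_memcpys after the fence wait, as in ggml_vk_buffer_write_2d / ggml_vk_buffer_read.
// copy_op, deferred_copy and flush_copies are illustrative names.
#include <cstddef>
#include <cstring>
#include <vector>

struct copy_op { void * dst; const void * src; size_t n; };

static void deferred_copy(void * dst, const void * src, size_t n, std::vector<copy_op> & pending) {
    pending.push_back({dst, src, n});    // record only, do not touch the data yet
}

static void flush_copies(std::vector<copy_op> & pending) {
    for (const copy_op & c : pending) {  // executed around queue submission in the real code
        std::memcpy(c.dst, c.src, c.n);
    }
    pending.clear();
}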
ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(src->buffer, staging->buffer, slices); - deferred_memcpy(dst, staging->ptr, copy_size, &ctx->out_memcpys); + deferred_memcpy(dst, staging->ptr, copy_size, &subctx->out_memcpys); } -static void ggml_vk_buffer_read_async(vk_context * ctx, vk_buffer* src, size_t offset, void * dst, size_t size, bool sync_staging = false) { - return ggml_vk_buffer_read_2d_async(ctx, src, offset, dst, size, size, size, 1, sync_staging); +static void ggml_vk_buffer_read_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, void * dst, size_t size, bool sync_staging = false) { + return ggml_vk_buffer_read_2d_async(ctx, subctx, src, offset, dst, size, size, size, 1, sync_staging); } -static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_t size) { +static void ggml_vk_buffer_read(ggml_backend_vk_context * ctx, vk_buffer& src, size_t offset, void * dst, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read(" << offset << ", " << size << ")" << std::endl; #endif @@ -1702,61 +1865,88 @@ static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_ memcpy(dst, (uint8_t *) src->ptr + offset, size); } else { - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - ggml_vk_buffer_read_async(ctx, src, offset, dst, size, true); - ggml_vk_ctx_end(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_buffer_read_async(ctx, subctx, src, offset, dst, size, true); + ggml_vk_ctx_end(subctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : ctx->out_memcpys) { + for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } } } -static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { +static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer& dst, size_t dst_offset, vk_buffer& src, size_t src_offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy_async(" << size << ")" << std::endl; #endif + // Make sure both buffers are on same ctx + GGML_ASSERT(src->ctx == dst->ctx); + VkBufferCopy bc{ src_offset, dst_offset, size }; vkCmdCopyBuffer(ctx->s->buffer, src->buffer, dst->buffer, 1, &bc); } -static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { +static void ggml_vk_buffer_copy(vk_buffer& dst, size_t dst_offset, vk_buffer& src, size_t src_offset, size_t size) { + if (src->ctx == dst->ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_buffer_copy(" << size << ")" << std::endl; + std::cerr << "ggml_vk_buffer_copy(SINGLE_DEVICE, " << size << ")" << std::endl; #endif - VkBufferCopy bc{ src_offset, dst_offset, size }; + // Copy within the device + ggml_backend_vk_context * ctx = src->ctx; - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - vkCmdCopyBuffer(ctx->s->buffer, src->buffer, dst->buffer, 1, &bc); - ggml_vk_buffer_copy_async(ctx, dst, 
dst_offset, src, src_offset, size); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); - vk_device.device.resetFences({ vk_fence }); + VkBufferCopy bc{ src_offset, dst_offset, size }; + + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_buffer_copy_async(subctx, dst, dst_offset, src, src_offset, size); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); + } else { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_buffer_copy(MULTI_DEVICE, " << size << ")" << std::endl; +#endif + // Copy device to device + ggml_backend_vk_context * src_ctx = src->ctx; + ggml_backend_vk_context * dst_ctx = dst->ctx; + + ggml_vk_ensure_sync_staging_buffer(src_ctx, size); + ggml_vk_ensure_sync_staging_buffer(dst_ctx, size); + + // Copy to src staging buffer + ggml_vk_buffer_copy(src_ctx->sync_staging, 0, src, src_offset, size); + // memcpy to dst staging buffer + memcpy(dst_ctx->sync_staging->ptr, src_ctx->sync_staging->ptr, size); + // Copy to dst buffer + ggml_vk_buffer_copy(dst, dst_offset, dst_ctx->sync_staging, 0, size); + } } -static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, size_t size) { +static void ggml_vk_buffer_memset(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, uint32_t c, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_memset(" << offset << ", " << c << ", " << size << ")" << std::endl; #endif - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - ctx->s->buffer.fillBuffer(dst->buffer, offset, size, c); - ggml_vk_ctx_end(ctx); + // Make sure ctx owns the buffer + GGML_ASSERT(dst->ctx == ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_memset waitForFences"); - vk_device.device.resetFences({ vk_fence }); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + subctx->s->buffer.fillBuffer(dst->buffer, offset, size, c); + ggml_vk_ctx_end(subctx); + + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_memset waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); } -static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { +static void ggml_vk_h2d_tensor_2d(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_h2d_tensor_2d(dst=" << dst << ", offset=" << offset << ", src=" << src << ", i3=" << i3 << ", i2=" << i2 << ", i1=" << i1 << ")" << std::endl; #endif @@ -1773,20 +1963,20 @@ static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offs const void * x = (const void *) ((const char *) src->data + i2*nb2 + i3*nb3); if (nb0 == ts && nb1 == row_length) { - return ggml_vk_buffer_write_async(ctx, dst, offset, x, i1*nb1); + return ggml_vk_buffer_write_async(ctx, subctx, dst, offset, x, i1*nb1); } if (nb0 == ts && (i1 == ne1 || 
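// A minimal sketch of the device-to-device copy path introduced above: when source and
// destination buffers belong to different Vulkan devices, the data is bounced through both
// devices' host-visible sync staging buffers with a plain memcpy in between. device_buffer and
// copy_cross_device are illustrative stand-ins for vk_buffer and ggml_vk_buffer_copy; the
// vectors stand in for GPU copies the real code issues.
#include <algorithm>
#include <cstddef>
#include <cstring>
#include <vector>

struct device_buffer {
    int device_id;                       // which GPU owns this buffer (differs between src and dst here)
    std::vector<unsigned char> staging;  // stands in for the host-visible sync staging buffer
    std::vector<unsigned char> data;     // stands in for device-local memory
};

// Assumes dst.data is at least dst_off + size bytes and src.data at least src_off + size bytes.
static void copy_cross_device(device_buffer & dst, size_t dst_off,
                              device_buffer & src, size_t src_off, size_t size) {
    // 1. device-local -> src staging (a GPU copy in the real code)
    src.staging.assign(src.data.begin() + src_off, src.data.begin() + src_off + size);
    // 2. src staging -> dst staging (host memcpy, the only step that crosses devices)
    dst.staging.resize(size);
    std::memcpy(dst.staging.data(), src.staging.data(), size);
    // 3. dst staging -> device-local (another GPU copy in the real code)
    std::copy(dst.staging.begin(), dst.staging.end(), dst.data.begin() + dst_off);
}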
!ggml_is_permuted(src))) { - return ggml_vk_buffer_write_2d_async(ctx, dst, offset, x, nb1, row_length, i1); + return ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, x, nb1, row_length, i1); } GGML_ASSERT(i3 == 0); GGML_ASSERT(i2 == 0); GGML_ASSERT(i1 == (uint64_t) ggml_nrows(src)); - return ggml_vk_buffer_write_nc_async(ctx, dst, offset, src); + return ggml_vk_buffer_write_nc_async(ctx, subctx, dst, offset, src); } -static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offset, const ggml_tensor * dst) { +static void ggml_vk_d2h_tensor_2d(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, const ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_d2h_tensor_2d()" << std::endl; #endif @@ -1804,10 +1994,10 @@ static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offs const size_t row_length = ts*ne0/bs; if (ggml_is_contiguous(dst)) { - return ggml_vk_buffer_read_async(ctx, src, offset, dst->data, ne1*nb1*ne2*ne3); + return ggml_vk_buffer_read_async(ctx, subctx, src, offset, dst->data, ne1*nb1*ne2*ne3); } if (nb0 == ts) { - return ggml_vk_buffer_read_2d_async(ctx, src, offset, dst->data, nb1, nb1, row_length, ne1*ne2*ne3); + return ggml_vk_buffer_read_2d_async(ctx, subctx, src, offset, dst->data, nb1, nb1, row_length, ne1*ne2*ne3); } GGML_ASSERT(false); } @@ -1829,89 +2019,89 @@ static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { return 1; } -static uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { +static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ctx, int m, int n) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")" << std::endl; #endif if (m <= 32 || n <= 32) { - return vk_pipeline_matmul_f32_aligned_s.align; + return ctx->pipeline_matmul_f32_aligned_s.align; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { - return vk_pipeline_matmul_f32_aligned_m.align; + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + return ctx->pipeline_matmul_f32_aligned_m.align; } - return vk_pipeline_matmul_f32_aligned_l.align; + return ctx->pipeline_matmul_f32_aligned_l.align; } -static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, int m, int n, bool aligned) { +static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; #endif if (bit16_x && bit16_y) { - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_aligned_s : &vk_pipeline_matmul_f16_s; + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_aligned_m : &vk_pipeline_matmul_f16_m; + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? 
&vk_pipeline_matmul_f16_aligned_l : &vk_pipeline_matmul_f16_l; + return aligned ? &ctx->pipeline_matmul_f16_aligned_l : &ctx->pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_s : &vk_pipeline_matmul_f16_f32_s; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_m : &vk_pipeline_matmul_f16_f32_m; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_l : &vk_pipeline_matmul_f16_f32_l; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_l : &ctx->pipeline_matmul_f16_f32_l; } if (!bit16_x && bit16_y) { GGML_ASSERT(false); } - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_s : &vk_pipeline_matmul_f32_s; + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_m : &vk_pipeline_matmul_f32_m; + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_l : &vk_pipeline_matmul_f32_l; + return aligned ? 
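// A minimal sketch of the tile-size selection repeated above for each matmul variant: small
// matrices (or an Intel GPU / a 64-wide subgroup) get the S or M pipelines, everything else the
// L pipeline. pick_matmul_size is an illustrative name; the thresholds mirror the branches
// visible in the patch.
#include <cstdint>

enum class mm_size { S, M, L };

static mm_size pick_matmul_size(int m, int n, uint32_t subgroup_size, bool is_intel) {
    if (is_intel || m <= 32 || n <= 32) {
        return mm_size::S;               // small tiles keep occupancy up for tiny matrices
    }
    if (subgroup_size == 64 || m <= 64 || n <= 64) {
        return mm_size::M;
    }
    return mm_size::L;                   // large tiles amortize loads on big matrices
}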
&ctx->pipeline_matmul_f32_aligned_l : &ctx->pipeline_matmul_f32_l; } -static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) { +static void ggml_vk_matmul(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_matmul(a: (" << a.buffer.buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer.buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer.buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer.buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl; + std::cerr << "ggml_vk_matmul(a: (" << a.buffer->buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer->buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer->buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer->buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl; #endif - ggml_vk_sync_buffers(ctx); + ggml_vk_sync_buffers(subctx); if (split_k == 1) { const std::array pc = { m, n, k, stride_a, stride_b, stride_d, k, ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; - ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch }); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch }); return; } @@ -1919,10 +2109,10 @@ static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer const std::array pc1 = { m, n, k, stride_a, stride_b, stride_d, CEIL_DIV(k, split_k), ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; // Make sure enough workgroups get assigned for split k to work - ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, 
pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch }); - ggml_vk_sync_buffers(ctx); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch }); + ggml_vk_sync_buffers(subctx); const std::array pc2 = { (uint32_t)(m * n * batch), split_k }; - ggml_vk_dispatch_pipeline(ctx, vk_pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 }); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 }); } static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) { @@ -1932,32 +2122,32 @@ static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) { tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; } -static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_type from, ggml_type to) { +static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_backend_vk_context * ctx, ggml_type from, ggml_type to) { if (from == GGML_TYPE_F32 && to == GGML_TYPE_F32) { - return &vk_pipeline_cpy_f32_f32; + return &ctx->pipeline_cpy_f32_f32; } if (from == GGML_TYPE_F32 && to == GGML_TYPE_F16) { - return &vk_pipeline_cpy_f32_f16; + return &ctx->pipeline_cpy_f32_f16; } if (from == GGML_TYPE_F16 && to == GGML_TYPE_F16) { - return &vk_pipeline_cpy_f16_f16; + return &ctx->pipeline_cpy_f16_f16; } std::cerr << "Missing CPY op for types: " << ggml_type_name(from) << " " << ggml_type_name(to) << std::endl; GGML_ASSERT(false); } -static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) { +static void ggml_vk_cpy_to_contiguous(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cpy_to_contiguous((" << tensor << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << "), "; - std::cerr << "buffer in size=" << in.buffer.size << ", buffer out size=" << out.buffer.size << ")" << std::endl; + std::cerr << "buffer in size=" << in.buffer->size << ", buffer out size=" << out.buffer->size << ")" << std::endl; #endif const int tensor_type_size = ggml_type_size(tensor->type); const int dst_type_size = ggml_type_size(buffer_type); const uint32_t ne = tensor->ne[0] * tensor->ne[1] * tensor->ne[2]; - const uint32_t nb2 = aligned ? ggml_vk_align_size(dst_type_size * tensor->ne[0] * tensor->ne[1], vk_device.properties.limits.minStorageBufferOffsetAlignment) / dst_type_size : tensor->ne[0] * tensor->ne[1]; + const uint32_t nb2 = aligned ? 
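// A minimal CPU-side sketch of the split-k scheme dispatched in ggml_vk_matmul above: each of
// split_k slices computes a partial m x n product over its share of k into a scratch buffer,
// and a second pass sums the partials, mirroring the two GPU dispatches (the matmul pipeline
// followed by pipeline_matmul_split_k_reduce). All names are illustrative.
#include <algorithm>
#include <cstdint>
#include <vector>

static void matmul_split_k(const std::vector<float> & a,   // m x k, row-major
                           const std::vector<float> & b,   // k x n, row-major
                           std::vector<float> & d,         // m x n, row-major
                           uint32_t m, uint32_t n, uint32_t k, uint32_t split_k) {
    const uint32_t k_chunk = (k + split_k - 1) / split_k;          // CEIL_DIV(k, split_k)
    std::vector<float> scratch(size_t(m) * n * split_k, 0.0f);     // split_k partial results
    for (uint32_t s = 0; s < split_k; ++s) {                       // first pass: partial matmuls
        const uint32_t k0 = s * k_chunk;
        const uint32_t k1 = std::min(k, k0 + k_chunk);
        for (uint32_t i = 0; i < m; ++i) {
            for (uint32_t j = 0; j < n; ++j) {
                float acc = 0.0f;
                for (uint32_t kk = k0; kk < k1; ++kk) {
                    acc += a[size_t(i) * k + kk] * b[size_t(kk) * n + j];
                }
                scratch[size_t(s) * m * n + size_t(i) * n + j] = acc;
            }
        }
    }
    d.assign(size_t(m) * n, 0.0f);                                 // second pass: reduce the partials
    for (uint32_t s = 0; s < split_k; ++s) {
        for (size_t e = 0; e < size_t(m) * n; ++e) {
            d[e] += scratch[size_t(s) * m * n + e];
        }
    }
}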
ggml_vk_align_size(dst_type_size * tensor->ne[0] * tensor->ne[1], ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size : tensor->ne[0] * tensor->ne[1]; const vk_op_cpy_push_constants pc = { (uint32_t)ne, @@ -1965,11 +2155,11 @@ static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], 1 , (uint32_t)tensor->ne[0] , nb2, 0, }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { in, out }, sizeof(vk_op_cpy_push_constants), &pc, { ne, 1, 1 }); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { in, out }, sizeof(vk_op_cpy_push_constants), &pc, { ne, 1, 1 }); } -static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -1998,17 +2188,17 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qx = nullptr; + vk_buffer d_Qx; size_t qx_buf_offset = 0; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_Qx, qx_buf_offset); - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; src1_uma = d_Qy != nullptr; } @@ -2031,12 +2221,12 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co const int y_ne = ne11 * ne10; const int d_ne = ne11 * ne01; - const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ne01, ne11)); + const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, ne01, ne11)); const bool aligned = ne10 == kpad; const uint32_t split_k = ggml_vk_guess_split_k(ne01, ne11, ne10); - vk_pipeline * pipeline = ggml_vk_guess_matmul_pipeline(true, !f16_f32_kernel, ne01, ne11, aligned); + vk_pipeline * pipeline = ggml_vk_guess_matmul_pipeline(ctx, true, !f16_f32_kernel, ne01, ne11, aligned); const uint64_t qx_sz = ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); @@ -2044,30 +2234,30 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co const uint64_t y_sz = f16_f32_kernel ? 
sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); GGML_ASSERT(d_D->size >= d_buf_offset + d_sz * ne02 * ne03); - vk_buffer* d_X; + vk_buffer d_X; uint64_t x_buf_offset = 0; - vk_buffer* d_Y; + vk_buffer d_Y; uint64_t y_buf_offset = 0; if (load_x) { - d_Qx = &vk_prealloc_qx; + d_Qx = ctx->prealloc_qx; } else if (!src0_uma) { - d_Qx = &extra_src0->buffer_gpu; + d_Qx = extra_src0->buffer_gpu.lock(); qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); } if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if (!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qy != nullptr); } if (qx_needs_dequant) { - d_X = &vk_prealloc_x; + d_X = ctx->prealloc_x; GGML_ASSERT(d_X->size >= x_sz * ne02 * ne03); } else { d_X = d_Qx; @@ -2075,7 +2265,7 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co GGML_ASSERT(qx_sz == x_sz); // NOLINT } if (qy_needs_dequant) { - d_Y = &vk_prealloc_y; + d_Y = ctx->prealloc_y; GGML_ASSERT(d_Y->size >= y_sz * ne02 * ne03); } else { d_Y = d_Qy; @@ -2087,49 +2277,49 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co vk_pipeline * to_fp16_vk_1 = nullptr; if (x_non_contig) { - to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(src0->type, GGML_TYPE_F16); + to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, GGML_TYPE_F16); } else { - to_fp16_vk_0 = ggml_vk_get_to_fp16(src0->type); + to_fp16_vk_0 = ggml_vk_get_to_fp16(ctx, src0->type); } if (y_non_contig) { - to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(src1->type, GGML_TYPE_F16); + to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(ctx, src1->type, GGML_TYPE_F16); } else { - to_fp16_vk_1 = ggml_vk_get_to_fp16(src1->type); + to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne12 * ne13); if (qx_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_0, x_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, x_non_contig ? 1 : ne12 * ne13); } if (qy_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_1, y_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 
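The buffer handling above switches from raw vk_buffer pointers to handles obtained with .lock() (extra->buffer_gpu.lock(), ctx->device.lock()), each followed by a null check. The actual typedefs live elsewhere in ggml-vulkan.cpp and are not part of this hunk; the sketch below only illustrates the lock-and-check pattern, assuming the handles behave like std::shared_ptr/std::weak_ptr, with placeholder names (vk_buffer_struct, tensor_extra, get_buffer).

    // Illustrative only: placeholder types standing in for the real ggml-vulkan ones.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <memory>

    struct vk_buffer_struct { size_t size = 0; };
    using vk_buffer = std::shared_ptr<vk_buffer_struct>;   // assumed ref-counted handle

    struct tensor_extra {
        std::weak_ptr<vk_buffer_struct> buffer_gpu;        // assumed non-owning reference
        uint64_t offset = 0;
    };

    static vk_buffer get_buffer(const tensor_extra & extra) {
        vk_buffer buf = extra.buffer_gpu.lock();           // promote to a strong reference
        assert(buf != nullptr);                            // mirrors GGML_ASSERT(d_D != nullptr)
        return buf;
    }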
1 : ne12 * ne13); } if (split_k > 1) { - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_matmul_split_k_reduce, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, ne12 * ne13); } if (x_non_contig) { - ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_0, src0, { *d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { *d_X, 0, VK_WHOLE_SIZE }, dst->type, false); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, dst->type, false); } else if (load_x || qx_needs_dequant) { if (load_x) { // copy data to device - ggml_vk_h2d_tensor_2d(ctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0)); - vk_staging_offset = qx_sz * ne02 * ne03; + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0)); + ctx->staging_offset = qx_sz * ne02 * ne03; } if (qx_needs_dequant) { const std::vector pc = { (int)ne01, (int)ne10, (int)ne10, (int)ne10 }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *to_fp16_vk_0, { { *d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { *d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1}); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_0, { { d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1}); } } if (y_non_contig) { - ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_1, src1, { *d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { *d_Y, 0, VK_WHOLE_SIZE }, dst->type); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, dst->type); } else if (load_y) { - ggml_vk_h2d_tensor_2d(ctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); } uint32_t stride_batch_x = ne00*ne01; @@ -2144,16 +2334,16 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co } // compute - ggml_vk_matmul(ctx, *pipeline, { *d_X, x_buf_offset, x_sz * ne02 * ne03 }, { *d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { *d_D, d_buf_offset, d_sz * ne12 * ne13 }, { vk_prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT + ggml_vk_matmul(ctx, subctx, *pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data); - ggml_vk_buffer_read_async(ctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13); + ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13); } } -static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_vec_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << 
src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -2184,17 +2374,17 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qx = nullptr; + vk_buffer d_Qx; size_t qx_buf_offset = 0; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_Qx, qx_buf_offset); - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; src1_uma = d_Qy != nullptr; } @@ -2214,42 +2404,42 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 const uint64_t y_ne = ne11 * ne10; const uint64_t d_ne = ne11 * ne01; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); - const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) : qx_sz; + const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : qx_sz; const uint64_t y_sz = f16_f32_kernel ? 
sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_X; + vk_buffer d_X; uint64_t x_buf_offset = 0; - vk_buffer* d_Y; + vk_buffer d_Y; uint64_t y_buf_offset = 0; if (load_x) { - d_Qx = &vk_prealloc_qx; + d_Qx = ctx->prealloc_qx; } else if(!src1_uma) { - d_Qx = &extra_src0->buffer_gpu; + d_Qx = extra_src0->buffer_gpu.lock(); qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); } if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if(!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qy != nullptr); } if (qx_needs_dequant) { - d_X = &vk_prealloc_x; + d_X = ctx->prealloc_x; } else { d_X = d_Qx; x_buf_offset = qx_buf_offset; GGML_ASSERT(qx_sz == x_sz); } if (qy_needs_dequant) { - d_Y = &vk_prealloc_y; + d_Y = ctx->prealloc_y; } else { d_Y = d_Qy; y_buf_offset = qy_buf_offset; @@ -2259,39 +2449,39 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 vk_pipeline * to_fp16_vk_0 = nullptr; vk_pipeline* to_fp16_vk_1 = nullptr; if (x_non_contig) { - to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(src0->type, src0->type); + to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, src0->type); } if (y_non_contig) { - to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(src1->type, src1->type); + to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(ctx, src1->type, src1->type); } else { - to_fp16_vk_1 = ggml_vk_get_to_fp16(src1->type); + to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } - vk_pipeline* dmmv = ggml_vk_get_dequantize_mul_mat_vec(src0->type); + vk_pipeline* dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type); GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT GGML_ASSERT(dmmv != nullptr); // Allocate descriptor sets if (qx_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_0, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, 1); } if (qy_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_1, y_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 
1 : ne12 * ne13); } - ggml_vk_pipeline_allocate_descriptor_sets(*dmmv, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *dmmv, ne12 * ne13); if (x_non_contig) { - GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment)); - ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_0, src0, { *d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { *d_X, 0, VK_WHOLE_SIZE }, src0->type); + GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment)); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, src0->type); } else if (load_x) { // copy data to device - ggml_vk_h2d_tensor_2d(ctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0)); } if (y_non_contig) { GGML_ASSERT(y_sz == ggml_type_size(src1->type) * y_ne); - ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_1, src1, { *d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { *d_Y, 0, VK_WHOLE_SIZE }, src1->type); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, src1->type); } else if (load_y) { - ggml_vk_h2d_tensor_2d(ctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); } for (uint64_t i13 = 0; i13 < ne13; i13++) { @@ -2306,34 +2496,34 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 const uint64_t y_offset = y_buf_offset + y_sz * it_idx1; const uint64_t d_offset = d_buf_offset + d_sz * it_idx1; - const uint64_t y_buffer_offset = (y_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t y_buffer_offset = (y_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t y_shader_offset = y_offset - y_buffer_offset; - const uint64_t d_buffer_offset = (d_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_offset - d_buffer_offset; if (!y_non_contig && qy_needs_dequant) { const std::vector pc = { (int)ne11, (int)ne10, (int)ne10, (int)ne10 }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *to_fp16_vk_1, { { *d_Qy, qy_offset, qy_sz }, { *d_Y, y_offset, y_sz } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)y_ne, 1, 1}); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_1, { { d_Qy, qy_offset, qy_sz }, { d_Y, y_offset, y_sz } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)y_ne, 1, 1}); } // compute const std::array pc = { (int)ne00, (int)(y_shader_offset / ggml_type_size(src1->type)), (int)(d_shader_offset / ggml_type_size(dst->type))}; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *dmmv, { { *d_X, x_offset, x_sz }, { *d_Y, y_buffer_offset, y_sz + y_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, 
*dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, d_offset, d, sizeof(float) * d_ne); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_offset, d, sizeof(float) * d_ne); } } } } -static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_p021_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -2362,13 +2552,13 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2378,51 +2568,51 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor const uint64_t y_ne = ne10 * ne11 * ne12; const uint64_t d_ne = ne01 * ne11 * ne12; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_Qx = &extra_src0->buffer_gpu; + vk_buffer d_Qx = extra_src0->buffer_gpu.lock(); const uint64_t qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if (!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qx != nullptr); } // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_mul_mat_vec_p021_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, 1); - const 
uint64_t qy_buffer_offset = (qy_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset; - const uint64_t d_buffer_offset = (d_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset; if (load_y) { - ggml_vk_h2d_tensor_2d(ctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); } // compute const std::array pc = { (uint32_t)ne00, (uint32_t)ne01, (uint32_t)ne02, (uint32_t)ne12, (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, vk_pipeline_mul_mat_vec_p021_f16_f32, { { *d_Qx, qx_buf_offset, qx_sz }, { *d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) dst->data; - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); } } -static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_nc_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -2454,13 +2644,13 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy = 
nullptr; size_t qy_buf_offset = 0; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2475,43 +2665,43 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * const uint64_t qy_sz = ggml_nbytes(src1); const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_Qx = &extra_src0->buffer_gpu; + vk_buffer d_Qx = extra_src0->buffer_gpu.lock(); const uint64_t qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qx != nullptr); } // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_mul_mat_vec_nc_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, 1); - const uint64_t qy_buffer_offset = (qy_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset; - const uint64_t d_buffer_offset = (d_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset; if (load_y) { - ggml_vk_h2d_tensor_2d(ctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); } // compute const std::array pc = { (uint32_t)ne00, (uint32_t)ne01, row_stride_x, channel_stride_x, (uint32_t)(ne12 / ne02), (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, vk_pipeline_mul_mat_vec_nc_f16_f32, { { *d_Qx, qx_buf_offset, qx_sz }, { *d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) dst->data; - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); } } @@ -2528,22 +2718,22 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr 
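The qy/d offset handling in the mul_mat_vec paths above repeats one computation: a tensor's byte offset is rounded down to the device's minStorageBufferOffsetAlignment for the descriptor binding, and the remainder is handed to the shader as an element offset. A minimal standalone version of that arithmetic, with generic names (split_for_descriptor, align, type_size) that are not part of the file:

    #include <cstdint>

    struct split_offset {
        uint64_t buffer_offset;   // aligned byte offset used when binding the buffer
        uint64_t shader_elems;    // leftover offset, expressed in elements for the shader
    };

    static split_offset split_for_descriptor(uint64_t byte_offset, uint64_t align, uint64_t type_size) {
        const uint64_t buffer_offset = (byte_offset / align) * align;  // round down to the alignment
        const uint64_t shader_offset = byte_offset - buffer_offset;    // remainder in bytes
        return { buffer_offset, shader_offset / type_size };
    }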
((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU); } -static void ggml_vk_mul_mat(vk_context * ctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +static void ggml_vk_mul_mat(ggml_backend_vk_context * ctx, vk_context * subctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat(" << src0 << ", " << src1 << ", " << dst << ")" << std::endl; #endif if (src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { - ggml_vk_mul_mat_vec_p021_f16_f32(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_p021_f16_f32(ctx, subctx, src0, src1, dst); } else if (src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { - ggml_vk_mul_mat_vec_nc_f16_f32(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_nc_f16_f32(ctx, subctx, src0, src1, dst); } else if (src1->ne[1] == 1 && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type))) { - ggml_vk_mul_mat_vec_q_f16(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_q_f16(ctx, subctx, src0, src1, dst); } else { - ggml_vk_mul_mat_q_f16(ctx, src0, src1, dst); + ggml_vk_mul_mat_q_f16(ctx, subctx, src0, src1, dst); } } -static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { // guaranteed to be an integer due to the check in ggml_can_repeat const uint64_t ne0 = dst->ne[0]; const uint64_t ne1 = dst->ne[1]; @@ -2579,9 +2769,9 @@ static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; - const vk_buffer* src_buf = &extra_src0->buffer_gpu; + const vk_buffer src_buf = extra_src0->buffer_gpu.lock(); const uint64_t src_offset = extra_src0->offset; - vk_buffer* dst_buf = &extra->buffer_gpu; + vk_buffer dst_buf = extra->buffer_gpu.lock(); const uint64_t dst_offset = extra->offset; std::vector copies; @@ -2606,78 +2796,79 @@ static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(src_buf->buffer, dst_buf->buffer, copies); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(src_buf->buffer, dst_buf->buffer, copies); - (void) src1; + GGML_UNUSED(ctx); + GGML_UNUSED(src1); } -static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op) { +static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op) { switch (op) { case GGML_OP_ADD: if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_add_f32; + return &ctx->pipeline_add_f32; } return nullptr; case GGML_OP_GET_ROWS: GGML_ASSERT(src1->type == GGML_TYPE_I32); if (dst->type == GGML_TYPE_F16) { - return &vk_pipeline_get_rows[src0->type]; + return &ctx->pipeline_get_rows[src0->type]; } if (dst->type == GGML_TYPE_F32) { - return &vk_pipeline_get_rows_f32[src0->type]; + return &ctx->pipeline_get_rows_f32[src0->type]; } return nullptr; case GGML_OP_MUL: if (src0->type == GGML_TYPE_F32 && 
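For readability, the dispatch order in ggml_vk_mul_mat above can be summarized as a standalone decision helper. The predicates mirror the conditions in the hunk (F16 or quantized type, permuted/contiguous/transposed layout, single-column src1), but the enum and function below are illustrative names only, not part of the patch:

    enum class mul_mat_path { vec_p021, vec_nc, vec_q, full };

    static mul_mat_path select_mul_mat_path(bool src0_f16, bool src0_quantized,
                                            bool src0_permuted, bool src1_permuted,
                                            bool src0_contiguous, bool src1_transposed,
                                            bool src1_single_col) {
        if (src0_f16 && src0_permuted && src1_permuted && src1_single_col) {
            return mul_mat_path::vec_p021;   // ggml_vk_mul_mat_vec_p021_f16_f32
        }
        if (src0_f16 && !src0_contiguous && !src1_transposed && src1_single_col) {
            return mul_mat_path::vec_nc;     // ggml_vk_mul_mat_vec_nc_f16_f32
        }
        if (src1_single_col && (src0_f16 || src0_quantized)) {
            return mul_mat_path::vec_q;      // ggml_vk_mul_mat_vec_q_f16
        }
        return mul_mat_path::full;           // ggml_vk_mul_mat_q_f16
    }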
src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_mul_f32; + return &ctx->pipeline_mul_f32; } return nullptr; case GGML_OP_SCALE: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_scale_f32; + return &ctx->pipeline_scale_f32; } return nullptr; case GGML_OP_SQR: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_sqr_f32; + return &ctx->pipeline_sqr_f32; } return nullptr; case GGML_OP_CLAMP: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_clamp_f32; + return &ctx->pipeline_clamp_f32; } return nullptr; case GGML_OP_CPY: case GGML_OP_CONT: case GGML_OP_DUP: - return ggml_vk_get_cpy_pipeline(src0->type, dst->type); + return ggml_vk_get_cpy_pipeline(ctx, src0->type, dst->type); case GGML_OP_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_norm_f32; + return &ctx->pipeline_norm_f32; } return nullptr; case GGML_OP_RMS_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rms_norm_f32; + return &ctx->pipeline_rms_norm_f32; } return nullptr; case GGML_OP_UNARY: switch (ggml_get_unary_op(dst)) { case GGML_UNARY_OP_SILU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_silu_f32; + return &ctx->pipeline_silu_f32; } break; case GGML_UNARY_OP_GELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_gelu_f32; + return &ctx->pipeline_gelu_f32; } break; case GGML_UNARY_OP_RELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_relu_f32; + return &ctx->pipeline_relu_f32; } break; default: @@ -2686,12 +2877,12 @@ static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml return nullptr; case GGML_OP_DIAG_MASK_INF: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_diag_mask_inf_f32; + return &ctx->pipeline_diag_mask_inf_f32; } return nullptr; case GGML_OP_SOFT_MAX: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_soft_max_f32; + return &ctx->pipeline_soft_max_f32; } return nullptr; case GGML_OP_ROPE: @@ -2706,17 +2897,17 @@ static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml if (is_neox) { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rope_neox_f32; + return &ctx->pipeline_rope_neox_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &vk_pipeline_rope_neox_f16; + return &ctx->pipeline_rope_neox_f16; } } else { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rope_f32; + return &ctx->pipeline_rope_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &vk_pipeline_rope_f16; + return &ctx->pipeline_rope_f16; } } return nullptr; @@ -2735,13 +2926,8 @@ static ggml_vk_func_t ggml_vk_op_get_func(ggml_op op) { } } -#ifdef GGML_VULKAN_CHECK_RESULTS -static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name); -static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * tensor); -#endif - template -static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op, const PC&& pc) { +static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, 
ggml_op op, const PC&& pc) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { @@ -2768,7 +2954,7 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm const uint64_t nb2 = dst->nb[2]; const uint64_t nb3 = dst->nb[3]; - vk_pipeline * pipeline = ggml_vk_op_get_pipeline(src0, src1, dst, op); + vk_pipeline * pipeline = ggml_vk_op_get_pipeline(ctx, src0, src1, dst, op); ggml_vk_func_t op_func; if (pipeline == nullptr) { @@ -2782,7 +2968,7 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm GGML_ASSERT(false); } - op_func(ctx, src0, src1, dst); + op_func(ctx, subctx, src0, src1, dst); return; } @@ -2790,19 +2976,19 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; - vk_buffer * d_X = nullptr; + vk_buffer d_X = nullptr; size_t x_buf_offset = 0; - vk_buffer * d_Y = nullptr; + vk_buffer d_Y = nullptr; size_t y_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_X, x_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_X, x_buf_offset); src0_uma = d_X != nullptr; if (use_src1) { - ggml_vk_host_get(src1->data, d_Y, y_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Y, y_buf_offset); src1_uma = d_Y != nullptr; } } @@ -2810,30 +2996,31 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm const bool transfer_src0 = src0->backend != GGML_BACKEND_GPU && !src0_uma; const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_GPU && !src1_uma; - uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, vk_device.properties.limits.minStorageBufferOffsetAlignment); - uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, vk_device.properties.limits.minStorageBufferOffsetAlignment) : 0; + uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); + uint64_t y_sz = use_src1 ? 
ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : 0; uint64_t d_sz = ggml_type_size(dst->type) * ne0; + vk_buffer d_D = extra->buffer_gpu.lock(); + // Workaround for tiny tensor inputs on ROPE - if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > extra_src1->buffer_gpu.size) { + if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > d_D->size) { y_sz = VK_WHOLE_SIZE; } - vk_buffer* d_D = &extra->buffer_gpu; GGML_ASSERT(d_D != nullptr); - uint64_t d_buf_offset = (extra->offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + uint64_t d_buf_offset = (extra->offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; GGML_ASSERT(d_buf_offset == extra->offset || op == GGML_OP_CPY); // NOLINT if (transfer_src0) { - d_X = &vk_prealloc_qx; + d_X = ctx->prealloc_qx; } else if(!src0_uma) { - d_X = &extra_src0->buffer_gpu; + d_X = extra_src0->buffer_gpu.lock(); x_buf_offset = extra_src0->offset; GGML_ASSERT(d_X != nullptr); } if (transfer_src1) { - d_Y = &vk_prealloc_qy; + d_Y = ctx->prealloc_qy; } else if (use_src1 && !src1_uma) { - d_Y = &extra_src1->buffer_gpu; + d_Y = extra_src1->buffer_gpu.lock(); y_buf_offset = extra_src1->offset; GGML_ASSERT(d_Y != nullptr); } @@ -2856,16 +3043,16 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm // copy src0 to device if (transfer_src0) { - ggml_vk_h2d_tensor_2d(ctx, d_X, 0, src0, 0, 0, ggml_nrows(src0)); - vk_staging_offset = x_sz * ne02 * ne03; + ggml_vk_h2d_tensor_2d(ctx, subctx, d_X, 0, src0, 0, 0, ggml_nrows(src0)); + ctx->staging_offset = x_sz * ne02 * ne03; } if (transfer_src1) { - ggml_vk_h2d_tensor_2d(ctx, d_Y, 0, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Y, 0, src1, 0, 0, ggml_nrows(src1)); } // Single call if dimension 2 is contiguous if (op == GGML_OP_CPY || (ggml_is_contiguous(src0) && (src1 == nullptr || ggml_is_contiguous(src1)))) { - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, 1); switch (dst->op) { case GGML_OP_NORM: @@ -2896,24 +3083,24 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (!use_src1 && op == GGML_OP_SOFT_MAX) { // Empty src1 is possible on soft_max, but the shader needs a buffer - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { vk_prealloc_y, 0, vk_prealloc_y.size }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src1) { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { *d_Y, y_buf_offset, y_sz }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + 
ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_CPU && op == GGML_OP_CPY) { - ggml_vk_d2h_tensor_2d(ctx, d_D, 0, dst); + ggml_vk_d2h_tensor_2d(ctx, subctx, d_D, 0, dst); } else if(dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) dst->data; - ggml_vk_buffer_read_async(ctx, d_D, 0, d, d_sz); + ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, d_sz); } } else { - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, ne02 * ne03); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne02 * ne03); switch (dst->op) { case GGML_OP_NORM: @@ -2940,60 +3127,60 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (!use_src1 && op == GGML_OP_SOFT_MAX) { // Empty src1 is possible on soft_max, but the shader needs a buffer - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { vk_prealloc_y, 0, vk_prealloc_y.size }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src1) { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset + x_offset, x_sz }, { *d_Y, y_buf_offset + y_offset, y_sz }, { *d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_Y, y_buf_offset + y_offset, y_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } else { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset + x_offset, x_sz }, { *d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host - ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); } } } } } -static void ggml_vk_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_repeat(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_get_rows(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_get_rows(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, 
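The buffer selection at the top of ggml_vk_op_f32 above follows a fixed priority: a UMA host mapping if the device exposes one, otherwise the preallocated staging buffer for CPU-resident tensors, otherwise the tensor's own device buffer. A condensed sketch of that decision, with stand-in types (buffer_ptr, source_binding, pick_source_buffer) rather than the real ggml-vulkan ones:

    #include <cstddef>
    #include <memory>

    struct buffer { size_t size; };
    using buffer_ptr = std::shared_ptr<buffer>;

    struct source_binding {
        buffer_ptr buf;
        size_t     offset;
        bool       needs_upload;   // true when the data still has to be copied to the device
    };

    static source_binding pick_source_buffer(buffer_ptr uma_buf, size_t uma_offset,
                                             bool on_gpu, buffer_ptr gpu_buf, size_t gpu_offset,
                                             buffer_ptr prealloc) {
        if (uma_buf != nullptr) {
            return { uma_buf, uma_offset, false };   // unified memory: use the host allocation directly
        }
        if (!on_gpu) {
            return { prealloc, 0, true };            // CPU tensor: stage through the preallocated buffer
        }
        return { gpu_buf, gpu_offset, false };       // already resident on the device
    }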
src1, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_add(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ADD, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_add(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ADD, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_mul(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_MUL, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_mul(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_MUL, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_scale(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_scale(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_SCALE, { (uint32_t)ggml_nelements(src0), 0, op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SCALE, { (uint32_t)ggml_nelements(src0), 0, op_params[0], 0.0f }); } -static void ggml_vk_sqr(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_SQR, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); +static void ggml_vk_sqr(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SQR, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); } -static void ggml_vk_clamp(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_clamp(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_CLAMP, { (uint32_t)ggml_nelements(src0), 0, op_params[0], op_params[1] }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CLAMP, { (uint32_t)ggml_nelements(src0), 0, op_params[0], op_params[1] }); } -static void ggml_vk_cpy(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_cpy(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; const int src0_type_size = ggml_type_size(src0->type); const int dst_type_size = ggml_type_size(dst->type); - const uint32_t d_offset = (extra->offset % vk_device.properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_CPY, { + const uint32_t d_offset = (extra->offset % ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CPY, { (uint32_t)ggml_nelements(src0), (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->nb[0] / src0_type_size, 
(uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, @@ -3001,30 +3188,30 @@ static void ggml_vk_cpy(vk_context * ctx, const ggml_tensor * src0, ggml_tensor }); } -static void ggml_vk_norm(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); +static void ggml_vk_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); } -static void ggml_vk_rms_norm(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_rms_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); } -static void ggml_vk_unary(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); +static void ggml_vk_unary(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); } -static void ggml_vk_diag_mask_inf(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_diag_mask_inf(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { int32_t * op_params = (int32_t *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); } -static void ggml_vk_soft_max(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_soft_max(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_SOFT_MAX, { (uint32_t)src0->ne[0], (uint32_t)(src1 != nullptr ? ggml_nrows(src1) : 0), op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_SOFT_MAX, { (uint32_t)src0->ne[0], (uint32_t)(src1 != nullptr ? 
ggml_nrows(src1) : 0), op_params[0], 0.0f }); } -static void ggml_vk_rope(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const int n_dims = ((int32_t *) dst->op_params)[1]; const int mode = ((int32_t *) dst->op_params)[2]; // const int n_ctx = ((int32_t *) dst->op_params)[3]; @@ -3047,19 +3234,19 @@ static void ggml_vk_rope(vk_context * ctx, const ggml_tensor * src0, const ggml_ if (is_neox) { const float theta_scale = powf(freq_base, -2.0f/n_dims); const float inv_ndims = -1.0f / n_dims; - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); } else { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); } } -static void ggml_vk_nop(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_nop(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { // If backend is CPU, data from src0 has to be copied off the device if (dst->backend == GGML_BACKEND_CPU) { ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; - vk_buffer * d_D = &extra_src0->buffer_gpu; - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, 0, dst->data, d_D->size); + vk_buffer d_D = extra_src0->buffer_gpu.lock(); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, dst->data, d_D->size); } } @@ -3096,7 +3283,7 @@ static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0 } template -static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { +static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_matmul(" << m << ", " << n << ", " << k << ", " << batch << ", " << num_it << ", " << split_k << ", " << shader_size << ")" << std::endl; #endif @@ -3108,39 +3295,39 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size std::string shname; if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_s; + p = &ctx->pipeline_matmul_f32_aligned_s; shname = "F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_s; + p = &ctx->pipeline_matmul_f16_f32_aligned_s; shname = "F16_F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_s; + p = &ctx->pipeline_matmul_f16_aligned_s; shname = "F16_ALIGNED_S"; } else { GGML_ASSERT(false); } } else if (shader_size == 1) { if 
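The per-op wrappers above all reduce to the same shape: read the scalar arguments from dst->op_params, pack them into a small POD struct, and let the templated ggml_vk_op_f32 forward sizeof(PC) bytes as push constants. The struct and helper below are stand-ins that only show the packing step; the field names are assumptions, not the file's actual push-constant layout:

    #include <cstdint>

    struct op_push_constants {    // stand-in for the vk_op_*_push_constants structs
        uint32_t KX;
        uint32_t KY;
        float    param1;
        float    param2;
    };

    static op_push_constants pack_clamp_pc(uint32_t n_elements, const float * op_params) {
        // mirrors ggml_vk_clamp: element count plus the min/max clamp bounds
        return { n_elements, 0u, op_params[0], op_params[1] };
    }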
(std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_m; + p = &ctx->pipeline_matmul_f32_aligned_m; shname = "F32_ALIGNED_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_m; + p = &ctx->pipeline_matmul_f16_f32_aligned_m; shname = "F16_F32_ALIGNED_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_m; + p = &ctx->pipeline_matmul_f16_aligned_m; shname = "F16_ALIGNED_M"; } else { GGML_ASSERT(false); } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_l; + p = &ctx->pipeline_matmul_f32_aligned_l; shname = "F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_l; + p = &ctx->pipeline_matmul_f16_f32_aligned_l; shname = "F16_F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_l; + p = &ctx->pipeline_matmul_f16_aligned_l; shname = "F16_ALIGNED_L"; } else { GGML_ASSERT(false); @@ -3154,56 +3341,56 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size if (k != kpad) { if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_s; + p = &ctx->pipeline_matmul_f32_s; shname = "F32_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_s; + p = &ctx->pipeline_matmul_f16_f32_s; shname = "F16_F32_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_s; + p = &ctx->pipeline_matmul_f16_s; shname = "F16_S"; } } else if (shader_size == 1) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_m; + p = &ctx->pipeline_matmul_f32_m; shname = "F32_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_m; + p = &ctx->pipeline_matmul_f16_f32_m; shname = "F16_F32_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_m; + p = &ctx->pipeline_matmul_f16_m; shname = "F16_M"; } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_l; + p = &ctx->pipeline_matmul_f32_l; shname = "F32_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_l; + p = &ctx->pipeline_matmul_f16_f32_l; shname = "F16_F32_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_l; + p = &ctx->pipeline_matmul_f16_l; shname = "F16_L"; } } } - ggml_vk_pipeline_allocate_descriptor_sets(*p, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, *p, num_it); if (split_k > 1) { - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_matmul_split_k_reduce, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, num_it); - if (vk_prealloc_split_k.size < sizeof(float) * d_ne * split_k) { + if (ctx->prealloc_split_k == nullptr || ctx->prealloc_split_k->size < sizeof(float) * d_ne * split_k) { // Resize buffer - if (vk_prealloc_split_k.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_split_k); + if (ctx->prealloc_split_k != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_split_k); } - vk_prealloc_split_k = ggml_vk_create_buffer_check(sizeof(float) * d_ne * split_k, vk::MemoryPropertyFlagBits::eDeviceLocal); + ctx->prealloc_split_k = ggml_vk_create_buffer_check(ctx, sizeof(float) * d_ne * split_k, vk::MemoryPropertyFlagBits::eDeviceLocal); } } - vk_buffer d_X = ggml_vk_create_buffer_check(sizeof(X_TYPE) * x_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer d_Y = 
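The split_k preallocation above uses a grow-only strategy: the buffer is recreated only when the requested size exceeds what is already allocated, otherwise it is reused as-is. A minimal sketch of that policy, assuming a shared_ptr-style buffer handle and using create_buffer/ensure_capacity as placeholder names:

    #include <cstddef>
    #include <memory>

    struct buffer { size_t size = 0; };
    using buffer_ptr = std::shared_ptr<buffer>;

    static buffer_ptr create_buffer(size_t size) {
        buffer_ptr b = std::make_shared<buffer>();   // stands in for ggml_vk_create_buffer_check
        b->size = size;
        return b;
    }

    static void ensure_capacity(buffer_ptr & prealloc, size_t required) {
        if (prealloc == nullptr || prealloc->size < required) {
            prealloc.reset();                    // drop the old allocation, if any
            prealloc = create_buffer(required);  // grow to the new requirement
        }
    }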
ggml_vk_create_buffer_check(sizeof(Y_TYPE) * y_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer d_D = ggml_vk_create_buffer_check(sizeof(float) * d_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_X = ggml_vk_create_buffer_check(ctx, sizeof(X_TYPE) * x_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_Y = ggml_vk_create_buffer_check(ctx, sizeof(Y_TYPE) * y_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_D = ggml_vk_create_buffer_check(ctx, sizeof(float) * d_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); X_TYPE* x = (X_TYPE *) malloc(sizeof(X_TYPE) * x_ne); Y_TYPE* y = (Y_TYPE *) malloc(sizeof(Y_TYPE) * y_ne); @@ -3228,26 +3415,26 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size } } - ggml_vk_buffer_write(&d_X, 0, x, sizeof(X_TYPE) * k * m * batch); - ggml_vk_buffer_write(&d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); + ggml_vk_buffer_write(ctx, d_X, 0, x, sizeof(X_TYPE) * k * m * batch); + ggml_vk_buffer_write(ctx, d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); for (size_t i = 0; i < num_it; i++) { - ggml_vk_ctx_begin(ctx); - ggml_vk_matmul(ctx, *p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(vk_prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); - ggml_vk_ctx_end(ctx); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_matmul(ctx, subctx, *p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); + ggml_vk_ctx_end(subctx); } auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double time = std::chrono::duration_cast(end-begin).count() / 1000.0; // copy dst to host - ggml_vk_buffer_read(&d_D, 0, d, sizeof(float) * d_ne); + ggml_vk_buffer_read(ctx, d_D, 0, d, sizeof(float) * d_ne); float * d_chk = (float *) malloc(sizeof(float) * d_ne); @@ -3285,14 +3472,14 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size src1_ggml->data = y; tensor_ggml->data = d_chk; - vk_disable = true; + ctx->disable = true; ggml_cgraph * cgraph = ggml_new_graph(ggml_ctx); ggml_build_forward_expand(cgraph, tensor_ggml); ggml_graph_compute_with_ctx(ggml_ctx, cgraph, 1); - vk_disable = false; + ctx->disable = false; ggml_free(ggml_ctx); @@ -3325,7 +3512,7 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size if (split_k > 1) { float * split_k_buf = (float *) malloc(sizeof(float) * d_ne * split_k); - ggml_vk_buffer_read(&vk_prealloc_split_k, 0, split_k_buf, sizeof(float) * d_ne * split_k); + ggml_vk_buffer_read(ctx, ctx->prealloc_split_k, 0, split_k_buf, sizeof(float) * d_ne * split_k); std::cerr << "d_buf0: " << std::endl << std::endl; ggml_vk_print_matrix_area(split_k_buf, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); @@ -3345,15 +3532,15 @@ static 
void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size free(d_chk); - ggml_vk_queue_cleanup(vk_device.transfer_queue); - ggml_vk_queue_cleanup(vk_device.compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); ggml_vk_destroy_buffer(d_X); ggml_vk_destroy_buffer(d_Y); ggml_vk_destroy_buffer(d_D); - ggml_vk_pipeline_cleanup(*p); - ggml_vk_pipeline_cleanup(vk_pipeline_matmul_split_k_reduce); + ggml_pipeline_cleanup(*p); + ggml_pipeline_cleanup(ctx->pipeline_matmul_split_k_reduce); free(x); free(y); @@ -3392,7 +3579,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, int i0, int i1 } } -static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) { +static void ggml_vk_test_h2d_nc(ggml_backend_vk_context * ctx, size_t ne0, size_t ne1, size_t ne2, size_t ne3) { const size_t ne = ne0 * ne1 * ne2 * ne3; ggml_init_params iparams = { @@ -3406,7 +3593,7 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) ggml_tensor * tensor = ggml_new_tensor_4d(ggml_ctx, GGML_TYPE_F32, ne0, ne2, ne1, ne3); // NOLINT ggml_tensor * result_tensor = ggml_new_tensor_4d(ggml_ctx, GGML_TYPE_F32, ne0, ne1, ne2, ne3); - float * data = (float *) ggml_vk_host_malloc(ggml_nbytes(tensor)); + float * data = (float *) ggml_vk_host_malloc(ctx, ggml_nbytes(tensor)); tensor->data = data; float * result_data = (float *) malloc(ggml_nbytes(tensor)); @@ -3426,19 +3613,19 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) data[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; } - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); - vk_buffer buffer = ggml_vk_create_buffer_check(ggml_nbytes(tensor), vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer buffer = ggml_vk_create_buffer_check(ctx, ggml_nbytes(tensor), vk::MemoryPropertyFlagBits::eDeviceLocal); - ggml_vk_h2d_tensor_2d(ctx, &buffer, 0, tensor, 0, 0, ggml_nrows(tensor)); + ggml_vk_h2d_tensor_2d(ctx, subctx, buffer, 0, tensor, 0, 0, ggml_nrows(tensor)); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_h2d_nc waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - ggml_vk_buffer_read(&buffer, 0, result_data, ggml_nbytes(tensor)); + ggml_vk_buffer_read(ctx, buffer, 0, result_data, ggml_nbytes(tensor)); double avg_err = 0.0; int first_err_i0 = -1; @@ -3483,22 +3670,22 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) ggml_vk_destroy_buffer(buffer); - ggml_vk_host_free(data); + ggml_vk_host_free(ctx, data); free(result_data); } -static void ggml_vk_test_transfer(size_t ne, bool pinned) { +static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool pinned) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_transfer(" << ne << ")" << std::endl; #endif // Check transfers are correct - vk_buffer buffer = ggml_vk_create_buffer_check(sizeof(float) * ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer buffer = 
ggml_vk_create_buffer_check(ctx, sizeof(float) * ne, vk::MemoryPropertyFlagBits::eDeviceLocal); float * x; float * y; if (pinned) { - x = (float *) ggml_vk_host_malloc(sizeof(float) * ne); - y = (float *) ggml_vk_host_malloc(sizeof(float) * ne); + x = (float *) ggml_vk_host_malloc(ctx, sizeof(float) * ne); + y = (float *) ggml_vk_host_malloc(ctx, sizeof(float) * ne); } else { x = (float *) malloc(sizeof(float) * ne); y = (float *) malloc(sizeof(float) * ne); @@ -3508,42 +3695,42 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { x[i] = rand() / (float)RAND_MAX; } - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_buffer_write_async(ctx, &buffer, 0, x, sizeof(float) * ne); + ggml_vk_buffer_write_async(ctx, subctx, buffer, 0, x, sizeof(float) * ne); - for (auto& cpy : ctx->in_memcpys) { + for (auto& cpy : subctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx->in_memcpys.clear(); + subctx->in_memcpys.clear(); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double ms_to_gpu = std::chrono::duration_cast(end-begin).count() / 1000.0; - ggml_vk_ctx_begin(ctx); + ggml_vk_ctx_begin(ctx, subctx); begin = std::chrono::high_resolution_clock::now(); - ggml_vk_buffer_read_async(ctx, &buffer, 0, y, sizeof(float) * ne); + ggml_vk_buffer_read_async(ctx, subctx, buffer, 0, y, sizeof(float) * ne); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : ctx->out_memcpys) { + for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx->out_memcpys.clear(); + subctx->out_memcpys.clear(); end = std::chrono::high_resolution_clock::now(); @@ -3561,15 +3748,15 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { ggml_vk_destroy_buffer(buffer); if (pinned) { - ggml_vk_host_free(x); - ggml_vk_host_free(y); + ggml_vk_host_free(ctx, x); + ggml_vk_host_free(ctx, y); } else { free(x); free(y); } } -static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { +static void ggml_vk_test_dequant(ggml_backend_vk_context * ctx, size_t ne, ggml_type quant) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_dequant(" << ne << ")" << std::endl; #endif @@ -3578,8 +3765,8 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { const size_t qx_sz = ne * ggml_type_size(quant)/ggml_blck_size(quant); float * x = (float *) malloc(x_sz); void * qx = malloc(qx_sz); - vk_buffer qx_buf = ggml_vk_create_buffer_check(qx_sz, 
vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer x_buf = ggml_vk_create_buffer_check(x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer qx_buf = ggml_vk_create_buffer_check(ctx, qx_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer x_buf = ggml_vk_create_buffer_check(ctx, x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); ggml_fp16_t * x_chk = (ggml_fp16_t *) malloc(x_sz_f16); for (size_t i = 0; i < ne; i++) { @@ -3588,7 +3775,7 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { std::vector hist_cur(1 << 4, 0); - vk_pipeline& p = vk_pipeline_dequant[quant]; + vk_pipeline& p = ctx->pipeline_dequant[quant]; switch(quant) { case GGML_TYPE_Q4_0: @@ -3625,27 +3812,26 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { GGML_ASSERT(false); } - ggml_vk_pipeline_allocate_descriptor_sets(p, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, p, 1); - ggml_vk_buffer_write(&qx_buf, 0, qx, qx_sz); + ggml_vk_buffer_write(ctx, qx_buf, 0, qx, qx_sz); - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); const std::vector pc = { 1, (int)ne, (int)ne, (int)ne }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); - ggml_vk_ctx_end(ctx); + ggml_vk_dispatch_pipeline(ctx, subctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); + ggml_vk_ctx_end(subctx); auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_dequant waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double ms_dequant = std::chrono::duration_cast(end-begin).count() / 1000.0; - ggml_vk_buffer_read(&x_buf, 0, x_chk, x_sz_f16); + ggml_vk_buffer_read(ctx, x_buf, 0, x_chk, x_sz_f16); double avg_err = 0.0; for (size_t i = 0; i < ne; i++) { @@ -3687,15 +3873,15 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph return nullptr; } -void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ +static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggml_tensor * node){ #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; + std::cerr << "ggml_ctx->preallocate_buffers_graph(" << node << ")" << std::endl; #endif const bool any_on_device = node->backend == GGML_BACKEND_GPU || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_GPU)); - if (vk_disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { return; } @@ -3735,16 +3921,16 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ const uint32_t y_ne = ne10 * ne11; const uint32_t d_ne = ne20 * ne21; - const uint64_t qx_sz = use_src0 ? 
ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; + const uint64_t qx_sz = use_src0 ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; const uint64_t split_k_size = split_k > 1 ? 
d_sz * 4 : 0; - if (extra->buffer_gpu.size == 0) { + if (extra->buffer_gpu.expired()) { // Workaround for CPU backend BLAS matmul calls - extra->buffer_gpu = ggml_vk_create_buffer_temp(d_sz); + extra->buffer_gpu = ggml_vk_create_buffer_temp(ctx, d_sz); } switch (node->op) { @@ -3779,23 +3965,23 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ } break; case GGML_OP_MUL_MAT: - if (vk_prealloc_size_qx < qx_sz) { - vk_prealloc_size_qx = qx_sz; + if (ctx->prealloc_size_qx < qx_sz) { + ctx->prealloc_size_qx = qx_sz; } - if (vk_prealloc_size_qy < qy_sz) { - vk_prealloc_size_qy = qy_sz; + if (ctx->prealloc_size_qy < qy_sz) { + ctx->prealloc_size_qy = qy_sz; } - if (vk_prealloc_size_x < x_sz) { - vk_prealloc_size_x = x_sz; + if (ctx->prealloc_size_x < x_sz) { + ctx->prealloc_size_x = x_sz; } - if (vk_prealloc_size_y < y_sz) { - vk_prealloc_size_y = y_sz; + if (ctx->prealloc_size_y < y_sz) { + ctx->prealloc_size_y = y_sz; } - if (vk_prealloc_size_split_k < split_k_size) { - vk_prealloc_size_split_k = split_k_size; + if (ctx->prealloc_size_split_k < split_k_size) { + ctx->prealloc_size_split_k = split_k_size; } - if (vk_staging_size < x_sz + y_sz) { - vk_staging_size = x_sz + y_sz; + if (ctx->staging_size < x_sz + y_sz) { + ctx->staging_size = x_sz + y_sz; } break; default: @@ -3803,29 +3989,29 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ } } -void ggml_vk_preallocate_buffers() { - if (vk_disable) { +static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { + if (ctx->disable) { return; } #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_preallocate_buffers()" << std::endl; - std::cerr << "qx_size: " << vk_prealloc_size_qx << " qy_size: " << vk_prealloc_size_qy << " x_size: " << vk_prealloc_size_x << " y_size: " << vk_prealloc_size_y << " split_k_size: " << vk_prealloc_size_split_k << std::endl; + std::cerr << "ggml_ctx->preallocate_buffers()" << std::endl; + std::cerr << "qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) - vk_staging = ggml_vk_create_buffer_check(100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); - ggml_vk_test_transfer(8192 * 1000, false); - ggml_vk_test_transfer(8192 * 1000, true); + ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ggml_vk_test_transfer(ctx, 8192 * 1000, false); + ggml_vk_test_transfer(ctx, 8192 * 1000, true); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_1); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_1); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q8_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q2_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q3_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q6_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_1); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_1); 
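
The GGML_VULKAN_RUN_TESTS block above exercises the dequantization pipelines by quantizing random data, dequantizing it on the GPU, and comparing against the original with an average-error metric. A minimal CPU-only sketch of that round trip, using a simplified, hypothetical 4-bit block format rather than ggml's actual block layouts:

```cpp
// Quantize floats into 4-bit blocks with one scale per block, dequantize them
// back, and report the average absolute error. The block format here is a
// hypothetical stand-in, NOT ggml's real block_q4_0 layout.
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <vector>

constexpr int BLOCK = 32;

struct Block4 {
    float scale;           // one scale per 32 values
    uint8_t qs[BLOCK / 2]; // two 4-bit values per byte
};

static std::vector<Block4> quantize(const std::vector<float> & x) {
    std::vector<Block4> out(x.size() / BLOCK);
    for (size_t b = 0; b < out.size(); b++) {
        const float * src = x.data() + b * BLOCK;
        float amax = 0.0f;
        for (int i = 0; i < BLOCK; i++) amax = std::max(amax, std::fabs(src[i]));
        const float scale = amax / 7.0f;  // map values into the signed 4-bit range [-7, 7]
        const float inv   = scale != 0.0f ? 1.0f / scale : 0.0f;
        out[b].scale = scale;
        for (int i = 0; i < BLOCK / 2; i++) {
            const int q0 = (int) std::lround(src[2*i + 0] * inv) + 8; // bias into [1, 15]
            const int q1 = (int) std::lround(src[2*i + 1] * inv) + 8;
            out[b].qs[i] = (uint8_t) ((q0 & 0x0F) | (q1 << 4));
        }
    }
    return out;
}

static std::vector<float> dequantize(const std::vector<Block4> & q) {
    std::vector<float> out(q.size() * BLOCK);
    for (size_t b = 0; b < q.size(); b++) {
        for (int i = 0; i < BLOCK / 2; i++) {
            out[b*BLOCK + 2*i + 0] = ((q[b].qs[i] & 0x0F) - 8) * q[b].scale;
            out[b*BLOCK + 2*i + 1] = ((q[b].qs[i] >>   4) - 8) * q[b].scale;
        }
    }
    return out;
}

int main() {
    std::vector<float> x(32 * 1024);
    for (float & v : x) v = rand() / (float) RAND_MAX - 0.5f;

    const std::vector<float> y = dequantize(quantize(x));

    double avg_err = 0.0;
    for (size_t i = 0; i < x.size(); i++) avg_err += std::fabs(x[i] - y[i]);
    printf("avg err: %f\n", avg_err / x.size());
}
```

The real ggml_vk_test_dequant instead dispatches ctx->pipeline_dequant[quant], reads the result back with ggml_vk_buffer_read, and accumulates avg_err in the same way.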
+ ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q8_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q2_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q3_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q6_K); const std::vector vals { 8, 8, 8, @@ -3852,76 +4038,76 @@ void ggml_vk_preallocate_buffers() { }; const size_t num_it = 1; for (size_t i = 0; i < vals.size(); i += 3) { - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); std::cerr << std::endl; } GGML_ASSERT(false); #endif - if (vk_prealloc_size_qx > 0 && vk_prealloc_qx.size < vk_prealloc_size_qx) { + if (ctx->prealloc_qx == nullptr || (ctx->prealloc_size_qx > 0 && ctx->prealloc_qx->size < ctx->prealloc_size_qx)) { // Resize buffer - if (vk_prealloc_qx.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_qx); + if (ctx->prealloc_qx != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_qx); } - vk_prealloc_qx = ggml_vk_create_buffer_device(vk_prealloc_size_qx); + ctx->prealloc_qx = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_qx); } - if (vk_prealloc_size_qy > 0 && vk_prealloc_qy.size < vk_prealloc_size_qy) { + if (ctx->prealloc_qy == nullptr || (ctx->prealloc_size_qy > 0 && ctx->prealloc_qy->size < ctx->prealloc_size_qy)) { // Resize buffer - if (vk_prealloc_qy.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_qy); + if (ctx->prealloc_qy != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_qy); } - vk_prealloc_qy = ggml_vk_create_buffer_device(vk_prealloc_size_qy); + ctx->prealloc_qy = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_qy); } - if (vk_prealloc_size_x > 0 && vk_prealloc_x.size < vk_prealloc_size_x) { + if (ctx->prealloc_x == nullptr || (ctx->prealloc_size_x > 0 && ctx->prealloc_x->size < ctx->prealloc_size_x)) { // Resize buffer - if (vk_prealloc_x.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_x); + if (ctx->prealloc_x != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_x); } - vk_prealloc_x = ggml_vk_create_buffer_device(vk_prealloc_size_x); + ctx->prealloc_x = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_x); } - if (vk_prealloc_size_y > 0 && vk_prealloc_y.size < vk_prealloc_size_y) { + if (ctx->prealloc_y == nullptr || (ctx->prealloc_size_y > 0 && ctx->prealloc_y->size < ctx->prealloc_size_y)) { // Resize buffer - if (vk_prealloc_y.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_y); + if (ctx->prealloc_y != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_y); } - vk_prealloc_y = ggml_vk_create_buffer_device(vk_prealloc_size_y); + ctx->prealloc_y = ggml_vk_create_buffer_device(ctx, 
ctx->prealloc_size_y); } - if (vk_prealloc_size_split_k > 0 && vk_prealloc_split_k.size < vk_prealloc_size_split_k) { + if (ctx->prealloc_split_k == nullptr || (ctx->prealloc_size_split_k > 0 && ctx->prealloc_split_k->size < ctx->prealloc_size_split_k)) { // Resize buffer - if (vk_prealloc_split_k.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_split_k); + if (ctx->prealloc_split_k != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_split_k); } - vk_prealloc_split_k = ggml_vk_create_buffer_device(vk_prealloc_size_split_k); + ctx->prealloc_split_k = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_split_k); } - if (vk_staging_size > 0 && vk_staging.size < vk_staging_size) { + if (ctx->staging == nullptr || (ctx->staging_size > 0 && ctx->staging->size < ctx->staging_size)) { // Resize buffer - if (vk_staging.size > 0) { - ggml_vk_destroy_buffer(vk_staging); + if (ctx->staging != nullptr) { + ggml_vk_destroy_buffer(ctx->staging); } - vk_staging = ggml_vk_create_buffer_check(vk_staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); } } -void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ +static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * node, bool last_node){ const bool any_on_device = node->backend == GGML_BACKEND_GPU || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_GPU); - if (vk_disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { + if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_build_graph(" << node << ", " << ggml_op_name(node->op) << ")" << std::endl; #endif - vk_semaphore_idx = 0; - vk_staging_offset = 0; + ctx->semaphore_idx = 0; + ctx->staging_offset = 0; const ggml_tensor * src0 = node->src[0]; const ggml_tensor * src1 = node->src[1]; @@ -3969,44 +4155,44 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ return; } - if (vk_ctx == nullptr) { - vk_ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(vk_ctx); + if (ctx->compute_ctx == nullptr) { + ctx->compute_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, ctx->compute_ctx); } switch (node->op) { case GGML_OP_REPEAT: - ggml_vk_repeat(vk_ctx, src0, src1, node); + ggml_vk_repeat(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_GET_ROWS: - ggml_vk_get_rows(vk_ctx, src0, src1, node); + ggml_vk_get_rows(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_ADD: - ggml_vk_add(vk_ctx, src0, src1, node); + ggml_vk_add(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_MUL: - ggml_vk_mul(vk_ctx, src0, src1, node); + ggml_vk_mul(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_SCALE: - ggml_vk_scale(vk_ctx, src0, node); + ggml_vk_scale(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_SQR: - ggml_vk_sqr(vk_ctx, src0, node); + ggml_vk_sqr(ctx, 
ctx->compute_ctx, src0, node); break; case GGML_OP_CLAMP: - ggml_vk_clamp(vk_ctx, src0, node); + ggml_vk_clamp(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_CPY: case GGML_OP_CONT: case GGML_OP_DUP: - ggml_vk_cpy(vk_ctx, src0, node); + ggml_vk_cpy(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_RESHAPE: @@ -4014,15 +4200,15 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NONE: - ggml_vk_nop(vk_ctx, src0, node); + ggml_vk_nop(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_NORM: - ggml_vk_norm(vk_ctx, src0, node); + ggml_vk_norm(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_RMS_NORM: - ggml_vk_rms_norm(vk_ctx, src0, node); + ggml_vk_rms_norm(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_UNARY: @@ -4030,26 +4216,26 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_RELU: - ggml_vk_unary(vk_ctx, src0, node); + ggml_vk_unary(ctx, ctx->compute_ctx, src0, node); break; default: return; } break; case GGML_OP_DIAG_MASK_INF: - ggml_vk_diag_mask_inf(vk_ctx, src0, node); + ggml_vk_diag_mask_inf(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_SOFT_MAX: - ggml_vk_soft_max(vk_ctx, src0, src1, node); + ggml_vk_soft_max(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_ROPE: - ggml_vk_rope(vk_ctx, src0, src1, node); + ggml_vk_rope(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_MUL_MAT: - ggml_vk_mul_mat(vk_ctx, src0, src1, node); + ggml_vk_mul_mat(ctx, ctx->compute_ctx, src0, src1, node); break; default: @@ -4057,7 +4243,7 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ } extra->ready = true; - extra->ctx_idx = vk_ctx->idx; + extra->ctx_idx = ctx->compute_ctx->idx; #ifdef GGML_VULKAN_CHECK_RESULTS // Force context reset on each node so that each tensor ends up in its own context @@ -4066,18 +4252,18 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ #endif if (node->backend == GGML_BACKEND_CPU || last_node) { - ggml_vk_ctx_end(vk_ctx); - vk_ctx->exit_tensor = node; - vk_ctx = nullptr; + ggml_vk_ctx_end(ctx->compute_ctx); + ctx->compute_ctx->exit_tensor = node; + ctx->compute_ctx = nullptr; } } -bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor){ +static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor){ const bool any_on_device = tensor->backend == GGML_BACKEND_GPU || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); - if (vk_disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { return false; } @@ -4145,33 +4331,33 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) #endif #ifdef GGML_VULKAN_CHECK_RESULTS - ggml_vk_check_results_0(params, tensor); + ggml_vk_check_results_0(ctx, params, tensor); #endif GGML_ASSERT(extra->ready); - vk_context& ctx = vk_gc.contexts[extra->ctx_idx]; + vk_context& subctx = ctx->gc.contexts[extra->ctx_idx]; // Only run if ctx hasn't been submitted yet - if (!ctx.seqs.empty()) { + if (!subctx.seqs.empty()) { // Do staging buffer copies - for (auto& cpy : ctx.in_memcpys) { + for (auto& cpy : subctx.in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - 
ggml_vk_submit(&ctx, vk_fence); + ggml_vk_submit(&subctx, ctx->fence); } - if (tensor == ctx.exit_tensor) { - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + if (tensor == subctx.exit_tensor) { + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); // Do staging buffer copies - for (auto& cpy : ctx.out_memcpys) { + for (auto& cpy : subctx.out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx.in_memcpys.clear(); - ctx.out_memcpys.clear(); + subctx.in_memcpys.clear(); + subctx.out_memcpys.clear(); } extra->ready = false; @@ -4179,90 +4365,204 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) return true; } -void ggml_vk_graph_cleanup() { - if (vk_disable) { +// Clean up after graph processing is done +static void ggml_vk_graph_cleanup(ggml_backend_vk_context * ctx) { + if (ctx->disable) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_graph_cleanup()" << std::endl; #endif - for (auto& buffer : vk_gc.temp_buffers) { - ggml_vk_pool_free(buffer); + for (auto& buffer : ctx->gc.temp_buffers) { + ggml_vk_pool_free(ctx, buffer); } - vk_gc.temp_buffers.clear(); + ctx->gc.temp_buffers.clear(); - for (auto * pipeline : vk_gc.pipelines) { - ggml_vk_pipeline_cleanup(*pipeline); - } - vk_gc.pipelines.clear(); - - ggml_vk_queue_cleanup(vk_device.compute_queue); - ggml_vk_queue_cleanup(vk_device.transfer_queue); - - for (size_t i = 0; i < vk_gc.semaphores.size(); i++) { - vk_device.device.destroySemaphore({ vk_gc.semaphores[i].s }); - } - vk_gc.semaphores.clear(); - - for (size_t i = 0; i < vk_gc.tl_semaphores.size(); i++) { - vk_device.device.destroySemaphore({ vk_gc.tl_semaphores[i].s }); - } - vk_gc.tl_semaphores.clear(); - - vk_event_idx = 0; - - for (auto& event : vk_gc.events) { - vk_device.device.resetEvent(event); + for (auto * pipeline : ctx->gc.pipelines) { + ggml_pipeline_cleanup(*pipeline); } - vk_staging_offset = 0; + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); - vk_ctx = nullptr; - vk_gc.contexts.clear(); + for (size_t i = 0; i < ctx->gc.semaphores.size(); i++) { + ctx->device.lock()->device.destroySemaphore({ ctx->gc.semaphores[i].s }); + } + ctx->gc.semaphores.clear(); + + for (size_t i = 0; i < ctx->gc.tl_semaphores.size(); i++) { + ctx->device.lock()->device.destroySemaphore({ ctx->gc.tl_semaphores[i].s }); + } + ctx->gc.tl_semaphores.clear(); + ctx->semaphore_idx = 0; + + ctx->event_idx = 0; + + for (auto& event : ctx->gc.events) { + ctx->device.lock()->device.resetEvent(event); + } + + ctx->staging_offset = 0; + + ctx->compute_ctx = nullptr; + ctx->transfer_ctx = nullptr; + ctx->gc.contexts.clear(); } -static void ggml_vk_cleanup() { +// Clean up on backend free +static void ggml_vk_cleanup(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_cleanup()" << std::endl; + std::cerr << "ggml_vk_cleanup(" << ctx->idx << ")" << std::endl; #endif - ggml_vk_destroy_buffer(vk_prealloc_x); - ggml_vk_destroy_buffer(vk_prealloc_y); - ggml_vk_destroy_buffer(vk_prealloc_split_k); - ggml_vk_destroy_buffer(vk_staging); - ggml_vk_destroy_buffer(vk_sync_staging); + ggml_vk_graph_cleanup(ctx); - vk_prealloc_size_x = 0; - vk_prealloc_size_y = 0; - vk_prealloc_size_split_k = 0; - vk_staging_size = 0; + 
ggml_vk_destroy_buffer(ctx->prealloc_qx); + ggml_vk_destroy_buffer(ctx->prealloc_qy); + ggml_vk_destroy_buffer(ctx->prealloc_x); + ggml_vk_destroy_buffer(ctx->prealloc_y); + ggml_vk_destroy_buffer(ctx->prealloc_split_k); + ggml_vk_destroy_buffer(ctx->staging); + ggml_vk_destroy_buffer(ctx->sync_staging); - for (auto& event : vk_gc.events) { - vk_device.device.destroyEvent(event); + for (auto& buffer : ctx->buffer_pool) { + ggml_vk_destroy_buffer(buffer); } - vk_gc.events.clear(); + + ctx->prealloc_size_qx = 0; + ctx->prealloc_size_qy = 0; + ctx->prealloc_size_x = 0; + ctx->prealloc_size_y = 0; + ctx->prealloc_size_split_k = 0; + ctx->staging_size = 0; + + for (auto& event : ctx->gc.events) { + ctx->device.lock()->device.destroyEvent(event); + } + ctx->gc.events.clear(); + + for (auto* pipeline : ctx->gc.pipelines) { + ggml_vk_destroy_pipeline(ctx, pipeline); + } + ctx->gc.pipelines.clear(); + + ctx->device.lock()->device.destroyFence(ctx->fence); + + ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->compute_queue.pool); + if (!ctx->device.lock()->single_queue) { + ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->transfer_queue.pool); + } +} + +GGML_CALL int ggml_vk_get_device_count() { + ggml_vk_instance_init(); + + return vk_instance.device_indices.size(); +} + +GGML_CALL void ggml_vk_get_device_description(int device, char * description, size_t description_size) { + ggml_vk_instance_init(); + + std::vector devices = vk_instance.instance.enumeratePhysicalDevices(); + + vk::PhysicalDeviceProperties props; + devices[device].getProperties(&props); + + snprintf(description, description_size, "%s", props.deviceName.data()); +} + +// CPU assist interface + +void ggml_vk_init_cpu_assist() { + ggml_vk_instance_init(); + + std::cerr << "ggml_vulkan: Found " << ggml_vk_get_device_count() << " Vulkan devices:" << std::endl; + + for (size_t i = 0; i < ggml_vk_get_device_count(); i++) { + ggml_vk_print_gpu_info(i); + } + // Initialize the first backend to make sure CPU matrix multiplications can be offloaded. 
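
The CPU-assist wrappers introduced here all route through vk_instance.contexts[0] and return early when that context has not been initialized, while ggml_backend_vk_init(idx) further below short-circuits when vk_instance.initialized[idx] is already set. A small self-contained sketch of that lazy, index-keyed initialization pattern, with hypothetical types in place of the real ggml structures:

```cpp
// Lazily initialized, index-keyed backend registry: init(i) creates the
// backend on first use and returns the cached instance afterwards, while
// the *_cpu_assist style wrapper simply no-ops until init(0) has run.
// Hypothetical types; a sketch of the pattern, not the real ggml-vulkan code.
#include <array>
#include <cstdio>
#include <memory>

struct Backend {
    int idx;
    explicit Backend(int i) : idx(i) { printf("initializing device %d\n", i); }
};

constexpr size_t MAX_DEVICES = 16;

struct Registry {
    std::array<bool, MAX_DEVICES> initialized{};
    std::array<std::unique_ptr<Backend>, MAX_DEVICES> backends;

    Backend * init(size_t idx) {
        if (initialized[idx]) {
            return backends[idx].get();   // already set up: reuse it
        }
        backends[idx] = std::make_unique<Backend>((int) idx);
        initialized[idx] = true;
        return backends[idx].get();
    }

    // CPU-assist style entry point: only act if device 0 was initialized.
    void do_work_cpu_assist() {
        if (!initialized[0]) {
            return;                       // backend not set up, stay on the CPU
        }
        printf("offloading work to device %d\n", backends[0]->idx);
    }
};

int main() {
    Registry reg;
    reg.do_work_cpu_assist();  // no-op: nothing initialized yet
    reg.init(0);               // first call creates the backend
    reg.init(0);               // second call returns the cached one
    reg.do_work_cpu_assist();  // now offloads
}
```

The point of the pattern is that CPU-side callers never need to know whether a Vulkan device exists: every assist entry point degrades to a no-op instead of failing.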
+ ggml_backend_vk_init(0); +} + +void ggml_vk_preallocate_buffers_graph_cpu_assist(ggml_tensor * node) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_preallocate_buffers_graph(ctx, node); +} + +void ggml_vk_preallocate_buffers_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_preallocate_buffers(ctx); +} + +void ggml_vk_build_graph_cpu_assist(ggml_tensor * node, bool last_node) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_build_graph(ctx, node, last_node); +} + +bool ggml_vk_compute_forward_cpu_assist(ggml_compute_params * params, ggml_tensor * tensor){ + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return false; + } + + return ggml_vk_compute_forward(ctx, params, tensor); +} + +void ggml_vk_graph_cleanup_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_graph_cleanup(ctx); +} + +void ggml_vk_free_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized || vk_instance.backends[0] == nullptr) { + return; + } + + ggml_backend_vk_free(vk_instance.backends[0]); } // backend interface #define UNUSED GGML_UNUSED -struct ggml_backend_vk_context { - std::string name; -}; - // device backend static void * const vk_ptr_base = (void *)(uintptr_t) 0x1000; // NOLINT struct ggml_backend_vk_buffer_context { + ggml_backend_vk_context * ctx; vk_buffer dev_buffer; ggml_tensor_extra_gpu * temp_tensor_extras = nullptr; size_t temp_tensor_extra_index = 0; std::string name; - ggml_backend_vk_buffer_context(vk_buffer dev_buffer) : + ggml_backend_vk_buffer_context(ggml_backend_vk_context * ctx, vk_buffer&& dev_buffer, std::string& name) : + ctx(ctx), dev_buffer(dev_buffer), - name(GGML_VK_NAME) { + name(name) { } ~ggml_backend_vk_buffer_context() { @@ -4294,6 +4594,9 @@ GGML_CALL static bool ggml_backend_buffer_is_vk(ggml_backend_buffer_t buffer) { } GGML_CALL static void ggml_backend_vk_buffer_free_buffer(ggml_backend_buffer_t buffer) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_buffer_free_buffer()" << std::endl; +#endif ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; ggml_vk_destroy_buffer(ctx->dev_buffer); delete ctx; @@ -4313,6 +4616,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b ggml_tensor_extra_gpu * extra = ctx->ggml_vk_alloc_temp_tensor_extra(); if (tensor->view_src != nullptr && tensor->view_src->extra != nullptr) { + GGML_ASSERT(tensor->view_src->buffer->buft == buffer->buft); ggml_tensor_extra_gpu * extra_view = (ggml_tensor_extra_gpu *) tensor->view_src->extra; extra->buffer_gpu = extra_view->buffer_gpu; extra->offset = extra_view->offset + tensor->view_offs; @@ -4331,11 +4635,13 @@ GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_write(&extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(buffer); + ggml_vk_buffer_write(ctx->ctx, buf, extra->offset + offset, data, size); } GGML_CALL static void 
ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { @@ -4344,31 +4650,35 @@ GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t bu #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(buffer); + ggml_vk_buffer_read(ctx->ctx, buf, extra->offset + offset, data, size); } GGML_CALL static bool ggml_backend_vk_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { if (ggml_backend_buffer_is_vk(src->buffer)) { + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - ggml_vk_buffer_copy(&src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + vk_buffer src_buf = src_extra->buffer_gpu.lock(); + vk_buffer dst_buf = dst_extra->buffer_gpu.lock(); + + ggml_vk_buffer_copy(dst_buf, dst_extra->offset, src_buf, src_extra->offset, ggml_nbytes(src)); return true; } return false; - - UNUSED(buffer); } GGML_CALL static void ggml_backend_vk_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; - ggml_vk_buffer_memset(&ctx->dev_buffer, 0, value, buffer->size); + ggml_vk_buffer_memset(ctx->ctx, ctx->dev_buffer, 0, value, buffer->size); } static ggml_backend_buffer_i ggml_backend_vk_buffer_interface = { @@ -4386,6 +4696,7 @@ static ggml_backend_buffer_i ggml_backend_vk_buffer_interface = { // vk buffer type struct ggml_backend_vk_buffer_type_context { std::string name; + ggml_backend_vk_context * ctx; }; GGML_CALL static const char * ggml_backend_vk_buffer_type_name(ggml_backend_buffer_type_t buft) { @@ -4398,25 +4709,22 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer( #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_type_alloc_buffer(" << size << ")" << std::endl; #endif - vk_buffer dev_buffer = ggml_vk_create_buffer_device(size); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + vk_buffer dev_buffer = ggml_vk_create_buffer_device(ctx->ctx, size); - ggml_backend_vk_buffer_context * ctx = new ggml_backend_vk_buffer_context(dev_buffer); + ggml_backend_vk_buffer_context * bufctx = new ggml_backend_vk_buffer_context(ctx->ctx, std::move(dev_buffer), ctx->name); - return ggml_backend_buffer_init(buft, ggml_backend_vk_buffer_interface, ctx, size); - - UNUSED(buft); + return ggml_backend_buffer_init(buft, ggml_backend_vk_buffer_interface, bufctx, size); } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return vk_device.properties.limits.minStorageBufferOffsetAlignment; - - UNUSED(buft); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + return ctx->ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { - return 
vk_device.max_memory_allocation_size; - - UNUSED(buft); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + return ctx->ctx->device.lock()->max_memory_allocation_size; } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { @@ -4426,9 +4734,14 @@ GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alloc_size(ggml_backend_ } GGML_CALL static bool ggml_backend_vk_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { - return ggml_backend_is_vk(backend); + if (!ggml_backend_is_vk(backend)) { + return false; + } - UNUSED(buft); + ggml_backend_vk_buffer_type_context * buft_ctx = (ggml_backend_vk_buffer_type_context *)buft->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + + return buft_ctx->ctx->idx == ctx->idx; } static ggml_backend_buffer_type_i ggml_backend_vk_buffer_type_interface = { @@ -4441,20 +4754,16 @@ static ggml_backend_buffer_type_i ggml_backend_vk_buffer_type_interface = { /* .is_host = */ NULL, }; -GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type() { - static ggml_backend_buffer_type ggml_backend_vk_buffer_type; +GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(size_t idx) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_buffer_type(" << idx << ")" << std::endl; +#endif - static bool ggml_backend_vk_buffer_type_initialized = false; + GGML_ASSERT(idx < vk_instance.device_indices.size()); - if (!ggml_backend_vk_buffer_type_initialized) { - ggml_backend_vk_buffer_type = { - /* .iface = */ ggml_backend_vk_buffer_type_interface, - /* .context = */ new ggml_backend_vk_buffer_type_context{GGML_VK_NAME}, - }; - ggml_backend_vk_buffer_type_initialized = true; - } + ggml_backend_vk_init(idx); - return &ggml_backend_vk_buffer_type; + return &vk_instance.buffer_types[idx]; } // host buffer type @@ -4472,13 +4781,19 @@ GGML_CALL static const char * ggml_backend_vk_host_buffer_name(ggml_backend_buff } GGML_CALL static void ggml_backend_vk_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_vk_host_free(buffer->context); +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_host_buffer_free_buffer()" << std::endl; +#endif + ggml_vk_host_free(&vk_instance.contexts[0], buffer->context); } GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_host_buffer_type_alloc_buffer(" << size << ")" << std::endl; +#endif void * ptr = nullptr; try { - ptr = ggml_vk_host_malloc(size); + ptr = ggml_vk_host_malloc(&vk_instance.contexts[0], size); } catch (vk::SystemError& e) { std::cerr << "ggml_vulkan: Failed to allocate pinned memory." 
<< std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -4495,7 +4810,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_host_buffer_type_alloc_bu } GGML_CALL static size_t ggml_backend_vk_host_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return vk_device.properties.limits.minMemoryMapAlignment; + return vk_instance.contexts[0].device.lock()->properties.limits.minMemoryMapAlignment; UNUSED(buft); } @@ -4514,127 +4829,150 @@ GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_host_buffer_type() { /* .context = */ nullptr, }; + if (!vk_instance.contexts[0].initialized) { + // Fall back to CPU + return ggml_backend_cpu_buffer_type(); + } + return &ggml_backend_vk_buffer_type_host; } // backend GGML_CALL static const char * ggml_backend_vk_name(ggml_backend_t backend) { - ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; - return vk_ctx->name.c_str(); + return ctx->name.c_str(); } GGML_CALL static void ggml_backend_vk_free(ggml_backend_t backend) { - ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_free(" << ctx->name << ")" << std::endl; +#endif - delete vk_ctx; + size_t idx = ctx->idx; + + ggml_vk_cleanup(ctx); + + // Release device + vk_instance.devices[ctx->idx].reset(); + ctx->initialized = false; + + vk_instance.initialized[idx] = false; + vk_instance.backends[idx] = nullptr; + memset(&vk_instance.buffer_types[idx], 0, sizeof(ggml_backend_buffer_type)); delete backend; } GGML_CALL static ggml_backend_buffer_type_t ggml_backend_vk_get_default_buffer_type(ggml_backend_t backend) { - return ggml_backend_vk_buffer_type(); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; - UNUSED(backend); + GGML_ASSERT(ctx->initialized); + + return ggml_backend_vk_buffer_type(ctx->idx); } GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_set_tensor_async(" << size << ")" << std::endl; #endif - GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_write_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(backend); + ggml_vk_buffer_write_async(ctx, ctx->transfer_ctx, buf, extra->offset + offset, data, size); } GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, const 
ggml_tensor * tensor, void * data, size_t offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_get_tensor_async(" << size << ")" << std::endl; #endif - GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_read_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(backend); + ggml_vk_buffer_read_async(ctx, ctx->transfer_ctx, buf, extra->offset + offset, data, size); } GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_cpy_tensor_async()" << std::endl; #endif - if ((dst->buffer->buft == ggml_backend_vk_buffer_type() || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + if ((dst->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_copy_async(vk_transfer_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + vk_buffer src_buf = src_extra->buffer_gpu.lock(); + vk_buffer dst_buf = dst_extra->buffer_gpu.lock(); + + ggml_vk_buffer_copy_async(ctx->transfer_ctx, src_buf, src_extra->offset, dst_buf, dst_extra->offset, ggml_nbytes(src)); return true; } return false; - - UNUSED(backend); } GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_synchronize()" << std::endl; #endif - if(vk_transfer_ctx == nullptr) { + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + if(ctx->transfer_ctx == nullptr) { return; } - ggml_vk_ctx_end(vk_transfer_ctx); + ggml_vk_ctx_end(ctx->transfer_ctx); - for (auto& cpy : vk_transfer_ctx->in_memcpys) { + for (auto& cpy : ctx->transfer_ctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ggml_vk_submit(vk_transfer_ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), 
"ggml_backend_vk_synchronize waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(ctx->transfer_ctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : vk_transfer_ctx->out_memcpys) { + for (auto& cpy : ctx->transfer_ctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - vk_transfer_ctx = nullptr; - - UNUSED(backend); + ctx->transfer_ctx = nullptr; } GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - // ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_preallocate_buffers_graph(cgraph->nodes[i]); + ggml_vk_preallocate_buffers_graph(ctx, cgraph->nodes[i]); } - ggml_vk_preallocate_buffers(); + ggml_vk_preallocate_buffers(ctx); int last_node = cgraph->n_nodes - 1; @@ -4644,7 +4982,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_build_graph(cgraph->nodes[i], i == last_node); + ggml_vk_build_graph(ctx,cgraph->nodes[i], i == last_node); } ggml_compute_params params = {}; @@ -4657,19 +4995,19 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml continue; } - bool ok = ggml_vk_compute_forward(¶ms, node); + bool ok = ggml_vk_compute_forward(ctx, ¶ms, node); if (!ok) { fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } #ifdef GGML_VULKAN_CHECK_RESULTS else { - ggml_vk_check_results_1(¶ms, node); + ggml_vk_check_results_1(ctx, ¶ms, node); } #endif GGML_ASSERT(ok); } - ggml_vk_graph_cleanup(); + ggml_vk_graph_cleanup(ctx); return true; @@ -4734,7 +5072,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const } return false; } break; - // case GGML_OP_DUP: + case GGML_OP_DUP: // case GGML_OP_REPEAT: // { // ggml_type src0_type = op->src[0]->type; @@ -4786,18 +5124,30 @@ static ggml_backend_i ggml_backend_vk_interface = { /* .supports_op = */ ggml_backend_vk_supports_op, }; -GGML_CALL ggml_backend_t ggml_backend_vk_init() { - ggml_vk_init(); // TODO: remove from ggml.c +GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t idx) { + if (vk_instance.initialized[idx]) { + return vk_instance.backends[idx]; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_init(" << idx << ")" << std::endl; +#endif - ggml_backend_vk_context * ctx = new ggml_backend_vk_context { - /* .name = */ GGML_VK_NAME, + ggml_backend_vk_context * ctx = &vk_instance.contexts[idx]; + ggml_vk_init(ctx, idx); + ctx->name = GGML_VK_NAME + std::to_string(idx); + vk_instance.buffer_types[idx] = { + /* .iface = */ ggml_backend_vk_buffer_type_interface, + /* .context = */ new ggml_backend_vk_buffer_type_context{ ctx->name, ctx }, }; + vk_instance.initialized[idx] = true; ggml_backend_t vk_backend = new ggml_backend { /* .interface = */ ggml_backend_vk_interface, - /* .context = */ ctx + /* .context = */ &vk_instance.contexts[ctx->idx], }; + vk_instance.backends[idx] = vk_backend; + return vk_backend; } @@ -4805,20 +5155,47 @@ GGML_CALL bool ggml_backend_is_vk(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_vk_name; } +GGML_CALL int ggml_backend_vk_get_device_count() { + return 
ggml_vk_get_device_count(); +} + +GGML_CALL void ggml_backend_vk_get_device_description(int device, char * description, size_t description_size) { + ggml_vk_get_device_description(device, description, description_size); +} + +GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total) { + GGML_ASSERT(device < vk_instance.device_indices.size()); + + vk::PhysicalDevice vkdev = vk_instance.instance.enumeratePhysicalDevices()[vk_instance.device_indices[device]]; + + vk::PhysicalDeviceMemoryProperties memprops = vkdev.getMemoryProperties(); + + for (const vk::MemoryHeap& heap : memprops.memoryHeaps) { + if (heap.flags & vk::MemoryHeapFlagBits::eDeviceLocal) { + *total = heap.size; + *free = heap.size; + break; + } + } +} + // backend registry GGML_CALL static ggml_backend_t ggml_backend_reg_vk_init(const char * params, void * user_data) { - ggml_backend_t vk_backend = ggml_backend_vk_init(); + ggml_backend_t vk_backend = ggml_backend_vk_init((int) (intptr_t) user_data); return vk_backend; UNUSED(params); - UNUSED(user_data); } extern "C" GGML_CALL int ggml_backend_vk_reg_devices(); GGML_CALL int ggml_backend_vk_reg_devices() { - ggml_backend_register(GGML_VK_NAME, ggml_backend_reg_vk_init, ggml_backend_vk_buffer_type(), nullptr); - return 1; + for (auto idx : vk_instance.device_indices) { + char name[128]; + snprintf(name, sizeof(name), "%s%ld", GGML_VK_NAME, idx); + ggml_backend_register(name, ggml_backend_reg_vk_init, ggml_backend_vk_buffer_type(idx), (void *) (intptr_t) idx); + } + return vk_instance.device_indices.size(); } // checks @@ -4874,7 +5251,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, const void * d } } -static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name) { +static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tensor * tensor, const char * name) { void * tensor_data = tensor->data; if (tensor->backend == GGML_BACKEND_GPU) { @@ -4883,7 +5260,7 @@ static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name) ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); } std::cerr << "TENSOR CHECK " << name << " (" << tensor->name << "): " << ggml_op_name(tensor->op) << std::endl; @@ -4944,7 +5321,7 @@ void * comp_result; size_t comp_size; size_t comp_nb[GGML_MAX_DIMS]; size_t check_counter = 0; -static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * tensor) { +static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor) { if (params->ith != 0) { return; } @@ -4966,7 +5343,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * /*.no_alloc =*/ false, }; - struct ggml_context * ctx = ggml_init(iparams); + struct ggml_context * ggml_ctx = ggml_init(iparams); struct ggml_tensor * src0_clone = nullptr; struct ggml_tensor * src1_clone = nullptr; @@ -4979,7 +5356,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * void * src1_buffer; if (src0 != nullptr) { - src0_clone = ggml_dup_tensor(ctx, src0); + src0_clone = ggml_dup_tensor(ggml_ctx, src0); src0_size = ggml_nbytes(src0); @@ -4995,7 +5372,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * for (int i3 = 0; i3 < src0->ne[3]; i3++) { for (int i2 = 0; i2 < 
src0->ne[2]; i2++) { const int idx = i3*src0->ne[2] + i2; - ggml_vk_buffer_read(&extra->buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); } } @@ -5005,10 +5382,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * src0_clone->nb[i] = src0_clone->nb[i - 1]*src0_clone->ne[i - 1]; } } else { - if (offset + src0_size >= extra->buffer_gpu.size) { - src0_size = extra->buffer_gpu.size - offset; + if (offset + src0_size >= extra->buffer_gpu->size) { + src0_size = extra->buffer_gpu->size - offset; } - ggml_vk_buffer_read(&extra->buffer_gpu, offset, src0_clone->data, src0_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src0_clone->data, src0_size); memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5016,13 +5393,13 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(src0, "src0"); + ggml_vk_print_tensor(ctx, src0, "src0"); } ggml_vk_check_tensor(std::string(ggml_op_name(tensor->op)) + "->src0", src0_clone); } if (src1 != nullptr) { - src1_clone = ggml_dup_tensor(ctx, src1); + src1_clone = ggml_dup_tensor(ggml_ctx, src1); src1_size = ggml_nbytes(src1); @@ -5038,7 +5415,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * for (int i3 = 0; i3 < src1->ne[3]; i3++) { for (int i2 = 0; i2 < src1->ne[2]; i2++) { const int idx = i3*src1->ne[2] + i2; - ggml_vk_buffer_read(&extra->buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); } } @@ -5048,10 +5425,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * src1_clone->nb[i] = src1_clone->nb[i - 1]*src1_clone->ne[i - 1]; } } else { - if (offset + src1_size >= extra->buffer_gpu.size) { - src1_size = extra->buffer_gpu.size - offset; + if (offset + src1_size >= extra->buffer_gpu->size) { + src1_size = extra->buffer_gpu->size - offset; } - ggml_vk_buffer_read(&extra->buffer_gpu, offset, src1_clone->data, src1_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src1_clone->data, src1_size); memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5059,7 +5436,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(src1, "src1"); + ggml_vk_print_tensor(ctx, src1, "src1"); std::cerr << "TENSOR CHECK: " << ggml_op_name(src1_clone->op) << " (check " << check_counter << ")" << std::endl; std::cerr << "src1_clone=" << tensor << " src1_clone->backend: " << src1_clone->backend << " src1_clone->type: " << ggml_type_name(src1_clone->type) << " ne0=" << src1_clone->ne[0] << " nb0=" << src1_clone->nb[0] << " ne1=" << src1_clone->ne[1] << " nb1=" << src1_clone->nb[1] << " ne2=" << src1_clone->ne[2] << " nb2=" << src1_clone->nb[2] << " ne3=" << src1_clone->ne[3] << " nb3=" << src1_clone->nb[3] << std::endl; if (src1->src[0] != nullptr) { @@ -5082,51 +5459,51 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, 
ggml_tensor * } if (tensor->op == GGML_OP_MUL_MAT) { - tensor_clone = ggml_mul_mat(ctx, src0_clone, src1_clone); + tensor_clone = ggml_mul_mat(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_MUL) { - tensor_clone = ggml_mul(ctx, src0_clone, src1_clone); + tensor_clone = ggml_mul(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_SCALE) { - tensor_clone = ggml_scale(ctx, src0_clone, ((float *)tensor->op_params)[0]); + tensor_clone = ggml_scale(ggml_ctx, src0_clone, ((float *)tensor->op_params)[0]); } else if (tensor->op == GGML_OP_SQR) { - tensor_clone = ggml_sqr(ctx, src0_clone); + tensor_clone = ggml_sqr(ggml_ctx, src0_clone); } else if (tensor->op == GGML_OP_CLAMP) { - tensor_clone = ggml_clamp(ctx, src0_clone, ((float *)tensor->op_params)[0], ((float *)tensor->op_params)[1]); + tensor_clone = ggml_clamp(ggml_ctx, src0_clone, ((float *)tensor->op_params)[0], ((float *)tensor->op_params)[1]); } else if (tensor->op == GGML_OP_ADD) { - tensor_clone = ggml_add(ctx, src0_clone, src1_clone); + tensor_clone = ggml_add(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_NORM) { - tensor_clone = ggml_norm(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_RMS_NORM) { - tensor_clone = ggml_rms_norm(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_rms_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_SOFT_MAX) { if (src1 != nullptr) { - tensor_clone = ggml_soft_max_ext(ctx, src0_clone, src1_clone, *(float *)tensor->op_params); + tensor_clone = ggml_soft_max_ext(ggml_ctx, src0_clone, src1_clone, *(float *)tensor->op_params); } else { - tensor_clone = ggml_soft_max(ctx, src0_clone); + tensor_clone = ggml_soft_max(ggml_ctx, src0_clone); } } else if (tensor->op == GGML_OP_DIAG_MASK_INF) { - tensor_clone = ggml_diag_mask_inf(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_diag_mask_inf(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_ROPE) { const int n_dims = ((int32_t *) tensor->op_params)[1]; const int mode = ((int32_t *) tensor->op_params)[2]; - const int n_ctx = ((int32_t *) tensor->op_params)[3]; - const int n_orig_ctx = ((int32_t *) tensor->op_params)[4]; + const int n_ggml_ctx = ((int32_t *) tensor->op_params)[3]; + const int n_orig_ggml_ctx = ((int32_t *) tensor->op_params)[4]; float freq_base = ((float *) tensor->op_params)[5]; float freq_scale = ((float *) tensor->op_params)[6]; float ext_factor = ((float *) tensor->op_params)[7]; float attn_factor = ((float *) tensor->op_params)[8]; float beta_fast = ((float *) tensor->op_params)[9]; float beta_slow = ((float *) tensor->op_params)[10]; - tensor_clone = ggml_rope_custom(ctx, src0_clone, src1_clone, n_dims, mode, n_ctx, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); + tensor_clone = ggml_rope_custom(ggml_ctx, src0_clone, src1_clone, n_dims, mode, n_ggml_ctx, n_orig_ggml_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); } else if (tensor->op == GGML_OP_UNARY) { switch (ggml_get_unary_op(tensor)) { case GGML_UNARY_OP_SILU: - tensor_clone = ggml_silu(ctx, src0_clone); + tensor_clone = ggml_silu(ggml_ctx, src0_clone); break; case GGML_UNARY_OP_GELU: - tensor_clone = ggml_gelu(ctx, src0_clone); + tensor_clone = ggml_gelu(ggml_ctx, src0_clone); break; case GGML_UNARY_OP_RELU: - tensor_clone = ggml_relu(ctx, 
src0_clone); + tensor_clone = ggml_relu(ggml_ctx, src0_clone); break; default: std::cerr << "Missing vk_check_results OP: " << ggml_op_name(tensor->op) << std::endl; @@ -5134,40 +5511,40 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } } else if (tensor->op == GGML_OP_CPY || tensor->op == GGML_OP_DUP) { if (src1 == nullptr) { - tensor_clone = ggml_dup(ctx, src0_clone); + tensor_clone = ggml_dup(ggml_ctx, src0_clone); tensor_clone->type = tensor->type; } else { - tensor_clone = ggml_cpy(ctx, src0_clone, src1_clone); + tensor_clone = ggml_cpy(ggml_ctx, src0_clone, src1_clone); } } else if (tensor->op == GGML_OP_CONT) { - tensor_clone = ggml_cont_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + tensor_clone = ggml_cont_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); } else if (tensor->op == GGML_OP_RESHAPE) { - tensor_clone = ggml_reshape_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + tensor_clone = ggml_reshape_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); } else if (tensor->op == GGML_OP_VIEW) { - tensor_clone = ggml_view_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3], tensor->nb[1], tensor->nb[2], tensor->nb[3], ((int32_t *) tensor->op_params)[0]); + tensor_clone = ggml_view_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3], tensor->nb[1], tensor->nb[2], tensor->nb[3], ((int32_t *) tensor->op_params)[0]); } else if (tensor->op == GGML_OP_PERMUTE) { int32_t * params = (int32_t *)tensor->op_params; - tensor_clone = ggml_permute(ctx, src0_clone, params[0], params[1], params[2], params[3]); + tensor_clone = ggml_permute(ggml_ctx, src0_clone, params[0], params[1], params[2], params[3]); } else if (tensor->op == GGML_OP_TRANSPOSE) { - tensor_clone = ggml_transpose(ctx, src0_clone); + tensor_clone = ggml_transpose(ggml_ctx, src0_clone); } else { std::cerr << "Missing vk_check_results OP: " << ggml_op_name(tensor->op) << std::endl; GGML_ASSERT(false); } // Disable vulkan here to avoid the hooks in ggml.c - vk_disable = true; + ctx->disable = true; - ggml_cgraph * cgraph = ggml_new_graph(ctx); + ggml_cgraph * cgraph = ggml_new_graph(ggml_ctx); ggml_build_forward_expand(cgraph, tensor_clone); - ggml_graph_compute_with_ctx(ctx, cgraph, 8); + ggml_graph_compute_with_ctx(ggml_ctx, cgraph, 8); - vk_disable = false; + ctx->disable = false; ggml_vk_check_tensor(ggml_op_name(tensor->op), tensor_clone); if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(tensor_clone, "tensor_clone"); + ggml_vk_print_tensor(ctx, tensor_clone, "tensor_clone"); } comp_size = ggml_nbytes(tensor_clone); @@ -5183,10 +5560,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * free(src1_buffer); } - ggml_free(ctx); + ggml_free(ggml_ctx); } -void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) { +static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor) { if (params->ith != 0) { return; } @@ -5208,11 +5585,11 @@ void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (extra->offset + tensor_size >= extra->buffer_gpu.size) { - tensor_size = extra->buffer_gpu.size - (extra->offset); + if (extra->offset + tensor_size >= 
extra->buffer_gpu->size) { + tensor_size = extra->buffer_gpu->size - (extra->offset); } - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); } float first_error_result = -1.0f; @@ -5339,4 +5716,10 @@ void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) free(tensor_data); } } + +void ggml_vk_check_results_1_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + ggml_vk_check_results_0(ctx, params, tensor); +} #endif diff --git a/ggml-vulkan.h b/ggml-vulkan.h index eb8a148e2..9645126b4 100644 --- a/ggml-vulkan.h +++ b/ggml-vulkan.h @@ -8,24 +8,29 @@ extern "C" { #endif #define GGML_VK_NAME "Vulkan" +#define GGML_VK_MAX_DEVICES 16 -GGML_API void ggml_vk_init(void); +GGML_API void ggml_vk_init_cpu_assist(void); -GGML_API void ggml_vk_preallocate_buffers_graph(struct ggml_tensor * node); -GGML_API void ggml_vk_preallocate_buffers(void); -GGML_API void ggml_vk_build_graph(struct ggml_tensor * node, bool last_node); -GGML_API bool ggml_vk_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); +GGML_API void ggml_vk_preallocate_buffers_graph_cpu_assist(struct ggml_tensor * node); +GGML_API void ggml_vk_preallocate_buffers_cpu_assist(void); +GGML_API void ggml_vk_build_graph_cpu_assist(struct ggml_tensor * node, bool last_node); +GGML_API bool ggml_vk_compute_forward_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor); #ifdef GGML_VULKAN_CHECK_RESULTS -void ggml_vk_check_results_1(struct ggml_compute_params * params, struct ggml_tensor * tensor); +void ggml_vk_check_results_1_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor); #endif -GGML_API void ggml_vk_graph_cleanup(void); +GGML_API void ggml_vk_graph_cleanup_cpu_assist(void); +GGML_API void ggml_vk_free_cpu_assist(void); // backend API -GGML_API GGML_CALL ggml_backend_t ggml_backend_vk_init(void); +GGML_API GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t dev_num); GGML_API GGML_CALL bool ggml_backend_is_vk(ggml_backend_t backend); +GGML_API GGML_CALL int ggml_backend_vk_get_device_count(void); +GGML_API GGML_CALL void ggml_backend_vk_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total); -GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(size_t dev_num); // pinned host buffer for use with the CPU backend for faster copies between CPU and GPU GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_host_buffer_type(void); diff --git a/ggml.c b/ggml.c index b9ec0c981..f783a6fd3 100644 --- a/ggml.c +++ b/ggml.c @@ -2343,7 +2343,7 @@ struct ggml_context * ggml_init(struct ggml_init_params params) { #elif defined(GGML_USE_CLBLAST) ggml_cl_init(); #elif defined(GGML_USE_VULKAN) - ggml_vk_init(); + ggml_vk_init_cpu_assist(); #elif defined(GGML_USE_SYCL) ggml_init_sycl(); #endif @@ -14850,10 +14850,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); #elif defined(GGML_USE_VULKAN) - const bool skip_cpu = 
ggml_vk_compute_forward(params, tensor); + const bool skip_cpu = ggml_vk_compute_forward_cpu_assist(params, tensor); #ifdef GGML_VULKAN_CHECK_RESULTS if (skip_cpu) { - ggml_vk_check_results_1(params, tensor); + ggml_vk_check_results_1_cpu_assist(params, tensor); } #endif if (skip_cpu) { @@ -17269,12 +17269,12 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { #ifdef GGML_USE_VULKAN for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_preallocate_buffers_graph(cgraph->nodes[i]); + ggml_vk_preallocate_buffers_graph_cpu_assist(cgraph->nodes[i]); } - ggml_vk_preallocate_buffers(); + ggml_vk_preallocate_buffers_cpu_assist(); for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_build_graph(cgraph->nodes[i], i == cgraph->n_nodes - 1); + ggml_vk_build_graph_cpu_assist(cgraph->nodes[i], i == cgraph->n_nodes - 1); } #endif @@ -17330,7 +17330,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { } #ifdef GGML_USE_VULKAN - ggml_vk_graph_cleanup(); + ggml_vk_graph_cleanup_cpu_assist(); #endif // performance stats (graph) diff --git a/llama.cpp b/llama.cpp index f3c5146d1..c45ae1d50 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1355,7 +1355,7 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_offload(int gpu) { #elif defined(GGML_USE_CUBLAS) buft = ggml_backend_cuda_buffer_type(gpu); #elif defined(GGML_USE_VULKAN) - buft = ggml_backend_vk_buffer_type(); + buft = ggml_backend_vk_buffer_type(gpu); #elif defined(GGML_USE_SYCL) buft = ggml_backend_sycl_buffer_type(gpu); #elif defined(GGML_USE_CLBLAST) @@ -1392,6 +1392,33 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g GGML_UNUSED(tensor_split); } +static size_t llama_get_device_count() { +#if defined(GGML_USE_CUBLAS) + return ggml_backend_cuda_get_device_count(); +#elif defined(GGML_USE_VULKAN) + return ggml_backend_vk_get_device_count(); +#else + return 1; +#endif +} + +static size_t llama_get_device_memory(int device) { +#if defined(GGML_USE_CUBLAS) + size_t total; + size_t free; + ggml_backend_cuda_get_device_memory(device, &total, &free); + return free; +#elif defined(GGML_USE_VULKAN) + size_t total; + size_t free; + ggml_backend_vk_get_device_memory(device, &total, &free); + return free; +#else + return 1; + GGML_UNUSED(device); +#endif +} + // // globals // @@ -1763,6 +1790,10 @@ struct llama_context { ggml_backend_free(backend); } +#ifdef GGML_USE_VULKAN + ggml_vk_free_cpu_assist(); +#endif + ggml_backend_buffer_free(buf_input); ggml_free(ctx_input); } @@ -3436,22 +3467,18 @@ static bool llm_load_tensors( model.buft_layer[i] = llama_default_buffer_type_cpu(true); } -#ifdef GGML_USE_CUBLAS if (split_mode == LLAMA_SPLIT_LAYER) { // calculate the split points - int device_count = ggml_backend_cuda_get_device_count(); + int device_count = llama_get_device_count(); bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; }); - float splits[GGML_CUDA_MAX_DEVICES]; + std::vector splits(device_count); if (all_zero) { // default split, by free memory for (int i = 0; i < device_count; ++i) { - size_t total; - size_t free; - ggml_backend_cuda_get_device_memory(i, &total, &free); - splits[i] = free; + splits[i] = llama_get_device_memory(i); } } else { - std::copy(tensor_split, tensor_split + device_count, splits); + std::copy(tensor_split, tensor_split + device_count, splits.begin()); } // sum and normalize the splits to get the split points @@ -3467,19 +3494,17 @@ static bool 
llm_load_tensors( // assign the repeating layers to the devices according to the splits int act_gpu_layers = std::min(n_gpu_layers, (int)n_layer + 1); for (int64_t i = i_gpu_start; i < n_layer; ++i) { - int layer_gpu = std::upper_bound(splits, splits + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits; + int layer_gpu = std::upper_bound(splits.begin(), splits.begin() + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits.begin(); model.buft_layer[i] = llama_default_buffer_type_offload(layer_gpu); } // assign the output layer if (n_gpu_layers > n_layer) { - int layer_gpu = std::upper_bound(splits, splits + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits; + int layer_gpu = std::upper_bound(splits.begin(), splits.begin() + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits.begin(); model.buft_output = llama_default_buffer_type_offload(layer_gpu); } else { model.buft_output = llama_default_buffer_type_cpu(true); } - } else -#endif - { + } else { ggml_backend_buffer_type_t split_buft; if (split_mode == LLAMA_SPLIT_ROW) { split_buft = llama_default_buffer_type_split(main_gpu, tensor_split); @@ -10483,6 +10508,8 @@ size_t llama_max_devices(void) { return GGML_CUDA_MAX_DEVICES; #elif defined(GGML_USE_SYCL) return GGML_SYCL_MAX_DEVICES; +#elif defined(GGML_USE_VULKAN) + return GGML_VK_MAX_DEVICES; #else return 1; #endif @@ -10690,13 +10717,15 @@ struct llama_context * llama_new_context_with_model( } #elif defined(GGML_USE_VULKAN) if (model->n_gpu_layers > 0) { - ggml_backend_t backend = ggml_backend_vk_init(); - if (backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize Vulkan backend\n", __func__); - llama_free(ctx); - return nullptr; + for (int device = 0; device < ggml_backend_vk_get_device_count(); ++device) { + ggml_backend_t backend = ggml_backend_vk_init(device); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize Vulkan%d backend\n", __func__, device); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); } - ctx->backends.push_back(backend); } #elif defined(GGML_USE_SYCL) if (model->n_gpu_layers > 0) { From 0ef46da632c32faa1a538e5dc180994e8bbb46e1 Mon Sep 17 00:00:00 2001 From: Xiao-Yong Jin Date: Wed, 7 Feb 2024 02:17:25 -0600 Subject: [PATCH 536/811] llava-cli : always tokenize special tokens (#5382) * llava-cli: tokenize special tokens in prompt * llava-cli: use the escape CLI argument, remove incomplete separate escaping process --- examples/llava/llava-cli.cpp | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 6ac70ba69..031e9806d 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -34,7 +34,7 @@ static bool eval_id(struct llama_context * ctx_llama, int id, int * n_past) { static bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){ std::string str2 = str; - std::vector embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos); + std::vector embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos, true); eval_tokens(ctx_llama, embd_inp, n_batch, n_past); return true; } @@ -152,20 +152,8 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ size_t image_pos = prompt.find(""); if (image_pos != std::string::npos) { // new templating mode: Provide the full prompt including system message and use as a placeholder for the image - system_prompt = prompt.substr(0, image_pos); user_prompt = 
prompt.substr(image_pos + std::string("").length()); - // We replace \n with actual newlines in user_prompt, just in case -e was not used in templating string - size_t pos = 0; - while ((pos = user_prompt.find("\\n", pos)) != std::string::npos) { - user_prompt.replace(pos, 2, "\n"); - pos += 1; // Advance past the replaced newline - } - while ((pos = system_prompt.find("\\n", pos)) != std::string::npos) { - system_prompt.replace(pos, 2, "\n"); - pos += 1; // Advance past the replaced newline - } - printf("system_prompt: %s\n", system_prompt.c_str()); printf("user_prompt: %s\n", user_prompt.c_str()); } else { From 10afa6f1d11ebc9fcc1085f468170002cbf6e2b5 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 7 Feb 2024 18:16:55 +0800 Subject: [PATCH 537/811] [SYCL] update install make by w64devkit (#5297) --- README-sycl.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index 7aa4274a9..e3a8e726e 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -311,15 +311,13 @@ Output (example): a. Download & install cmake for Windows: https://cmake.org/download/ -b. Download & install make for Windows provided by mingw-w64 +b. Download & install mingw-w64 make for Windows provided by w64devkit -- Download binary package for Windows in https://github.com/niXman/mingw-builds-binaries/releases. +- Download the latest fortran version of [w64devkit](https://github.com/skeeto/w64devkit/releases). - Like [x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z](https://github.com/niXman/mingw-builds-binaries/releases/download/13.2.0-rt_v11-rev1/x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z). +- Extract `w64devkit` on your pc. -- Unzip the binary package. In the **bin** sub-folder and rename **xxx-make.exe** to **make.exe**. - -- Add the **bin** folder path in the Windows system PATH environment. +- Add the **bin** folder path in the Windows system PATH environment, like `C:\xxx\w64devkit\bin\`. ### Build locally: From aa7ab99be29b633263803f2e185265734c2d9427 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 7 Feb 2024 12:40:26 +0100 Subject: [PATCH 538/811] CUDA: fixed mmvq kernel for bs 2,3,4 and -sm row (#5386) --- ggml-cuda.cu | 66 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 3b828375e..db9da2459 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5313,7 +5313,7 @@ template static __global__ void template static __global__ void mul_mat_vec_q( const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, - const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par) { + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) { const int ncols_y = ncols_y_template != 0 ? 
ncols_y_template : ncols_y_par; @@ -5352,7 +5352,7 @@ static __global__ void mul_mat_vec_q( tmp[j] = warp_reduce_sum(tmp[j]); if (threadIdx.x == 0) { - dst[j*nrows_x + row] = tmp[j]; + dst[j*nrows_dst + row] = tmp[j]; } } } @@ -6828,7 +6828,7 @@ static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, floa template static void mul_mat_vec_q_cuda( const void * vx, const void * vy, float * dst, - const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) { + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, const int nrows_dst, cudaStream_t stream) { GGML_ASSERT(ncols_x % qk == 0); GGML_ASSERT(ncols_y <= 4); @@ -6839,40 +6839,40 @@ static void mul_mat_vec_q_cuda( switch (ncols_y) { case 1: mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; case 2: mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; case 3: mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; case 4: mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; // case 5: // mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); // break; // case 6: // mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); // break; // case 7: // mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); // break; // case 8: // mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); // break; default: GGML_ASSERT(false); // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; } } @@ -8391,7 +8391,7 @@ static void ggml_cuda_op_mul_mat_q( CUDA_CHECK(cudaGetDevice(&id)); // the main device has a larger memory buffer to hold the results from all GPUs - // nrows_dst == nrows of the matrix that the dequantize_mul_mat kernel writes into + // nrows_dst == nrows of the matrix that the kernel writes into const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; switch (src0->type) { @@ -8525,58 +8525,70 @@ static void ggml_cuda_op_mul_mat_vec_q( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + const int64_t ne10 = src1->ne[0]; + GGML_ASSERT(ne10 % QK8_1 == 0); + + const int64_t ne0 = dst->ne[0]; + + int id; + CUDA_CHECK(cudaGetDevice(&id)); + + // the main device has a larger memory buffer to hold the results from all GPUs + // nrows_dst == nrows of the matrix that the kernel writes into + const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? 
ne0 : row_diff; + switch (src0->type) { case GGML_TYPE_Q4_0: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q4_1: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q5_0: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q5_1: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q8_0: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q2_K: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q3_K: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q4_K: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q5_K: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_Q6_K: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_IQ2_XXS: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_IQ2_XS: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; case GGML_TYPE_IQ3_XXS: mul_mat_vec_q_cuda - (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; default: GGML_ASSERT(false); @@ -9909,7 +9921,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (src1->ne[1] <= 4 && min_compute_capability >= 
MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && src1->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); From b906596bb775b17656c2e51d5ab1b347faab6860 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Tom=C5=A1=C3=ADk?= Date: Wed, 7 Feb 2024 19:44:52 +0100 Subject: [PATCH 539/811] Add Ava in the list of llama.cpp UIs (#4362) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 0509b0ba1..7e1187349 100644 --- a/README.md +++ b/README.md @@ -150,6 +150,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [ollama/ollama](https://github.com/ollama/ollama) - [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) (AGPL) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) +- [cztomsik/ava](https://github.com/cztomsik/ava) (MIT) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) - [pythops/tenere](https://github.com/pythops/tenere) (AGPL) - [semperai/amica](https://github.com/semperai/amica) From 8c933b70c21e05b685d476d0a1f36b34cbda7365 Mon Sep 17 00:00:00 2001 From: Ebey Abraham Date: Wed, 7 Feb 2024 21:11:30 +0000 Subject: [PATCH 540/811] fix typo in readme (#5399) Co-authored-by: Ebey Abraham --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7e1187349..66166c01b 100644 --- a/README.md +++ b/README.md @@ -680,7 +680,7 @@ python3 -m pip install -r requirements.txt python3 convert.py models/mymodel/ # [Optional] for models using BPE tokenizers -python convert.py models/mymodel/ --vocabtype bpe +python convert.py models/mymodel/ --vocab-type bpe # quantize the model to 4-bits (using Q4_K_M method) ./quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M From c4fbb6717c684196bd13b72d21747557130914e8 Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Wed, 7 Feb 2024 22:39:23 +0100 Subject: [PATCH 541/811] CMAKE_OSX_ARCHITECTURES for MacOS cross compilation (#5393) Co-authored-by: Jared Van Bortel --- CMakeLists.txt | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 427015be5..a544f2da6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -850,7 +850,9 @@ endif() set(ARCH_FLAGS "") -if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATCHES "aarch64") OR ("${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "arm64")) +if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STREQUAL "arm64" OR + (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) message(STATUS "ARM detected") if (MSVC) add_compile_definitions(__ARM_NEON) @@ -876,7 +878,9 @@ if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATC list(APPEND ARCH_FLAGS -mno-unaligned-access) endif() endif() -elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "^(x86_64|i686|amd64|x64)$" ) +elseif (CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_GENERATOR_PLATFORM_LWR MATCHES "^(x86_64|i686|amd64|x64|win32)$" OR + (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES 
"^(x86_64|i686|AMD64)$")) message(STATUS "x86 detected") if (MSVC) # instruction set detection for MSVC only From 8504d2d0da8cc7a1f2eee0e9e56949f960510b75 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 8 Feb 2024 09:46:47 +0200 Subject: [PATCH 542/811] tests : .gitignore obj files --- tests/.gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/.gitignore b/tests/.gitignore index 092dce742..9427cf13d 100644 --- a/tests/.gitignore +++ b/tests/.gitignore @@ -1,3 +1,3 @@ * !*.* -test-c.o +*.o From 26d4efd11e48908e14e2ee9471a7fc4c57079a1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 8 Feb 2024 09:46:30 +0100 Subject: [PATCH 543/811] sampling: fix top_k <= 0 (#5388) * sampling: fix top_k <= 0 * Update llama.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- common/sampling.cpp | 2 +- llama.cpp | 4 ++++ tests/test-sampling.cpp | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index e8675a8c0..844ad7c53 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -132,7 +132,7 @@ static void sampler_queue( const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; - const int32_t top_k = params.top_k <= 0 ? n_vocab : params.top_k; + const int32_t top_k = params.top_k; const float top_p = params.top_p; const float min_p = params.min_p; const float tfs_z = params.tfs_z; diff --git a/llama.cpp b/llama.cpp index c45ae1d50..f8f5796a4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8585,6 +8585,10 @@ void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * can // } const int64_t t_start_sample_us = ggml_time_us(); + + if (k <= 0) { + k = candidates->size; + } k = std::max(k, (int) min_keep); k = std::min(k, (int) candidates->size); diff --git a/tests/test-sampling.cpp b/tests/test-sampling.cpp index c3b3d6629..6374958fe 100644 --- a/tests/test-sampling.cpp +++ b/tests/test-sampling.cpp @@ -235,6 +235,8 @@ int main(void) { test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f}, 1); test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f}, 3); + test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f, 0.1f}, 4); + test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f, 0.1f}, 0); test_top_p({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f}, 0); test_top_p({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f}, 0.7f); From a6e514a85f0fda38ff78ec91782877ea3d19ed98 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 8 Feb 2024 09:58:19 +0100 Subject: [PATCH 544/811] llava: fix typo/formatting in README.md (#5405) This commit fixes a typo in the README.md file for the llava example which is causing the formatting to look a little off: Clone llava-v15-7b`` and clip-vit-large-patch14-336`` locally Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 323c5fdd0..295181a34 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -21,7 +21,7 @@ After building, run: `./llava-cli` to see the usage. 
For example: ## Model conversion -- Clone `llava-v15-7b`` and `clip-vit-large-patch14-336`` locally: +- Clone `llava-v15-7b` and `clip-vit-large-patch14-336` locally: ```sh git clone https://huggingface.co/liuhaotian/llava-v1.5-7b From 4aa43fab569215a13495a7f1a0f8afc541b16d03 Mon Sep 17 00:00:00 2001 From: runfuture Date: Thu, 8 Feb 2024 18:36:19 +0800 Subject: [PATCH 545/811] llama : fix MiniCPM (#5392) * fix bug for norm_rms_eps missing * to align with the same order as convert.py for model write * fix: undo HF models permute tensor * update for flake8 lint --- convert-hf-to-gguf.py | 63 +++++++++++++++++++++++++++++++++++++++++-- llama.cpp | 2 ++ 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 829d68368..0d4ea03b4 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1078,17 +1078,76 @@ class MiniCPMModel(Model): self.gguf_writer.add_name("MiniCPM") self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) - self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) self.gguf_writer.add_file_type(self.ftype) - self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) def set_vocab(self): self._set_vocab_hf() + def _reverse_hf_permute(self, weights: Tensor, n_head: int, n_kv_head: int | None = None) -> Tensor: + if n_kv_head is not None and n_head != n_kv_head: + n_head //= n_kv_head + + return ( + weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) + .swapaxes(1, 2) + .reshape(weights.shape) + ) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + n_head = self.hparams.get("num_attention_heads") + n_kv_head = self.hparams.get("num_key_value_heads") + for name, data_torch in self.get_tensors(): + # we don't need these + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq")): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + # HF models permute some of the tensors, so we need to undo that + if name.endswith(("q_proj.weight")): + data_torch = self._reverse_hf_permute(data_torch, n_head, n_head) + if name.endswith(("k_proj.weight")): + data_torch = self._reverse_hf_permute(data_torch, n_head, n_kv_head) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 
as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + class QwenModel(Model): @staticmethod diff --git a/llama.cpp b/llama.cpp index f8f5796a4..552e0d02e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2947,6 +2947,8 @@ static void llm_load_hparams( } break; case LLM_ARCH_MINICPM: { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { case 40: model.type = e_model::MODEL_2B; break; default: model.type = e_model::MODEL_UNKNOWN; From b7b74cef36a93ae01e0b9af8986d131761742d0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 8 Feb 2024 11:36:54 +0100 Subject: [PATCH 546/811] fix trailing whitespace (#5407) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 552e0d02e..89acafbc3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8587,7 +8587,7 @@ void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * can // } const int64_t t_start_sample_us = ggml_time_us(); - + if (k <= 0) { k = candidates->size; } From ff4ff05c5ff4311c05a8ce1f984c7d8def4f07a5 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 8 Feb 2024 15:20:03 +0100 Subject: [PATCH 547/811] llava : add missing .py, and fix paths in README.md (#5414) This commit adds the missing .py extension to the convert-image-encoder-to-gguf script. It also fixes the paths for the `model` and `mmproj` options in the example llava-cli command. Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 295181a34..721d5e613 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -14,7 +14,7 @@ Build with cmake or run `make llava-cli` to build it. After building, run: `./llava-cli` to see the usage. For example: ```sh -./llava-cli -m llava-v1.5-7b/ggml-model-q5_k.gguf --mmproj llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg +./llava-cli -m ../llava-v1.5-7b/ggml-model-f16.gguf --mmproj ../llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg ``` **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. @@ -38,7 +38,7 @@ python ./examples/llava/llava-surgery.py -m ../llava-v1.5-7b 3. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF: ```sh -python ./examples/llava/convert-image-encoder-to-gguf -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b +python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b ``` 4. 
Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: From 6e99f2a04f1871d637dd77eb4d81de31a5510253 Mon Sep 17 00:00:00 2001 From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> Date: Thu, 8 Feb 2024 22:39:10 +0530 Subject: [PATCH 548/811] Fix f16_sycl cpy call from Arc (#5411) * fix f16_sycl cpy call * rm old logic * add fp16 build CI * use macro * format fix --- .github/workflows/build.yml | 41 +++++++++++++++++++++++++++++++++++++ ggml-sycl.cpp | 8 +++++--- 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f4c374ce5..ed292d6b8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -184,6 +184,47 @@ jobs: cmake -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx .. cmake --build . --config Release -j $(nproc) + ubuntu-22-cmake-sycl-fp16: + runs-on: ubuntu-22.04 + + continue-on-error: true + + steps: + - uses: actions/checkout@v2 + + - name: add oneAPI to apt + shell: bash + run: | + cd /tmp + wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main" + + - name: install oneAPI dpcpp compiler + shell: bash + run: | + sudo apt update + sudo apt install intel-oneapi-compiler-dpcpp-cpp + + - name: install oneAPI MKL library + shell: bash + run: | + sudo apt install intel-oneapi-mkl-devel + + - name: Clone + id: checkout + uses: actions/checkout@v3 + + - name: Build + id: cmake_build + run: | + source /opt/intel/oneapi/setvars.sh + mkdir build + cd build + cmake -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON .. + cmake --build . --config Release -j $(nproc) + # TODO: build with LLAMA_NO_METAL because test-backend-ops fail on "Apple Paravirtual device" and I don't know # how to debug it. 
# ref: https://github.com/ggerganov/llama.cpp/actions/runs/7131777249/job/19420981052#step:5:1124 diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index a03df4c65..dd562a898 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -12148,7 +12148,8 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( const int64_t src1_ncols, const int64_t src1_padded_row_size, const dpct::queue_ptr &stream) { - const int64_t ne00 = src0->ne[0]; + GGML_TENSOR_BINARY_OP_LOCALS + const int64_t row_diff = row_high - row_low; // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics @@ -12167,8 +12168,9 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( } else { src1_dfloat = src1_dfloat_a.alloc(ne00); ggml_cpy_f32_f16_sycl((const char *)src1_ddf_i, (char *)src1_dfloat, - ne00, ne00, 1, sizeof(float), 0, 0, ne00, 1, - sizeof(sycl::half), 0, 0, stream); + ne00, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, + nb13, stream); } } #else From 41f308f58edc2a04bcf9e245100b0a9b10e9a0fb Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 8 Feb 2024 21:33:03 +0100 Subject: [PATCH 549/811] llama : do not print "offloading layers" message in CPU-only builds (#5416) --- llama.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/llama.cpp b/llama.cpp index 89acafbc3..db7d1c1cd 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4209,8 +4209,7 @@ static bool llm_load_tensors( ctx_bufs.emplace_back(ctx, buf); } - // print memory requirements - { + if (llama_supports_gpu_offload()) { const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); @@ -4222,10 +4221,11 @@ static bool llm_load_tensors( const int max_offloadable_layers = hparams.n_layer + 1; LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); + } - for (ggml_backend_buffer_t buf : model.bufs) { - LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); - } + // print memory requirements + for (ggml_backend_buffer_t buf : model.bufs) { + LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); } // populate tensors_by_name From 8e6a9d2de0096af7120606c74ee2f26684e87b41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 8 Feb 2024 21:56:40 +0100 Subject: [PATCH 550/811] CUDA: more warps for mmvq on NVIDIA (#5394) --- ggml-cuda.cu | 133 +++++++++++++++++++++++++++++++++------------------ 1 file changed, 86 insertions(+), 47 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index db9da2459..5053757e6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5310,22 +5310,26 @@ template static __global__ void #endif // __CUDA_ARCH__ >= CC_VOLTA } -template +#define MMVQ_NWARPS_NVIDIA 4 +#define MMVQ_NWARPS_AMD_RDNA2 1 +#define MMVQ_NWARPS_AMD_OLD 4 + +template +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +__launch_bounds__(nwarps*WARP_SIZE, 1) // tells the compiler to use as many registers as it wants +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) static __global__ void mul_mat_vec_q( const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) { const int 
ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par; - const int row = blockIdx.x*blockDim.y + threadIdx.y; - - if (row >= nrows_x) { - return; - } + const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; + const int row = blockIdx.x; const int blocks_per_row_x = ncols_x / qk; const int blocks_per_col_y = nrows_y / QK8_1; - const int blocks_per_warp = vdr * WARP_SIZE / qi; + const int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi; // partial sum for each thread float tmp[ncols_y_template != 0 ? ncols_y_template : 8] = {0.0f}; @@ -5333,12 +5337,12 @@ static __global__ void mul_mat_vec_q( const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row_x; i += blocks_per_warp) { + for (int i = tid / (qi/vdr); i < blocks_per_row_x; i += blocks_per_iter) { const int ibx = row*blocks_per_row_x + i; // x block index const int iby = i * (qk/QK8_1); // y block index that aligns with ibx - const int iqs = vdr * (threadIdx.x % (qi/vdr)); // x block quant index when casting the quants to int + const int iqs = vdr * (tid % (qi/vdr)); // x block quant index when casting the quants to int #pragma unroll for (int j = 0; j < ncols_y; ++j) { @@ -5346,9 +5350,25 @@ static __global__ void mul_mat_vec_q( } } + __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y_template != 0 ? ncols_y_template : 8][WARP_SIZE]; + if (threadIdx.y > 0) { +#pragma unroll + for (int j = 0; j < ncols_y; ++j) { + tmp_shared[threadIdx.y-1][j][threadIdx.x] = tmp[j]; + } + } + __syncthreads(); + if (threadIdx.y > 0) { + return; + } + // sum up partial sums and write back result #pragma unroll for (int j = 0; j < ncols_y; ++j) { +#pragma unroll + for (int i = 0; i < nwarps-1; ++i) { + tmp[j] += tmp_shared[i][j][threadIdx.x]; + } tmp[j] = warp_reduce_sum(tmp[j]); if (threadIdx.x == 0) { @@ -6833,46 +6853,65 @@ static void mul_mat_vec_q_cuda( GGML_ASSERT(ncols_x % qk == 0); GGML_ASSERT(ncols_y <= 4); - const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - switch (ncols_y) { - case 1: - mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 2: - mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 3: - mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 4: - mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - // case 5: - // mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - // break; - // case 6: - // mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - // break; - // case 7: - // mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - // break; - // case 8: - // mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - // break; + int id; + CUDA_CHECK(cudaGetDevice(&id)); + + int nwarps; + if (g_device_caps[id].cc >= CC_OFFSET_AMD) { + nwarps = g_device_caps[id].cc >= CC_RDNA2 ? 
MMVQ_NWARPS_AMD_RDNA2 : MMVQ_NWARPS_AMD_OLD; + } else { + nwarps = MMVQ_NWARPS_NVIDIA; + } + + const dim3 block_nums(nrows_x, 1, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + switch (nwarps) { + case 1: switch(ncols_y) { + case 1: + mul_mat_vec_q<1, 1, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 2: + mul_mat_vec_q<1, 2, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 3: + mul_mat_vec_q<1, 3, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 4: + mul_mat_vec_q<1, 4, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + default: + GGML_ASSERT(false); + break; + } break; + case 4: switch(ncols_y) { + case 1: + mul_mat_vec_q<4, 1, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 2: + mul_mat_vec_q<4, 2, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 3: + mul_mat_vec_q<4, 3, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + case 4: + mul_mat_vec_q<4, 4, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); + break; + default: + GGML_ASSERT(false); + break; + } break; + default: GGML_ASSERT(false); - // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> - // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); break; } } From 44fbe34360dd760f9e68b4271f21533436397f84 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Fri, 9 Feb 2024 06:52:33 +0100 Subject: [PATCH 551/811] Fix Vulkan crash on APUs with very little device memory (#5424) * Fix Vulkan crash on APUs with very little device memory * Fix debug output function names --- ggml-vulkan.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 9e2846ee4..254f648a6 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -744,6 +744,8 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz } if (memory_type_index >= mem_props.memoryTypeCount) { + ctx->device.lock()->device.destroyBuffer(buf->buffer); + buf->size = 0; throw vk::OutOfDeviceMemoryError("No suitable memory type found"); } @@ -3875,7 +3877,7 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggml_tensor * node){ #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_ctx->preallocate_buffers_graph(" << node << ")" << std::endl; + std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; #endif const bool any_on_device = node->backend == GGML_BACKEND_GPU || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) @@ -3994,8 +3996,7 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { return; } #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_ctx->preallocate_buffers()" << std::endl; - std::cerr << "qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << std::endl; + std::cerr << "ggml_vk_preallocate_buffers(qx_size: " << 
ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << ")" << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); From b2f87cb64db47d799b6f3656855c9caf9792ab2a Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Fri, 9 Feb 2024 10:56:43 +0100 Subject: [PATCH 552/811] ggml : fix `error C2078: too many initializers` for MSVC ARM64 (#5404) --- ggml-quants.c | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 101d3e783..1031e3761 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -268,6 +268,17 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 #endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) #if defined(__ARM_NEON) + +#ifdef _MSC_VER + +#define ggml_vld1q_u32(w,x,y,z) { ((w) + ((uint64_t)(x) << 32)), ((y) + ((uint64_t)(z) << 32)) } + +#else + +#define ggml_vld1q_u32(w,x,y,z) { (w), (x), (y), (z) } + +#endif + #if !defined(__aarch64__) // 64-bit compatibility @@ -8698,10 +8709,10 @@ void ggml_vec_dot_iq3_xxs_q8_K(const int n, float * restrict s, const void * res for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { q8b = ggml_vld1q_s8_x4(q8); q8 += 64; memcpy(aux32, gas, 2*sizeof(uint32_t)); gas += 2*sizeof(uint32_t); - const uint32x4_t aux32x4_0 = {iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]}; - const uint32x4_t aux32x4_1 = {iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]}; - const uint32x4_t aux32x4_2 = {iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]}; - const uint32x4_t aux32x4_3 = {iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]}; + const uint32x4_t aux32x4_0 = ggml_vld1q_u32(iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]); + const uint32x4_t aux32x4_1 = ggml_vld1q_u32(iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]); + const uint32x4_t aux32x4_2 = ggml_vld1q_u32(iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]); + const uint32x4_t aux32x4_3 = ggml_vld1q_u32(iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]); q3 += 16; q3s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 7) & 127)))); q3s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 21) & 127)))); From e4124c24775f2cb5b3d7acc93bf9dc5471c172ef Mon Sep 17 00:00:00 2001 From: Marko Tasic Date: Fri, 9 Feb 2024 11:17:00 +0100 Subject: [PATCH 553/811] readme : add JavaScript/Wasm repo (#5415) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 66166c01b..0b4efdd33 100644 --- a/README.md +++ b/README.md @@ -124,6 +124,7 @@ Typically finetunes of the base models below are supported as well. 
- Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) - Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp) - JS/TS (llama.cpp server client): [lgrammel/modelfusion](https://modelfusion.dev/integration/model-provider/llamacpp) +- JavaScript/Wasm (works in browser): [tangledgroup/llama-cpp-wasm](https://github.com/tangledgroup/llama-cpp-wasm) - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb) - Rust (nicer API): [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp) - Rust (more direct bindings): [utilityai/llama-cpp-rs](https://github.com/utilityai/llama-cpp-rs) From e5ca3937c685d6e012ac4db40555d6ec100ff03c Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Fri, 9 Feb 2024 10:48:06 +0000 Subject: [PATCH 554/811] llama : do not cap thread count when MoE on CPU (#5419) * Not capping thread count when MoE inference is running on CPU * Whitespace --- llama.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index db7d1c1cd..0566b087b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7285,7 +7285,9 @@ static int llama_decode_internal( // TODO: this is mostly important for Apple Silicon where CBLAS is still performing very well // we still need some threads to process all non-mul_mat ops, but not too much to avoid interfering // with the BLAS calls. need a better solution - if (n_tokens >= 32 && ggml_cpu_has_blas() && !ggml_cpu_has_gpublas()) { + // MoE Special Case: This logic applies when hparams.n_expert == 0, i.e. the model is NOT an MoE model. When an MoE is + // being processed then Accelerate/BLAS will not be involved, so capping would limit performance. + if (n_tokens >= 32 && hparams.n_expert == 0 && ggml_cpu_has_blas() && !ggml_cpu_has_gpublas()) { n_threads = std::min(4, n_threads); } From 7c777fcd5dd4af7079e33390cf6a19c328a2666f Mon Sep 17 00:00:00 2001 From: Riley Stewart Date: Fri, 9 Feb 2024 02:49:49 -0800 Subject: [PATCH 555/811] server : fix prompt caching for repeated prompts (#5420) --- examples/server/server.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index eceda30d0..8d668f798 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1592,10 +1592,6 @@ struct llama_server_context LOG_TEE("slot %d : in cache: %i tokens | to process: %i tokens\n", slot.id, slot.n_past, slot.num_prompt_tokens_processed); } - LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); - - llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); - slot.cache_tokens = prompt_tokens; if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) @@ -1609,6 +1605,10 @@ struct llama_server_context } } + LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); + + llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); + LOG_VERBOSE("prompt ingested", { {"n_past", slot.n_past}, {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, From e00d2a62dd1441e3b089570ec06d05c18800d368 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 9 Feb 2024 14:00:59 +0100 Subject: [PATCH 556/811] llava : add requirements.txt and update README.md (#5428) * llava: add requirements.txt and update README.md This commit adds a `requirements.txt` file to the `examples/llava` directory. 
This file contains the required Python packages to run the scripts in
the `examples/llava` directory. The motivation for this is to make it
easier for users to run the scripts in `examples/llava`. This should
keep users from running into missing-package issues when the packages
are not installed on their system.

Signed-off-by: Daniel Bevenius

* llava: fix typo in llava-surgery.py output

Signed-off-by: Daniel Bevenius

---------

Signed-off-by: Daniel Bevenius
---
 examples/llava/README.md        | 12 +++++++++---
 examples/llava/llava-surgery.py |  2 +-
 examples/llava/requirements.txt |  3 +++
 3 files changed, 13 insertions(+), 4 deletions(-)
 create mode 100644 examples/llava/requirements.txt

diff --git a/examples/llava/README.md b/examples/llava/README.md
index 721d5e613..19f1a50a2 100644
--- a/examples/llava/README.md
+++ b/examples/llava/README.md
@@ -29,19 +29,25 @@ git clone https://huggingface.co/liuhaotian/llava-v1.5-7b
 git clone https://huggingface.co/openai/clip-vit-large-patch14-336
 ```

-2. Use `llava-surgery.py` to split the LLaVA model to LLaMA and multimodel projector constituents:
+2. Install the required Python packages:
+
+```sh
+pip install -r examples/llava/requirements.txt
+```
+
+3. Use `llava-surgery.py` to split the LLaVA model to LLaMA and multimodel projector constituents:

 ```sh
 python ./examples/llava/llava-surgery.py -m ../llava-v1.5-7b
 ```

-3. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF:
+4. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF:

 ```sh
 python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b
 ```

-4. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF:
+5. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF:

 ```sh
 python ./convert.py ../llava-v1.5-7b
diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py
index 515f6b58d..0a61efdfe 100644
--- a/examples/llava/llava-surgery.py
+++ b/examples/llava/llava-surgery.py
@@ -42,5 +42,5 @@ if len(clip_tensors) > 0:
   torch.save(checkpoint, path)

 print("Done!")
-print(f"Now you can convert {args.model} to a a regular LLaMA GGUF file.")
+print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.")
 print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.")
diff --git a/examples/llava/requirements.txt b/examples/llava/requirements.txt
new file mode 100644
index 000000000..f80f727a7
--- /dev/null
+++ b/examples/llava/requirements.txt
@@ -0,0 +1,3 @@
+-r ../../requirements/requirements-convert.txt
+pillow~=10.2.0
+torch~=2.1.1

From 4b7b38bef5addbd31f453871d79647fbae6bec8a Mon Sep 17 00:00:00 2001
From: Neuman Vong
Date: Sat, 10 Feb 2024 05:30:19 +1100
Subject: [PATCH 557/811] vulkan: Set limit for task concurrency (#5427)

A common default for the maximum number of open files is 256, which can
lead to `asyncio.gather(*tasks)` failing with Too many open files.
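The diff below bounds concurrency by making every task acquire an
asyncio.Semaphore before it runs. A rough, self-contained sketch of that
pattern follows; the compile_one coroutine, its sleep, and the shader
names are placeholders for illustration only, not code from this
repository — only the semaphore-wrapped gather structure mirrors the
actual change.

# sketch of bounding asyncio.gather concurrency with a semaphore
import asyncio

ASYNCIO_CONCURRENCY = 64  # cap on coroutines that run (and hold files open) at once

async def compile_one(name: str) -> str:
    # stand-in for the real per-shader compile step
    await asyncio.sleep(0.01)
    return f"{name}.spv"

async def with_semaphore(sem: asyncio.Semaphore, coro):
    # acquire the semaphore before awaiting the wrapped coroutine
    async with sem:
        return await coro

async def main() -> None:
    sem = asyncio.Semaphore(ASYNCIO_CONCURRENCY)
    tasks = [compile_one(f"shader_{i}") for i in range(1000)]
    # at most ASYNCIO_CONCURRENCY coroutines are in flight at any moment
    results = await asyncio.gather(*(with_semaphore(sem, t) for t in tasks))
    print(f"compiled {len(results)} shaders")

if __name__ == "__main__":
    asyncio.run(main())

Without such a cap, a run of the generator fails like this: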
$ python ggml_vk_generate_shaders.py --glslc=$ANDROID_NDK_PATH/shader-tools/darwin-x86_64/glslc ggml_vulkan: Generating and compiling shaders to SPIR-V Traceback (most recent call last): File "/Users/neuman/Code.noindex/github/llama.cpp/ggml_vk_generate_shaders.py", line 2326, in asyncio.run(main()) File "/Users/neuman/Code.noindex/miniforge3/lib/python3.10/asyncio/runners.py", line 44, in run return loop.run_until_complete(main) File "/Users/neuman/Code.noindex/miniforge3/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete return future.result() File "/Users/neuman/Code.noindex/github/llama.cpp/ggml_vk_generate_shaders.py", line 2294, in main await asyncio.gather(*tasks) [...snip...] OSError: [Errno 24] Too many open files This change sets a reasonable concurrency limit for tasks (and therefore open files), without significant impact on run time. --- ggml_vk_generate_shaders.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index 4abb0383f..b2e86e182 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -2067,6 +2067,8 @@ type_names = { K_QUANTS_PER_ITERATION = 2 +ASYNCIO_CONCURRENCY = 64 + output_dir = gettempdir() lock = asyncio.Lock() @@ -2291,7 +2293,14 @@ async def main(): tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - await asyncio.gather(*tasks) + # Helper to decorate tasks with semaphore acquisition. + async def withSemaphore(sem, task): + async with sem: + return await task + + # Run tasks concurrently guarded by a concurrency limit. + sem = asyncio.Semaphore(ASYNCIO_CONCURRENCY) + await asyncio.gather(*(withSemaphore(sem, task) for task in tasks)) with open("ggml-vulkan-shaders.hpp", "w") as f: f.write("#include \n\n") From 4633d93af08d890ecd00fa6e4f61d76f21cded4c Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Fri, 9 Feb 2024 10:42:27 +0100 Subject: [PATCH 558/811] ggml : add abort_callback for cpu backend (ggml/725) * a way to use abort_callback with the cpu backend * whisper update --- ggml-backend.c | 26 ++++++++++++++++++++++---- ggml-backend.h | 5 +++-- ggml.c | 2 +- ggml.h | 9 +++++++-- 4 files changed, 33 insertions(+), 9 deletions(-) diff --git a/ggml-backend.c b/ggml-backend.c index 0764dfebc..532da8eda 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -653,6 +653,9 @@ struct ggml_backend_cpu_context { int n_threads; void * work_data; size_t work_size; + + ggml_abort_callback abort_callback; + void * abort_callback_data; }; GGML_CALL static const char * ggml_backend_cpu_name(ggml_backend_t backend) { @@ -691,6 +694,9 @@ GGML_CALL static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(gg cpu_plan->cplan.work_data = malloc(cpu_plan->cplan.work_size); } + cpu_plan->cplan.abort_callback = cpu_ctx->abort_callback; + cpu_plan->cplan.abort_callback_data = cpu_ctx->abort_callback_data; + return cpu_plan; } @@ -721,9 +727,11 @@ GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, str cpu_ctx->work_data = realloc(cpu_ctx->work_data, cplan.work_size); cpu_ctx->work_size = cplan.work_size; } - cplan.work_data = cpu_ctx->work_data; + cplan.abort_callback = cpu_ctx->abort_callback; + cplan.abort_callback_data = cpu_ctx->abort_callback_data; + ggml_graph_compute(cgraph, &cplan); return true; } @@ -759,9 +767,11 @@ static struct ggml_backend_i 
cpu_backend_i = { ggml_backend_t ggml_backend_cpu_init(void) { struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context)); - ctx->n_threads = GGML_DEFAULT_N_THREADS; - ctx->work_data = NULL; - ctx->work_size = 0; + ctx->n_threads = GGML_DEFAULT_N_THREADS; + ctx->work_data = NULL; + ctx->work_size = 0; + ctx->abort_callback = NULL; + ctx->abort_callback_data = NULL; ggml_backend_t cpu_backend = malloc(sizeof(struct ggml_backend)); @@ -783,6 +793,14 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { ctx->n_threads = n_threads; } +void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data) { + GGML_ASSERT(ggml_backend_is_cpu(backend_cpu)); + + struct ggml_backend_cpu_context * ctx = (struct ggml_backend_cpu_context *)backend_cpu->context; + ctx->abort_callback = abort_callback; + ctx->abort_callback_data = abort_callback_data; +} + GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } diff --git a/ggml-backend.h b/ggml-backend.h index 8b8160fcf..282b3a9b7 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -83,8 +83,9 @@ extern "C" { GGML_API ggml_backend_t ggml_backend_cpu_init(void); - GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend); - GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); + GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend); + GGML_API void ggml_backend_cpu_set_n_threads (ggml_backend_t backend_cpu, int n_threads); + GGML_API void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data); // Create a backend buffer from an existing pointer GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); diff --git a/ggml.c b/ggml.c index f783a6fd3..86cd65862 100644 --- a/ggml.c +++ b/ggml.c @@ -16649,7 +16649,7 @@ struct ggml_compute_state_shared { atomic_int node_n; // active graph node atomic_int node_task; // active graph node task phase - bool (*abort_callback)(void * data); // abort ggml_graph_compute when true + ggml_abort_callback abort_callback; // abort ggml_graph_compute when true void * abort_callback_data; }; diff --git a/ggml.h b/ggml.h index e0a4799f3..1360cd8ee 100644 --- a/ggml.h +++ b/ggml.h @@ -567,6 +567,11 @@ extern "C" { static const size_t GGML_TENSOR_SIZE = sizeof(struct ggml_tensor); + // Abort callback + // If not NULL, called before ggml computation + // If it returns true, the computation is aborted + typedef bool (*ggml_abort_callback)(void * data); + // the compute plan that needs to be prepared for ggml_graph_compute() // since https://github.com/ggerganov/ggml/issues/287 struct ggml_cplan { @@ -576,8 +581,8 @@ extern "C" { int n_threads; // abort ggml_graph_compute when true - bool (*abort_callback)(void * data); - void * abort_callback_data; + ggml_abort_callback abort_callback; + void * abort_callback_data; }; enum ggml_cgraph_eval_order { From 43b65f5eb85e8741aba573a8f65bb8efad245d31 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 10 Feb 2024 09:30:36 +0200 Subject: [PATCH 559/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 7b6c17915..6ae75bc31 100644 --- 
a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -475cbad5c1c834e31e26a2283bc1413181644360 +2c7cf49810d523b9632da393a9e8270b60bf3b24 From cd9aea63b577a83def84dbd6dcd90a6fa02af745 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 10 Feb 2024 09:53:05 +0200 Subject: [PATCH 560/811] scripts : update sync scripts with new backends --- scripts/sync-ggml-am.sh | 12 ++++++++++++ scripts/sync-ggml.sh | 6 ++++++ 2 files changed, 18 insertions(+) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 6b2514a11..2c391e641 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -97,6 +97,8 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-cuda.cu -> ggml-cuda.cu # src/ggml-cuda.h -> ggml-cuda.h # src/ggml-impl.h -> ggml-impl.h + # src/ggml-kompute.cpp -> ggml-kompute.cpp + # src/ggml-kompute.h -> ggml-kompute.h # src/ggml-metal.h -> ggml-metal.h # src/ggml-metal.m -> ggml-metal.m # src/ggml-mpi.h -> ggml-mpi.h @@ -105,6 +107,10 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-opencl.h -> ggml-opencl.h # src/ggml-quants.c -> ggml-quants.c # src/ggml-quants.h -> ggml-quants.h + # src/ggml-sycl.cpp -> ggml-sycl.cpp + # src/ggml-sycl.h -> ggml-sycl.h + # src/ggml-vulkan.cpp -> ggml-vulkan.cpp + # src/ggml-vulkan.h -> ggml-vulkan.h # include/ggml/ggml.h -> ggml.h # include/ggml/ggml-alloc.h -> ggml-alloc.h # include/ggml/ggml-backend.h -> ggml-backend.h @@ -123,6 +129,8 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-cuda\.cu/ggml-cuda.cu/g' \ -e 's/src\/ggml-cuda\.h/ggml-cuda.h/g' \ -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ + -e 's/src\/ggml-kompute\.cpp/ggml-kompute.cpp/g' \ + -e 's/src\/ggml-kompute\.h/ggml-kompute.h/g' \ -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ @@ -131,6 +139,10 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-opencl\.h/ggml-opencl.h/g' \ -e 's/src\/ggml-quants\.c/ggml-quants.c/g' \ -e 's/src\/ggml-quants\.h/ggml-quants.h/g' \ + -e 's/src\/ggml-sycl\.cpp/ggml-sycl.cpp/g' \ + -e 's/src\/ggml-sycl\.h/ggml-sycl.h/g' \ + -e 's/src\/ggml-vulkan\.cpp/ggml-vulkan.cpp/g' \ + -e 's/src\/ggml-vulkan\.h/ggml-vulkan.h/g' \ -e 's/include\/ggml\/ggml\.h/ggml.h/g' \ -e 's/include\/ggml\/ggml-alloc\.h/ggml-alloc.h/g' \ -e 's/include\/ggml\/ggml-backend\.h/ggml-backend.h/g' \ diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 0097db435..feb34bbc8 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -7,6 +7,8 @@ cp -rpv ../ggml/src/ggml-backend.c ./ggml-backend.c cp -rpv ../ggml/src/ggml-cuda.cu ./ggml-cuda.cu cp -rpv ../ggml/src/ggml-cuda.h ./ggml-cuda.h cp -rpv ../ggml/src/ggml-impl.h ./ggml-impl.h +cp -rpv ../ggml/src/ggml-kompute.cpp ./ggml-kompute.cpp +cp -rpv ../ggml/src/ggml-kompute.h ./ggml-kompute.h cp -rpv ../ggml/src/ggml-metal.h ./ggml-metal.h cp -rpv ../ggml/src/ggml-metal.m ./ggml-metal.m cp -rpv ../ggml/src/ggml-metal.metal ./ggml-metal.metal @@ -16,6 +18,10 @@ cp -rpv ../ggml/src/ggml-opencl.cpp ./ggml-opencl.cpp cp -rpv ../ggml/src/ggml-opencl.h ./ggml-opencl.h cp -rpv ../ggml/src/ggml-quants.c ./ggml-quants.c cp -rpv ../ggml/src/ggml-quants.h ./ggml-quants.h +cp -rpv ../ggml/src/ggml-sycl.cpp ./ggml-sycl.cpp +cp -rpv ../ggml/src/ggml-sycl.h ./ggml-sycl.h +cp -rpv ../ggml/src/ggml-vulkan.cpp ./ggml-vulkan.cpp +cp -rpv ../ggml/src/ggml-vulkan.h ./ggml-vulkan.h cp -rpv ../ggml/include/ggml/ggml.h ./ggml.h cp -rpv ../ggml/include/ggml/ggml-alloc.h ./ggml-alloc.h cp -rpv 
../ggml/include/ggml/ggml-backend.h ./ggml-backend.h From f026f8120f97090d34a52b3dc023c82e0ede3f7d Mon Sep 17 00:00:00 2001 From: Ian Bull Date: Sat, 10 Feb 2024 02:53:28 -0800 Subject: [PATCH 561/811] metal : use autoreleasepool to avoid memory leaks (#5437) There appears to be a known memory leak when using the `MLTCommandBuffer`. It is suggested to use `@autoreleasepool` in [1,2] [1] https://developer.apple.com/forums/thread/662721 [2] https://forums.developer.apple.com/forums/thread/120931 This change-set wraps the `ggml_metal_graph_compute` in a `@autoreleasepool`. This commit addresses https://github.com/ggerganov/llama.cpp/issues/5436 --- ggml-metal.m | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 5260ed827..c1d8e2de8 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -687,6 +687,7 @@ static bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { + @autoreleasepool { MTLComputePassDescriptor * edesc = MTLComputePassDescriptor.computePassDescriptor; edesc.dispatchType = MTLDispatchTypeSerial; @@ -2272,6 +2273,7 @@ static bool ggml_metal_graph_compute( [[MTLCaptureManager sharedCaptureManager] stopCapture]; } + } return true; } From 907e08c1109f498b01036367804cff3082c44524 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Sun, 11 Feb 2024 11:16:22 +0100 Subject: [PATCH 562/811] server : add llama2 chat template (#5425) * server: add mistral chat template * server: fix typo * server: rename template mistral to llama2 * server: format_llama2: remove BOS * server: validate "--chat-template" argument * server: clean up using_chatml variable Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- examples/server/oai.hpp | 8 ++++++-- examples/server/server.cpp | 22 ++++++++++++++++++++-- examples/server/utils.hpp | 30 ++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp index 43410f803..2eca8a9fb 100644 --- a/examples/server/oai.hpp +++ b/examples/server/oai.hpp @@ -15,9 +15,13 @@ using json = nlohmann::json; inline static json oaicompat_completion_params_parse( - const json &body /* openai api json semantics */) + const json &body, /* openai api json semantics */ + const std::string &chat_template) { json llama_params; + std::string formatted_prompt = chat_template == "chatml" + ? format_chatml(body["messages"]) // OpenAI 'messages' to chatml (with <|im_start|>,...) + : format_llama2(body["messages"]); // OpenAI 'messages' to llama2 (with [INST],...) 
llama_params["__oaicompat"] = true; @@ -30,7 +34,7 @@ inline static json oaicompat_completion_params_parse( // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; llama_params["model"] = json_value(body, "model", std::string("unknown")); - llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' + llama_params["prompt"] = formatted_prompt; llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8d668f798..4d212f1f0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -36,6 +36,7 @@ struct server_params std::string hostname = "127.0.0.1"; std::vector api_keys; std::string public_path = "examples/server/public"; + std::string chat_template = "chatml"; int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; @@ -1859,6 +1860,8 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); + printf(" --chat-template FORMAT_NAME"); + printf(" set chat template, possible valus is: llama2, chatml (default %s)", sparams.chat_template.c_str()); printf("\n"); } @@ -2290,6 +2293,21 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--chat-template") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::string value(argv[i]); + if (value != "chatml" && value != "llama2") { + fprintf(stderr, "error: chat template can be \"llama2\" or \"chatml\", but got: %s\n", value.c_str()); + invalid_param = true; + break; + } + sparams.chat_template = value; + } else if (arg == "--override-kv") { if (++i >= argc) { @@ -2743,13 +2761,13 @@ int main(int argc, char **argv) // TODO: add mount point without "/v1" prefix -- how? 
- svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) + svr.Post("/v1/chat/completions", [&llama, &validate_api_key, &sparams](const httplib::Request &req, httplib::Response &res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } - json data = oaicompat_completion_params_parse(json::parse(req.body)); + json data = oaicompat_completion_params_parse(json::parse(req.body), sparams.chat_template); const int task_id = llama.queue_tasks.get_new_id(); llama.queue_results.add_waiting_task_id(task_id); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 70cce0721..548548962 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -167,6 +167,34 @@ static T json_value(const json &body, const std::string &key, const T &default_v : default_value; } +inline std::string format_llama2(std::vector messages) +{ + std::ostringstream output; + bool is_inside_turn = false; + + for (auto it = messages.begin(); it != messages.end(); ++it) { + if (!is_inside_turn) { + output << "[INST] "; + } + std::string role = json_value(*it, "role", std::string("user")); + std::string content = json_value(*it, "content", std::string("")); + if (role == "system") { + output << "<>\n" << content << "\n<>\n\n"; + is_inside_turn = true; + } else if (role == "user") { + output << content << " [/INST]"; + is_inside_turn = true; + } else { + output << " " << content << " "; + is_inside_turn = false; + } + } + + LOG_VERBOSE("format_llama2", {{"text", output.str()}}); + + return output.str(); +} + inline std::string format_chatml(std::vector messages) { std::ostringstream chatml_msgs; @@ -180,6 +208,8 @@ inline std::string format_chatml(std::vector messages) chatml_msgs << "<|im_start|>assistant" << '\n'; + LOG_VERBOSE("format_chatml", {{"text", chatml_msgs.str()}}); + return chatml_msgs.str(); } From e4640d8fdf56f14a6db3d092bcd3d2d315cb5d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sun, 11 Feb 2024 12:44:51 +0100 Subject: [PATCH 563/811] lookup: add print for drafting performance (#5450) --- examples/lookup/lookup.cpp | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index d8de7dd38..18235b8a1 100644 --- a/examples/lookup/lookup.cpp +++ b/examples/lookup/lookup.cpp @@ -1,7 +1,9 @@ #include "common.h" +#include "ggml.h" #include "llama.h" #include +#include #include #include #include @@ -73,6 +75,8 @@ int main(int argc, char ** argv){ int n_drafted = 0; int n_accept = 0; + int64_t t_draft_us = 0; + int n_past = inp.size(); bool has_eos = false; @@ -160,7 +164,7 @@ int main(int argc, char ** argv){ // generate n_pred tokens through prompt lookup auto prompt_lookup = [&]() -> void { - int inp_size = inp.size(); + const int inp_size = inp.size(); for (int ngram_size = ngram_max ; ngram_size > ngram_min; --ngram_size){ const llama_token * ngram = &inp[inp_size - ngram_size]; @@ -191,8 +195,12 @@ int main(int argc, char ** argv){ return; }; + const int64_t t_start_draft_us = ggml_time_us(); + prompt_lookup(); + t_draft_us += ggml_time_us() - t_start_draft_us; + llama_decode(ctx, batch_tgt); ++n_past; @@ -210,6 +218,8 @@ int main(int argc, char ** argv){ LOG_TEE("n_draft = %d\n", n_draft); LOG_TEE("n_predict = %d\n", n_predict); LOG_TEE("n_drafted = %d\n", n_drafted); + LOG_TEE("t_draft = %.2f ms, %.2f us per token, %.2f tokens per second\n", + 
t_draft_us*1e-3, 1.0f*t_draft_us/n_drafted, n_drafted/(1e-6*t_draft_us)); LOG_TEE("n_accept = %d\n", n_accept); LOG_TEE("accept = %.3f%%\n", 100.0f * n_accept / n_drafted); From a07d0fee1f05c5c1dc49948ae1a3293db017275f Mon Sep 17 00:00:00 2001 From: snadampal <87143774+snadampal@users.noreply.github.com> Date: Sun, 11 Feb 2024 07:22:33 -0600 Subject: [PATCH 564/811] ggml : add mmla kernels for quantized GEMM (#4966) * ggml: aarch64: implement smmla kernel for q8_0_q8_0 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q8_0_q8_0 gemm. The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: aarch64: implement smmla kernel for q4_0_q8_0 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q4_0_q8_0 gemm. The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: aarch64: implement smmla kernel for q4_1_q8_1 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q4_1_q8_1 gemm. The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: update unit tests for the new vec_dot interface * llama.cpp: add MATMUL_INT8 capability to system_info --- common/common.cpp | 1 + ggml-quants.c | 320 +++++++++++++++++++++++++++++++++-- ggml-quants.h | 26 +-- ggml.c | 164 ++++++++++++------ ggml.h | 5 +- llama.cpp | 1 + pocs/vdot/q8dot.cpp | 4 +- pocs/vdot/vdot.cpp | 4 +- tests/test-quantize-fns.cpp | 2 +- tests/test-quantize-perf.cpp | 2 +- 10 files changed, 441 insertions(+), 88 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index e0082a823..9a489a553 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1550,6 +1550,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); fprintf(stream, "cpu_has_sse3: %s\n", ggml_cpu_has_sse3() ? "true" : "false"); fprintf(stream, "cpu_has_vsx: %s\n", ggml_cpu_has_vsx() ? "true" : "false"); + fprintf(stream, "cpu_has_matmul_int8: %s\n", ggml_cpu_has_matmul_int8() ? "true" : "false"); #ifdef NDEBUG fprintf(stream, "debug: false\n"); diff --git a/ggml-quants.c b/ggml-quants.c index 1031e3761..6c122dd2a 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -49,6 +49,8 @@ #define MIN(a, b) ((a) < (b) ? (a) : (b)) #define MAX(a, b) ((a) > (b) ? 
(a) : (b)) +#define UNUSED GGML_UNUSED + #define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) #if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) @@ -3677,15 +3679,88 @@ static inline __m128i get_scale_shuffle(int i) { } #endif -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif const block_q4_0 * restrict x = vx; const block_q8_0 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_0 * restrict vx0 = vx; + const block_q4_0 * restrict vx1 = vx + bx; + + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_0 * restrict b_x0 = &vx0[i]; + const block_q4_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + const int8x16_t s8b = vdupq_n_s8(0x8); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // sub 8 + const int8x16_t x0_l = vsubq_s8(v0_0l, s8b); + const int8x16_t x0_h = vsubq_s8(v0_0h, s8b); + const int8x16_t x1_l = vsubq_s8(v0_1l, s8b); + const int8x16_t x1_h = vsubq_s8(v0_1h, s8b); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, 
sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); float32x4_t sumv1 = vdupq_n_f32(0.0f); @@ -3967,15 +4042,89 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, #endif } -void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif const block_q4_1 * restrict x = vx; const block_q8_1 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_1 * restrict vx0 = vx; + const block_q4_1 * restrict vx1 = vx + bx; + const block_q8_1 * restrict vy0 = vy; + const block_q8_1 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t summs0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_1 * restrict b_x0 = &vx0[i]; + const block_q4_1 * restrict b_x1 = &vx1[i]; + const block_q8_1 * restrict b_y0 = &vy0[i]; + const block_q8_1 * restrict b_y1 = &vy1[i]; + + float32x4_t summs_t = {GGML_FP16_TO_FP32(b_x0->m) * b_y0->s, + GGML_FP16_TO_FP32(b_x1->m) * b_y0->s, + GGML_FP16_TO_FP32(b_x0->m) * b_y1->s, + GGML_FP16_TO_FP32(b_x1->m) * b_y1->s}; + summs0 += summs_t; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t x0_l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t x0_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t x1_l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t x1_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + // mmla into int32x4_t + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + + float32x4_t sumv1 = 
vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + sumv2 = sumv2 + summs0; + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif // TODO: add WASM SIMD #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); @@ -4107,12 +4256,17 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); assert(qk == QK5_0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_0 * restrict x = vx; const block_q8_0 * restrict y = vy; @@ -4393,12 +4547,17 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; assert(n % qk == 0); assert(qk == QK5_1); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_1 * restrict x = vx; const block_q8_1 * restrict y = vy; @@ -4692,15 +4851,75 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif const block_q8_0 * restrict x = vx; const block_q8_0 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q8_0 * restrict vx0 = vx; + const block_q8_0 * restrict vx1 = vx + bx; + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q8_0 * restrict b_x0 = &vx0[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + + const block_q8_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const int8x16_t x0_l = vld1q_s8(b_x0->qs); + const int8x16_t x0_h = vld1q_s8(b_x0->qs + 16); + const int8x16_t x1_l = vld1q_s8(b_x1->qs); + const int8x16_t x1_h = vld1q_s8(b_x1->qs + 16); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = 
vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); float32x4_t sumv1 = vdupq_n_f32(0.0f); @@ -4795,7 +5014,12 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri } #if QK_K == 256 -void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q2_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -5171,7 +5395,12 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q2_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -5429,8 +5658,13 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const uint32_t kmask1 = 0x03030303; const uint32_t kmask2 = 0x0f0f0f0f; @@ -5949,8 +6183,13 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q3_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6292,8 +6531,13 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_K_q8_K(int n, 
float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6648,8 +6892,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri #endif } #else -void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6891,8 +7140,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -7311,8 +7565,13 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -7577,8 +7836,13 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri #if QK_K == 256 -void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q6_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8009,8 +8273,13 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q6_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8339,8 +8608,13 @@ static const int8_t keven_signs_q2xs[1024] = { 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, }; -void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % 
QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq2_xxs * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8462,8 +8736,13 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res #endif } -void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq2_xs * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8682,8 +8961,13 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest } // TODO -void ggml_vec_dot_iq3_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq3_xxs * restrict x = vx; const block_q8_K * restrict y = vy; diff --git a/ggml-quants.h b/ggml-quants.h index bfdf3c997..68f09b1e1 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -245,20 +245,20 @@ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product -void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); -void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * 
GGML_RESTRICT vy); -void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") diff --git a/ggml.c b/ggml.c index 86cd65862..e45b78d7e 100644 --- a/ggml.c +++ b/ggml.c @@ -428,8 +428,8 @@ int64_t ggml_cycles_per_ms(void) { static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); -static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y); -static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y); +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc); +static void ggml_vec_dot_f16(int n, float * restrict s, size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc); static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { [GGML_TYPE_I8] = { @@ -457,6 +457,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .is_quantized = false, .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f32, .vec_dot_type = GGML_TYPE_F32, + .nrows = 1, }, [GGML_TYPE_F16] = { .type_name = "f16", @@ -468,6 +469,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) ggml_fp32_to_fp16_row, .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f16, .vec_dot_type = GGML_TYPE_F16, + .nrows = 1, }, [GGML_TYPE_Q4_0] = { .type_name = "q4_0", @@ -479,6 +481,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_0_reference, .vec_dot = ggml_vec_dot_q4_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [GGML_TYPE_Q4_1] = { .type_name = "q4_1", @@ -490,6 +497,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_1_reference, .vec_dot = ggml_vec_dot_q4_1_q8_1, .vec_dot_type = GGML_TYPE_Q8_1, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [4] = { // GGML_TYPE_Q4_2 .type_name = "DEPRECATED", @@ -501,6 +513,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = NULL, .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, }, [5] = { // GGML_TYPE_Q4_3 .type_name = "DEPRECATED", @@ -512,6 +525,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = NULL, .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, }, [GGML_TYPE_Q5_0] = { .type_name = "q5_0", @@ -523,6 +537,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_0_reference, .vec_dot = ggml_vec_dot_q5_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, }, [GGML_TYPE_Q5_1] = { .type_name = "q5_1", @@ -534,6 +549,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_1_reference, .vec_dot = ggml_vec_dot_q5_1_q8_1, .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, }, [GGML_TYPE_Q8_0] = { .type_name = "q8_0", @@ -545,6 +561,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q8_0_reference, .vec_dot = ggml_vec_dot_q8_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [GGML_TYPE_Q8_1] = { .type_name = "q8_1", @@ -554,6 +575,7 @@ static const ggml_type_traits_t 
type_traits[GGML_TYPE_COUNT] = { .from_float = quantize_row_q8_1, .from_float_reference = (ggml_from_float_t) quantize_row_q8_1_reference, .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, }, [GGML_TYPE_Q2_K] = { .type_name = "q2_K", @@ -565,6 +587,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q2_K_reference, .vec_dot = ggml_vec_dot_q2_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q3_K] = { .type_name = "q3_K", @@ -576,6 +599,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q3_K_reference, .vec_dot = ggml_vec_dot_q3_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q4_K] = { .type_name = "q4_K", @@ -587,6 +611,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_K_reference, .vec_dot = ggml_vec_dot_q4_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q5_K] = { .type_name = "q5_K", @@ -598,6 +623,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_K_reference, .vec_dot = ggml_vec_dot_q5_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q6_K] = { .type_name = "q6_K", @@ -609,6 +635,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q6_K_reference, .vec_dot = ggml_vec_dot_q6_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ2_XXS] = { .type_name = "iq2_xxs", @@ -620,6 +647,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ2_XS] = { .type_name = "iq2_xs", @@ -631,6 +659,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ3_XXS] = { .type_name = "iq3_xxs", @@ -642,6 +671,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t)quantize_row_iq3_xxs_reference, .vec_dot = ggml_vec_dot_iq3_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", @@ -1212,7 +1242,13 @@ inline static void ggml_vec_neg_f32 (const int n, float * y, const float * x) inline static void ggml_vec_mul_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]*y[i]; } inline static void ggml_vec_div_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]/y[i]; } -static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y) { +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + #ifdef GGML_SIMD float sumf = 0.0f; const int np = (n & ~(GGML_F32_STEP - 1)); @@ -1249,7 +1285,13 @@ static void ggml_vec_dot_f32(const int n, float * restrict s, const float * rest *s = sumf; } -static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y) { +static void ggml_vec_dot_f16(int n, float * restrict s, 
size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + ggml_float sumf = 0.0; #if defined(GGML_SIMD) @@ -1455,7 +1497,7 @@ inline static void ggml_vec_scale_f32(const int n, float * y, const float v) { #endif } -inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, x, x); *s = sqrtf(*s); } +inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, 0, x, 0, x, 0, 1); *s = sqrtf(*s); } inline static void ggml_vec_sqr_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = x[i]*x[i]; } inline static void ggml_vec_sqrt_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = sqrtf(x[i]); } inline static void ggml_vec_log_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = logf(x[i]); } @@ -9992,6 +10034,7 @@ static void ggml_compute_forward_mul_mat( ggml_vec_dot_t const vec_dot = type_traits[type].vec_dot; enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type; ggml_from_float_t const from_float_to_vec_dot = type_traits[vec_dot_type].from_float; + int64_t const vec_dot_num_rows = type_traits[type].nrows; GGML_ASSERT(ne0 == ne01); GGML_ASSERT(ne1 == ne11); @@ -10159,12 +10202,23 @@ static void ggml_compute_forward_mul_mat( const int64_t blck_0 = 16; const int64_t blck_1 = 16; + // dot kernels can handle 1 row and col at a time, but mmla kernels can process 2 rows and cols + int64_t nrc = vec_dot_num_rows; + // TODO: currently the mmla kernels support only even numbered rows/cols. + // this check can be removed once they are extended to support odd numbered rows/cols too + if ((nr0 % 2 != 0) || (ne11 % 2 != 0)) { + nrc = 1; + } + + const size_t src1_col_stride = src1_cont || src1->type != vec_dot_type ? row_size : nb11; + // attempt to reduce false-sharing (does not seem to make a difference) - float tmp[16]; + // 16 * 2, accounting for mmla kernels + float tmp[32]; for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { - for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { + for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ir1 += nrc) { const int64_t i13 = (ir1/(ne12*ne1)); const int64_t i12 = (ir1 - i13*ne12*ne1)/ne1; const int64_t i11 = (ir1 - i13*ne12*ne1 - i12*ne1); @@ -10187,17 +10241,19 @@ static void ggml_compute_forward_mul_mat( (src1_cont || src1->type != vec_dot_type ? (i11 + i12*ne11 + i13*ne12*ne11)*row_size : (i11*nb11 + i12*nb12 + i13*nb13)); - float * dst_col = (float *) ((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb3)); //for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { // vec_dot(ne00, &dst_col[ir0], src0_row + ir0*nb01, src1_col); //} - for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { - vec_dot(ne00, &tmp[ir0 - iir0], src0_row + ir0*nb01, src1_col); + for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ir0 += nrc) { + vec_dot(ne00, &tmp[ir0 - iir0], (nrc>1 ? 16 : 0), src0_row + ir0*nb01, (nrc>1 ? nb01 : 0), src1_col, (nrc>1 ? 
src1_col_stride : 0), nrc); + } + + for (int cn = 0; cn < nrc; ++cn) { + memcpy(&dst_col[iir0 + cn*nb1/nb0], tmp + (cn*16), (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } - memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } } } @@ -10386,7 +10442,7 @@ static void ggml_compute_forward_mul_mat_id( //} for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { - vec_dot(ne00, &tmp[ir0 - iir0], src0_row + ir0*nb01, src1_col); + vec_dot(ne00, &tmp[ir0 - iir0], 0, src0_row + ir0*nb01, 0, src1_col, 0, 1); } memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } @@ -11568,7 +11624,7 @@ static void ggml_compute_forward_soft_max_back_f32( // linear runtime, no additional memory float dot_y_dy = 0; - ggml_vec_dot_f32 (nc, &dot_y_dy, y, dy); + ggml_vec_dot_f32 (nc, &dot_y_dy, 0, y, 0, dy, 0, 1); ggml_vec_cpy_f32 (nc, dx, dy); ggml_vec_acc1_f32(nc, dx, -dot_y_dy); ggml_vec_mul_f32 (nc, dx, dx, y); @@ -12369,9 +12425,9 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( const int i1n = i10*ne11; for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f16(ne02, &v, - (ggml_fp16_t *) wdata_src + i1n, - (ggml_fp16_t *) wdata_kernel + i00*ne02); + ggml_vec_dot_f16(ne02, &v, 0, + (ggml_fp16_t *) wdata_src + i1n, 0, + (ggml_fp16_t *) wdata_kernel + i00*ne02, 0, 1); dst_data[i10*s0 + i00] += v; } } @@ -12466,9 +12522,9 @@ static void ggml_compute_forward_conv_transpose_1d_f32( const int i1n = i10*ne11; for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f32(ne02, &v, - wdata_src + i1n, - wdata_kernel + i00*ne02); + ggml_vec_dot_f32(ne02, &v, 0, + wdata_src + i1n, 0, + wdata_kernel + i00*ne02, 0, 1); dst_data[i10*s0 + i00] += v; } } @@ -12783,9 +12839,9 @@ static void ggml_compute_forward_conv_transpose_2d( for (int i01 = 0; i01 < ne01; i01++) { for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f16(ne03, &v, - wdata_src + i1n, - wdata_kernel + i01*ne00*ne03 + i00*ne03); + ggml_vec_dot_f16(ne03, &v, 0, + wdata_src + i1n, 0, + wdata_kernel + i01*ne00*ne03 + i00*ne03, 0, 1); dst_data[(i11*stride + i01)*ne0 + i10*stride + i00] += v; } } @@ -13214,9 +13270,9 @@ static void ggml_compute_forward_flash_attn_f32( const int i1 = ik1; ggml_vec_dot_f32(neq0, - S + i1, - (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } // scale @@ -13299,9 +13355,9 @@ static void ggml_compute_forward_flash_attn_f32( const int iv3 = iq3; ggml_vec_dot_f32(masked_begin, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (float *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), - S); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (float *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), 0, + S, 0, 1); } } } @@ -13404,9 +13460,9 @@ static void ggml_compute_forward_flash_attn_f16( const int i1 = ik1; ggml_vec_dot_f16(neq0, - S + i1, - (ggml_fp16_t *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (ggml_fp16_t *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (ggml_fp16_t *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (ggml_fp16_t *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } } else { for (int64_t ic = 0; ic < nek1; ic += GGML_VEC_DOT_UNROLL) { @@ 
-13508,9 +13564,9 @@ static void ggml_compute_forward_flash_attn_f16( const int iv3 = iq3; ggml_vec_dot_f16(nev0, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (ggml_fp16_t *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), - S16); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (ggml_fp16_t *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), 0, + S16, 0, 1); } } else { for (int64_t ic = 0; ic < nev1; ic += GGML_VEC_DOT_UNROLL) { @@ -13652,9 +13708,9 @@ static void ggml_compute_forward_flash_ff_f16( const int i1 = ib01; ggml_vec_dot_f16(nea0, - S + i1, - (ggml_fp16_t *) ((char *) b0->data + (ib01*nbb01 + ib02*nbb02 + ib03*nbb03)), - (ggml_fp16_t *) ((char *) a->data + ( ia1*nba1 + ia2*nba2 + ia3*nba3))); + S + i1, 0, + (ggml_fp16_t *) ((char *) b0->data + (ib01*nbb01 + ib02*nbb02 + ib03*nbb03)), 0, + (ggml_fp16_t *) ((char *) a->data + ( ia1*nba1 + ia2*nba2 + ia3*nba3)), 0, 1); } ggml_vec_add_f32(neb01, S, S, (float *) b1->data); @@ -13677,9 +13733,9 @@ static void ggml_compute_forward_flash_ff_f16( for (int64_t ic = 0; ic < nec01; ++ic) { ggml_vec_dot_f16(neb01, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (ggml_fp16_t *) ((char *) c0->data + ( ic*nbc01 + i2*nbc02 + i3*nbc03)), - S16); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (ggml_fp16_t *) ((char *) c0->data + ( ic*nbc01 + i2*nbc02 + i3*nbc03)), 0, + S16, 0, 1); } ggml_vec_add_f32(nec01, @@ -13866,9 +13922,9 @@ static void ggml_compute_forward_flash_attn_back_f32( const int i1 = ik1; ggml_vec_dot_f32(neq0, - S + i1, - (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } // scale @@ -14013,7 +14069,7 @@ static void ggml_compute_forward_flash_attn_back_f32( // S = SM * (S - dot(SM, S)) float dot_SM_gradSM = 0; - ggml_vec_dot_f32 (masked_begin, &dot_SM_gradSM, SM, S); + ggml_vec_dot_f32 (masked_begin, &dot_SM_gradSM, 0, SM, 0, S, 0, 1); ggml_vec_acc1_f32(M, S, -dot_SM_gradSM); ggml_vec_mul_f32 (masked_begin, S, S, SM); @@ -18382,7 +18438,7 @@ static enum ggml_opt_result linesearch_backtracking( } // compute the initial gradient in the search direction - ggml_vec_dot_f32(nx, &dginit, g, d); + ggml_vec_dot_f32(nx, &dginit, 0, g, 0, d, 0, 1); // make sure that d points to a descent direction if (0 < dginit) { @@ -18432,7 +18488,7 @@ static enum ggml_opt_result linesearch_backtracking( return count; } - ggml_vec_dot_f32(nx, &dg, g, d); + ggml_vec_dot_f32(nx, &dg, 0, g, 0, d, 0, 1); // check the Wolfe condition if (dg < params->lbfgs.wolfe * dginit) { @@ -18693,8 +18749,8 @@ static enum ggml_opt_result ggml_opt_lbfgs( // ys = y^t \cdot s -> 1 / \rho. // yy = y^t \cdot y. 
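All of the call sites above follow the same pattern: the extended `ggml_vec_dot_t` signature adds an output stride `bs`, input row strides `bx`/`by`, and a row count `nrc`, and passing zeros with `nrc == 1` reproduces the old single-dot behaviour (currently only the `q4_0`/`q4_1`/`q8_0` kernels built with `__ARM_FEATURE_MATMUL_INT8` set `.nrows = 2`). A minimal sketch of exercising the new signature through the public type-traits accessor, in the spirit of the `tests/test-quantize-fns.cpp` change further down in this patch (array sizes and values here are arbitrary):

```cpp
// Sketch only: assumes ggml.h from this patch is on the include path and the
// program is linked against ggml. It calls the F32 vec_dot through the public
// type-traits accessor using the extended (n, s, bs, x, bx, y, by, nrc) form.
#include <cstdio>
#include <vector>
#include "ggml.h"

int main() {
    std::vector<float> x(128, 0.5f), y(128, 2.0f);

    ggml_type_traits_t traits = ggml_internal_get_type_traits(GGML_TYPE_F32);

    // zero strides and nrc = 1 give the old single-row dot product behaviour
    float result = 0.0f;
    traits.vec_dot(128, &result, 0, x.data(), 0, y.data(), 0, 1);

    printf("dot = %f (expected %f)\n", result, 128 * 0.5f * 2.0f);
    return 0;
}
```

In `ggml_compute_forward_mul_mat`, the `nrc > 1` path instead passes `bs = 16` (the offset between the two 16-float halves of the `tmp` buffer) together with the byte strides `nb01` and `src1_col_stride`, so a single call produces two output rows at once.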
// - ggml_vec_dot_f32(nx, &ys, &lm_y[end[0]*nx], &lm_s[end[0]*nx]); - ggml_vec_dot_f32(nx, &yy, &lm_y[end[0]*nx], &lm_y[end[0]*nx]); + ggml_vec_dot_f32(nx, &ys, 0, &lm_y[end[0]*nx], 0, &lm_s[end[0]*nx], 0, 1); + ggml_vec_dot_f32(nx, &yy, 0, &lm_y[end[0]*nx], 0, &lm_y[end[0]*nx], 0, 1); lm_ys[end[0]] = ys; @@ -18713,7 +18769,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( for (int i = 0; i < bound; ++i) { j[0] = (j[0] + m - 1) % m; // \alpha_{j} = \rho_{j} s^{t}_{j} \cdot q_{k+1} - ggml_vec_dot_f32(nx, &lm_alpha[j[0]], &lm_s[j[0]*nx], d); + ggml_vec_dot_f32(nx, &lm_alpha[j[0]], 0, &lm_s[j[0]*nx], 0, d, 0, 1); lm_alpha[j[0]] /= lm_ys[j[0]]; // q_{i} = q_{i+1} - \alpha_{i} y_{i} ggml_vec_mad_f32(nx, d, &lm_y[j[0]*nx], -lm_alpha[j[0]]); @@ -18723,7 +18779,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( for (int i = 0; i < bound; ++i) { // \beta_{j} = \rho_{j} y^t_{j} \cdot \gamma_{i} - ggml_vec_dot_f32(nx, &beta, &lm_y[j[0]*nx], d); + ggml_vec_dot_f32(nx, &beta, 0, &lm_y[j[0]*nx], 0, d, 0, 1); beta /= lm_ys[j[0]]; // \gamma_{i+1} = \gamma_{i} + (\alpha_{j} - \beta_{j}) s_{j} ggml_vec_mad_f32(nx, d, &lm_s[j[0]*nx], lm_alpha[j[0]] - beta); @@ -20611,4 +20667,12 @@ int ggml_cpu_has_vsx(void) { #endif } +int ggml_cpu_has_matmul_int8(void) { +#if defined(__ARM_FEATURE_MATMUL_INT8) + return 1; +#else + return 0; +#endif +} + //////////////////////////////////////////////////////////////////////////////// diff --git a/ggml.h b/ggml.h index 1360cd8ee..9cfec5bac 100644 --- a/ggml.h +++ b/ggml.h @@ -2278,6 +2278,7 @@ extern "C" { GGML_API int ggml_cpu_has_ssse3 (void); GGML_API int ggml_cpu_has_sycl (void); GGML_API int ggml_cpu_has_vsx (void); + GGML_API int ggml_cpu_has_matmul_int8(void); // // Internal types and functions exposed for tests and benchmarks @@ -2291,7 +2292,8 @@ extern "C" { #endif typedef void (*ggml_to_float_t) (const void * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); typedef void (*ggml_from_float_t)(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); - typedef void (*ggml_vec_dot_t) (const int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT x, const void * GGML_RESTRICT y); + typedef void (*ggml_vec_dot_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, size_t bx, + const void * GGML_RESTRICT y, size_t by, int nrc); typedef struct { const char * type_name; @@ -2303,6 +2305,7 @@ extern "C" { ggml_from_float_t from_float_reference; ggml_vec_dot_t vec_dot; enum ggml_type vec_dot_type; + int64_t nrows; // number of rows to process simultaneously; } ggml_type_traits_t; GGML_API ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type); diff --git a/llama.cpp b/llama.cpp index 0566b087b..3f39a67fb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11869,6 +11869,7 @@ const char * llama_print_system_info(void) { s += "SSE3 = " + std::to_string(ggml_cpu_has_sse3()) + " | "; s += "SSSE3 = " + std::to_string(ggml_cpu_has_ssse3()) + " | "; s += "VSX = " + std::to_string(ggml_cpu_has_vsx()) + " | "; + s += "MATMUL_INT8 = " + std::to_string(ggml_cpu_has_matmul_int8()) + " | "; return s.c_str(); } diff --git a/pocs/vdot/q8dot.cpp b/pocs/vdot/q8dot.cpp index 111770d55..1a52ff5e9 100644 --- a/pocs/vdot/q8dot.cpp +++ b/pocs/vdot/q8dot.cpp @@ -156,8 +156,8 @@ int main(int argc, char** argv) { t1 = std::chrono::high_resolution_clock::now(); float fs; - if (type == 0) funcs.vec_dot(kVecSize * QK4_1, &fs, x40.data(), y.data()); - else funcs.vec_dot(kVecSize * QK4_1, &fs, x41.data(), y.data()); + if (type == 0) funcs.vec_dot(kVecSize * QK4_1, &fs, 
0, x40.data(), 0, y.data(), 0, 1); + else funcs.vec_dot(kVecSize * QK4_1, &fs, 0, x41.data(), 0, y.data(), 0, 1); t2 = std::chrono::high_resolution_clock::now(); t = 1e-3*std::chrono::duration_cast(t2-t1).count(); if (iloop > 3) ggml.addResult(fs, t); diff --git a/pocs/vdot/vdot.cpp b/pocs/vdot/vdot.cpp index 73ffcd1ca..17e9e4482 100644 --- a/pocs/vdot/vdot.cpp +++ b/pocs/vdot/vdot.cpp @@ -284,8 +284,8 @@ int main(int argc, char** argv) { else { auto vdot = ggml_internal_get_type_traits(funcs.vec_dot_type); vdot.from_float(y1.data(), q8.data(), kVecSize); - if (useQ4_1) funcs.vec_dot(kVecSize, &result, q41.data(), q8.data()); - else funcs.vec_dot(kVecSize, &result, q40.data(), q8.data()); + if (useQ4_1) funcs.vec_dot(kVecSize, &result, 0, q41.data(), 0, q8.data(), 0, 1); + else funcs.vec_dot(kVecSize, &result, 0, q40.data(), 0, q8.data(), 0, 1); } sumq += result; t2 = std::chrono::high_resolution_clock::now(); diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 43df8022d..5e92d5742 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -87,7 +87,7 @@ static float dot_product_error( vdot.from_float(test_data2, tmp_q2.data(), test_size); float result = INFINITY; - qfns.vec_dot(test_size, &result, tmp_q1.data(), tmp_q2.data()); + qfns.vec_dot(test_size, &result, 0, tmp_q1.data(), 0, tmp_q2.data(), 0, 1); const float dot_ref = dot_product(test_data1, test_data2, test_size); diff --git a/tests/test-quantize-perf.cpp b/tests/test-quantize-perf.cpp index 8ec817344..48d9fae3d 100644 --- a/tests/test-quantize-perf.cpp +++ b/tests/test-quantize-perf.cpp @@ -346,7 +346,7 @@ int main(int argc, char * argv[]) { printf(" %zu values (%.2f MB)\n", size, 4*size/(float)(1024*1024)); auto quantize_fn = [&](void) -> float { float result; - qfns.vec_dot(size, &result, test_q1, test_q2); + qfns.vec_dot(size, &result, 0, test_q1, 0, test_q2, 0, 1); return result; }; size_t quantized_size = ggml_row_size(type, size); From 0f2411f154db46780d3aaa3a0664691b2170c83f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:33:01 +0200 Subject: [PATCH 565/811] ggml : fix compile warnings (unused vars) (#4966) --- ggml-quants.c | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/ggml-quants.c b/ggml-quants.c index 6c122dd2a..b2a309bf8 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3689,6 +3689,10 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_0 * restrict x = vx; const block_q8_0 * restrict y = vy; @@ -4052,6 +4056,10 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_1 * restrict x = vx; const block_q8_1 * restrict y = vy; @@ -4861,6 +4869,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q8_0 * restrict x = vx; const block_q8_0 * restrict y = vy; From 139b62a839825ef20084ed75ed624db7a5ad554a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:33:43 +0200 Subject: [PATCH 566/811] common : fix compile warning --- common/sampling.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 844ad7c53..82cbdecea 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ 
-127,8 +127,6 @@ static void sampler_queue( const llama_sampling_params & params, llama_token_data_array & cur_p, size_t & min_keep) { - const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); - const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; From 85910c5b30f6e268321be8df044f5528a6efac52 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:35:50 +0200 Subject: [PATCH 567/811] main : ctrl+C print timing in non-interactive mode (#3873) --- examples/main/main.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 0ed4d79f9..e8ab8cbae 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -98,7 +98,7 @@ static void write_logfile( #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32) static void sigint_handler(int signo) { if (signo == SIGINT) { - if (!is_interacting) { + if (!is_interacting && g_params->interactive) { is_interacting = true; } else { console::cleanup(); @@ -392,7 +392,8 @@ int main(int argc, char ** argv) { LOG_TEE("\n"); } - if (params.interactive) { + // ctrl+C handling + { #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) struct sigaction sigint_action; sigint_action.sa_handler = sigint_handler; @@ -405,7 +406,9 @@ int main(int argc, char ** argv) { }; SetConsoleCtrlHandler(reinterpret_cast(console_ctrl_handler), true); #endif + } + if (params.interactive) { LOG_TEE("%s: interactive mode on.\n", __func__); if (!params.antiprompt.empty()) { From 684780141a08200ec98eba3e982dbafd1d0b5000 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 11 Feb 2024 13:38:14 +0000 Subject: [PATCH 568/811] server : allow to specify tokens as strings in logit_bias (#5003) * server: allow to specify tokens as strings in logit_bias * Apply suggestions from code review Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 2 +- examples/server/server.cpp | 32 +++++++++++++++++++++++++------- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 1db7cdf21..0f7373ae8 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -185,7 +185,7 @@ node index.js `ignore_eos`: Ignore end of stream token and continue generating (default: false). - `logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced (default: []). + `logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced. The tokens can also be represented as strings, e.g. `[["Hello, World!",-0.5]]` will reduce the likelihood of all the individual tokens that represent the string `Hello, World!`, just like the `presence_penalty` does. (default: []). 
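With this change a single `logit_bias` array can mix integer token ids, plain strings, and the `false` ban value. A completion request body along these lines (the prompt and the bias values are just illustrative) would boost token 15043, damp every token of the phrase `Hello, World!`, and forbid the string `World` entirely:

```json
{
  "prompt": "Building a website can be done in 10 simple steps:",
  "n_predict": 64,
  "logit_bias": [
    [15043, 1.0],
    ["Hello, World!", -0.5],
    ["World", false]
  ]
}
```

Each string entry is tokenized with the model's vocabulary and the same bias is applied to every resulting token, which is why the documentation above compares the effect to `presence_penalty`.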
`n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 4d212f1f0..1699eb76b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -626,18 +626,36 @@ struct llama_server_context const int n_vocab = llama_n_vocab(model); for (const auto &el : *logit_bias) { - if (el.is_array() && el.size() == 2 && el[0].is_number_integer()) + if (el.is_array() && el.size() == 2) { - llama_token tok = el[0].get(); - if (tok >= 0 && tok < n_vocab) + float bias; + if (el[1].is_number()) { - if (el[1].is_number()) + bias = el[1].get(); + } + else if (el[1].is_boolean() && !el[1].get()) + { + bias = -INFINITY; + } + else + { + continue; + } + + if (el[0].is_number_integer()) + { + llama_token tok = el[0].get(); + if (tok >= 0 && tok < n_vocab) { - slot->sparams.logit_bias[tok] = el[1].get(); + slot->sparams.logit_bias[tok] = bias; } - else if (el[1].is_boolean() && !el[1].get()) + } + else if (el[0].is_string()) + { + auto toks = llama_tokenize(model, el[0].get(), false); + for (auto tok : toks) { - slot->sparams.logit_bias[tok] = -INFINITY; + slot->sparams.logit_bias[tok] = bias; } } } From a803333a4e6fc534c93afe90d741bc2388bdec87 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 11 Feb 2024 13:43:31 +0000 Subject: [PATCH 569/811] common : use enums for sampler types (#5418) * common: use enums for sampler types * Apply suggestions from code review Co-authored-by: Georgi Gerganov * minor : spaces --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 117 +++++++++++++++++++++++++++++++------------- common/common.h | 7 ++- common/sampling.cpp | 31 +++++------- common/sampling.h | 20 +++++++- 4 files changed, 120 insertions(+), 55 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 9a489a553..f64da2cb6 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -340,13 +340,14 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - sparams.samplers_sequence = parse_samplers_input(argv[i]); + const auto sampler_names = string_split(argv[i], ';'); + sparams.samplers_sequence = sampler_types_from_names(sampler_names); } else if (arg == "--sampling-seq") { if (++i >= argc) { invalid_param = true; break; } - sparams.samplers_sequence = argv[i]; + sparams.samplers_sequence = sampler_types_from_chars(argv[i]); } else if (arg == "--top-p") { if (++i >= argc) { invalid_param = true; @@ -906,6 +907,14 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { const llama_sampling_params & sparams = params.sparams; + std::string sampler_type_chars; + std::string sampler_type_names; + for (const auto sampler_type : sparams.samplers_sequence) { + sampler_type_chars += static_cast(sampler_type); + sampler_type_names += sampler_type_to_name_string(sampler_type) + ";"; + } + sampler_type_names.pop_back(); + printf("\n"); printf("usage: %s [options]\n", argv[0]); printf("\n"); @@ -947,8 +956,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until context filled)\n", params.n_predict); printf(" -c N, --ctx-size N size of the prompt context (default: %d, 0 = loaded from model)\n", params.n_ctx); printf(" -b N, --batch-size N batch size for prompt processing 
(default: %d)\n", params.n_batch); - printf(" --samplers samplers that will be used for generation in the order, separated by \';\', for example: \"top_k;tfs;typical;top_p;min_p;temp\"\n"); - printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sparams.samplers_sequence.c_str()); + printf(" --samplers samplers that will be used for generation in the order, separated by \';\' (default: %s)\n", sampler_type_names.c_str()); + printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sampler_type_chars.c_str()); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); printf(" --min-p N min-p sampling (default: %.1f, 0.0 = disabled)\n", (double)sparams.min_p); @@ -1097,45 +1106,85 @@ std::string gpt_random_prompt(std::mt19937 & rng) { } // -// String parsing +// String utils // -std::string parse_samplers_input(std::string input) { - std::string output = ""; +std::vector string_split(std::string input, char separator) { + std::vector parts; + size_t separator_pos = input.find(separator); + while (separator_pos != std::string::npos) { + std::string part = input.substr(0, separator_pos); + parts.emplace_back(part); + input = input.substr(separator_pos + 1); + separator_pos = input.find(separator); + } + parts.emplace_back(input); + return parts; +} + +std::vector sampler_types_from_names(const std::vector & names) { // since samplers names are written multiple ways // make it ready for both system names and input names - std::unordered_map samplers_symbols { - {"top_k", 'k'}, - {"top-k", 'k'}, - {"top_p", 'p'}, - {"top-p", 'p'}, - {"nucleus", 'p'}, - {"typical_p", 'y'}, - {"typical-p", 'y'}, - {"typical", 'y'}, - {"min_p", 'm'}, - {"min-p", 'm'}, - {"tfs_z", 'f'}, - {"tfs-z", 'f'}, - {"tfs", 'f'}, - {"temp", 't'}, - {"temperature",'t'} + std::unordered_map sampler_name_map { + {"top_k", llama_sampler_type::TOP_K}, + {"top-k", llama_sampler_type::TOP_K}, + {"top_p", llama_sampler_type::TOP_P}, + {"top-p", llama_sampler_type::TOP_P}, + {"nucleus", llama_sampler_type::TOP_P}, + {"typical_p", llama_sampler_type::TYPICAL_P}, + {"typical-p", llama_sampler_type::TYPICAL_P}, + {"typical", llama_sampler_type::TYPICAL_P}, + {"min_p", llama_sampler_type::MIN_P}, + {"min-p", llama_sampler_type::MIN_P}, + {"tfs_z", llama_sampler_type::TFS_Z}, + {"tfs-z", llama_sampler_type::TFS_Z}, + {"tfs", llama_sampler_type::TFS_Z}, + {"temp", llama_sampler_type::TEMP}, + {"temperature", llama_sampler_type::TEMP} }; - // expected format example: "temp;top_k;tfs_z;typical_p;top_p;min_p" - size_t separator = input.find(';'); - while (separator != input.npos) { - std::string name = input.substr(0,separator); - input = input.substr(separator+1); - separator = input.find(';'); - if (samplers_symbols.find(name) != samplers_symbols.end()) { - output += samplers_symbols[name]; + std::vector sampler_types; + sampler_types.reserve(names.size()); + for (const auto& name : names) { + const auto sampler_item = sampler_name_map.find(name); + if (sampler_item != sampler_name_map.end()) { + sampler_types.push_back(sampler_item->second); } } - if (samplers_symbols.find(input) != samplers_symbols.end()) { - output += samplers_symbols[input]; + return sampler_types; +} + +std::vector sampler_types_from_chars(const std::string & names_string) { + std::unordered_map sampler_name_map { + {'k', llama_sampler_type::TOP_K}, + {'p', 
llama_sampler_type::TOP_P}, + {'y', llama_sampler_type::TYPICAL_P}, + {'m', llama_sampler_type::MIN_P}, + {'f', llama_sampler_type::TFS_Z}, + {'t', llama_sampler_type::TEMP} + }; + + std::vector sampler_types; + sampler_types.reserve(names_string.size()); + for (const auto & c : names_string) { + const auto sampler_item = sampler_name_map.find(c); + if (sampler_item != sampler_name_map.end()) { + sampler_types.push_back(sampler_item->second); + } + } + return sampler_types; +} + +std::string sampler_type_to_name_string(llama_sampler_type sampler_type) { + switch (sampler_type) { + case llama_sampler_type::TOP_K: return "top_k"; + case llama_sampler_type::TFS_Z: return "tfs_z"; + case llama_sampler_type::TYPICAL_P: return "typical_p"; + case llama_sampler_type::TOP_P: return "top_p"; + case llama_sampler_type::MIN_P: return "min_p"; + case llama_sampler_type::TEMP: return "temp"; + default : return ""; } - return output; } // diff --git a/common/common.h b/common/common.h index 62de25d6a..9bdd45cf9 100644 --- a/common/common.h +++ b/common/common.h @@ -162,10 +162,13 @@ std::string gpt_random_prompt(std::mt19937 & rng); void process_escapes(std::string& input); // -// String parsing +// String utils // -std::string parse_samplers_input(std::string input); +std::vector sampler_types_from_names(const std::vector & names); +std::vector sampler_types_from_chars(const std::string & names_string); +std::vector string_split(std::string input, char separator); +std::string sampler_type_to_name_string(llama_sampler_type sampler_type); // // Model utils diff --git a/common/sampling.cpp b/common/sampling.cpp index 82cbdecea..a001750da 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -103,15 +103,10 @@ std::string llama_sampling_print(const llama_sampling_params & params) { std::string llama_sampling_order_print(const llama_sampling_params & params) { std::string result = "CFG -> Penalties "; if (params.mirostat == 0) { - for (auto s : params.samplers_sequence) { - switch (s) { - case 'k': result += "-> top_k "; break; - case 'f': result += "-> tfs_z "; break; - case 'y': result += "-> typical_p "; break; - case 'p': result += "-> top_p "; break; - case 'm': result += "-> min_p "; break; - case 't': result += "-> temp "; break; - default : break; + for (auto sampler_type : params.samplers_sequence) { + const auto sampler_type_name = sampler_type_to_name_string(sampler_type); + if (!sampler_type_name.empty()) { + result += "-> " + sampler_type_name + " "; } } } else { @@ -135,16 +130,16 @@ static void sampler_queue( const float min_p = params.min_p; const float tfs_z = params.tfs_z; const float typical_p = params.typical_p; - const std::string & samplers_sequence = params.samplers_sequence; + const std::vector & samplers_sequence = params.samplers_sequence; - for (auto s : samplers_sequence) { - switch (s){ - case 'k': llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); break; - case 'f': llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); break; - case 'y': llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; - case 'p': llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; - case 'm': llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; - case 't': + for (auto sampler_type : samplers_sequence) { + switch (sampler_type) { + case llama_sampler_type::TOP_K : llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); break; + case llama_sampler_type::TFS_Z : llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); break; + case 
llama_sampler_type::TYPICAL_P: llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; + case llama_sampler_type::TOP_P : llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; + case llama_sampler_type::MIN_P : llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; + case llama_sampler_type::TEMP: if (dynatemp_range > 0) { float dynatemp_min = std::max(0.0f, temp - dynatemp_range); float dynatemp_max = std::max(0.0f, temp + dynatemp_range); diff --git a/common/sampling.h b/common/sampling.h index 88899c094..2bd6a75d2 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -8,6 +8,16 @@ #include #include +// sampler types +enum class llama_sampler_type : char { + TOP_K = 'k', + TOP_P = 'p', + MIN_P = 'm', + TFS_Z = 'f', + TYPICAL_P = 'y', + TEMP = 't' +}; + // sampling parameters typedef struct llama_sampling_params { int32_t n_prev = 64; // number of previous tokens to remember @@ -28,7 +38,15 @@ typedef struct llama_sampling_params { float mirostat_tau = 5.00f; // target entropy float mirostat_eta = 0.10f; // learning rate bool penalize_nl = true; // consider newlines as a repeatable token - std::string samplers_sequence = "kfypmt"; // top_k, tail_free, typical_p, top_p, min_p, temp + + std::vector samplers_sequence = { + llama_sampler_type::TOP_K, + llama_sampler_type::TFS_Z, + llama_sampler_type::TYPICAL_P, + llama_sampler_type::TOP_P, + llama_sampler_type::MIN_P, + llama_sampler_type::TEMP + }; std::string grammar; // optional BNF-like grammar to constrain sampling From c88c74f967028ae3d5ebade40ae586d20a961abc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20L=C3=B3pez?= Date: Sun, 11 Feb 2024 15:12:00 +0100 Subject: [PATCH 570/811] vulkan: only use M-sized matmul on Apple GPUs (#5412) * vulkan: refactor guess_matmul_pipeline for vendor Refactor ggml_vk_guess_matmul_pipeline to simplify adding per-vendor conditionals. Signed-off-by: Sergio Lopez * vulkan: only use M-sized matmul on Apple GPUs L-sized and S-sized matmuls are broken on Apple GPUs, force using M-size with this vendor. Signed-off-by: Sergio Lopez --------- Signed-off-by: Sergio Lopez --- ggml-vulkan.cpp | 103 +++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 93 insertions(+), 10 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 254f648a6..7834e635c 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -27,6 +27,7 @@ #define CEIL_DIV(M, N) (((M) + (N)-1) / (N)) #define VK_VENDOR_ID_AMD 0x1002 +#define VK_VENDOR_ID_APPLE 0x106b #define VK_VENDOR_ID_INTEL 0x8086 #define VK_VENDOR_ID_NVIDIA 0x10de @@ -2034,18 +2035,100 @@ static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ct return ctx->pipeline_matmul_f32_aligned_l.align; } -static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; -#endif +static vk_pipeline* ggml_vk_guess_matmul_pipeline_amd(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { if (bit16_x && bit16_y) { - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? 
&ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; + } + if (bit16_x && !bit16_y) { + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline_apple(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + if (bit16_x && bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; + } + if (bit16_x && !bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline_intel(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + if (bit16_x && bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; + } + if (bit16_x && !bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; +#endif + switch (ctx->device.lock()->vendor_id) { + case VK_VENDOR_ID_AMD: + return ggml_vk_guess_matmul_pipeline_amd(ctx, bit16_x, bit16_y, m, n, aligned); + case VK_VENDOR_ID_APPLE: + return ggml_vk_guess_matmul_pipeline_apple(ctx, bit16_x, bit16_y, aligned); + case VK_VENDOR_ID_INTEL: + return ggml_vk_guess_matmul_pipeline_intel(ctx, bit16_x, bit16_y, aligned); + } + + if (bit16_x && bit16_y) { + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; + } + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif @@ -2057,13 +2140,13 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, return aligned ? 
&ctx->pipeline_matmul_f16_aligned_l : &ctx->pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif @@ -2078,13 +2161,13 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, GGML_ASSERT(false); } - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif From 97a336507ed9b971d72262bec7e2b8b7016a054a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 11 Feb 2024 00:17:31 +0000 Subject: [PATCH 571/811] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/b8b232ae7b8b144397fdb12d20f592e5e7c1a64d' (2024-01-31) → 'github:NixOS/nixpkgs/f8e2ebd66d097614d51a56a755450d4ae1632df1' (2024-02-07) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 8cfc78273..239d0686c 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1706732774, - "narHash": "sha256-hqJlyJk4MRpcItGYMF+3uHe8HvxNETWvlGtLuVpqLU0=", + "lastModified": 1707268954, + "narHash": "sha256-2en1kvde3cJVc3ZnTy8QeD2oKcseLFjYPLKhIGDanQ0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b8b232ae7b8b144397fdb12d20f592e5e7c1a64d", + "rev": "f8e2ebd66d097614d51a56a755450d4ae1632df1", "type": "github" }, "original": { From 2891c8aa9af17f4ff636ff3868bc34ff72b56e25 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Sun, 11 Feb 2024 10:21:38 -0600 Subject: [PATCH 572/811] Add support for BERT embedding models (#5423) * BERT model graph construction (build_bert) * WordPiece tokenizer (llm_tokenize_wpm) * Add flag for non-causal attention models * Allow for models that only output embeddings * Support conversion of BERT models to GGUF * Based on prior work by @xyzhang626 and @skeskinen --------- Co-authored-by: Jared Van Bortel Co-authored-by: Jared Van Bortel Co-authored-by: Georgi Gerganov --- .flake8 | 1 + convert-hf-to-gguf.py | 94 ++++++ examples/embedding/embedding.cpp | 12 +- gguf-py/gguf/constants.py | 43 +-- gguf-py/gguf/gguf_writer.py | 6 + gguf-py/gguf/tensor_mapping.py | 13 +- llama.cpp | 498 +++++++++++++++++++++++++++++-- llama.h | 1 + 8 files changed, 616 insertions(+), 52 deletions(-) diff --git a/.flake8 b/.flake8 index 113ca5fd3..18fba2c15 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,3 @@ [flake8] max-line-length = 125 +ignore = W503 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 0d4ea03b4..cae1551a2 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -209,6 +209,8 @@ class Model: return InternLM2Model if model_architecture == "MiniCPMForCausalLM": return MiniCPMModel + if model_architecture == "BertModel": + return BertModel return Model 
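One of the points in the commit message above is that models may now produce only embeddings. On the consuming side that looks roughly like the updated `examples/embedding` further down in this patch; a compilable sketch of that post-processing step, assuming a `llama_context` that has already evaluated the prompt (the helper name here is made up):

```cpp
// Sketch of consuming an embedding-only model's output, mirroring the
// normalization added to examples/embedding/embedding.cpp in this patch.
// Assumes the context has already been evaluated on the input tokens.
#include <cmath>
#include <vector>
#include "llama.h"

std::vector<float> get_normalized_embedding(llama_context * ctx, const llama_model * model) {
    const int n_embd = llama_n_embd(model);
    const float * emb = llama_get_embeddings(ctx);

    // accumulate the squared magnitude of the raw embedding
    float norm = 0.0f;
    for (int i = 0; i < n_embd; i++) {
        norm += emb[i] * emb[i];
    }
    norm = std::sqrt(norm);

    // l2-normalize (guarding against an all-zero vector)
    std::vector<float> out(n_embd);
    for (int i = 0; i < n_embd; i++) {
        out[i] = norm > 0.0f ? emb[i] / norm : 0.0f;
    }
    return out;
}
```

L2-normalizing here means that cosine similarity between two embeddings later reduces to a plain dot product.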
def _is_model_safetensors(self) -> bool: @@ -264,6 +266,8 @@ class Model: return gguf.MODEL_ARCH.INTERNLM2 if arch == "MiniCPMForCausalLM": return gguf.MODEL_ARCH.MINICPM + if arch == "BertModel": + return gguf.MODEL_ARCH.BERT raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1629,6 +1633,96 @@ in chat mode so that the conversation can end normally.") self.post_write_tensors(tensor_map, name, data_torch) +class BertModel(Model): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.block_count = self.hparams["num_hidden_layers"] + + def set_gguf_parameters(self): + # TODO(cebtenzzre): merge with parent class + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_block_count(self.block_count) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) + self.gguf_writer.add_causal_attention(False) + self.gguf_writer.add_file_type(self.ftype) + + def set_vocab(self): + path = self.dir_model + added_tokens_path = self.dir_model if self.dir_model.exists() else None + + # use huggingface vocab to get all tokens + vocab = HfVocab(path, added_tokens_path) + tokens, scores, toktypes = zip(*vocab.all_tokens()) + assert len(tokens) == vocab.vocab_size + + # we need this to validate the size of the token_type embeddings + # though currently we are passing all zeros to the token_type embeddings + n_token_types = len(set(toktypes)) + self.gguf_writer.add_token_type_count(n_token_types) + + # convert to phantom space vocab + def phantom(tok, typ): + if tok.startswith(b"[") and tok.endswith(b"]"): + return tok + if tok.startswith(b"##"): + return tok[2:] + return b"\xe2\x96\x81" + tok + tokens = [phantom(t, y) for t, y in zip(tokens, toktypes)] + + # set up bos and eos tokens (cls and sep) + self.gguf_writer.add_bos_token_id(vocab.tokenizer.cls_token_id) + self.gguf_writer.add_eos_token_id(vocab.tokenizer.sep_token_id) + + # add vocab to gguf + self.gguf_writer.add_tokenizer_model("bert") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + # handle special tokens + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + def write_tensors(self): + tensor_map = gguf.get_tensor_name_map(self.model_arch, self.block_count) + tensors = dict(self.get_tensors()) + for name, data_torch in tensors.items(): + # we are only using BERT for embeddings so we don't need the pooling layer + if name in ("embeddings.position_ids", "pooler.dense.weight", "pooler.dense.bias"): + continue # we don't need these + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + data = data_torch.squeeze().numpy() + n_dims = len(data.shape) + new_dtype: type[np.floating[Any]] + + if ( + self.ftype == 1 and name.endswith(".weight") and n_dims == 2 + and name != "embeddings.token_type_embeddings.weight" # not used with get_rows, must be F32 + ): + # if f16 desired, convert any float32 2-dim weight tensors to float16 + new_dtype = np.float16 + else: + # if f32 desired, convert any float16 to float32 + new_dtype = 
np.float32 + + print(f"{new_name}, n_dims = {n_dims}, {data_torch.dtype} --> {new_dtype}") + + if data.dtype != new_dtype: + data = data.astype(new_dtype) + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index 3295cd240..27376c8f0 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -87,7 +87,17 @@ int main(int argc, char ** argv) { } const int n_embd = llama_n_embd(model); - const auto * embeddings = llama_get_embeddings(ctx); + auto * embeddings = llama_get_embeddings(ctx); + + // l2-normalize embeddings + float norm = 0; + for (int i = 0; i < n_embd; i++) { + norm += embeddings[i] * embeddings[i]; + } + norm = sqrt(norm); + for (int i = 0; i < n_embd; i++) { + embeddings[i] /= norm; + } for (int i = 0; i < n_embd; i++) { printf("%f ", embeddings[i]); diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 1cfd41c0b..a9c13dd38 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -50,6 +50,7 @@ class Keys: VALUE_LENGTH = "{arch}.attention.value_length" LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" + CAUSAL = "{arch}.attention.causal" class Rope: DIMENSION_COUNT = "{arch}.rope.dimension_count" @@ -60,22 +61,23 @@ class Keys: SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" class Tokenizer: - MODEL = "tokenizer.ggml.model" - LIST = "tokenizer.ggml.tokens" - TOKEN_TYPE = "tokenizer.ggml.token_type" - SCORES = "tokenizer.ggml.scores" - MERGES = "tokenizer.ggml.merges" - BOS_ID = "tokenizer.ggml.bos_token_id" - EOS_ID = "tokenizer.ggml.eos_token_id" - UNK_ID = "tokenizer.ggml.unknown_token_id" - SEP_ID = "tokenizer.ggml.seperator_token_id" - PAD_ID = "tokenizer.ggml.padding_token_id" - ADD_BOS = "tokenizer.ggml.add_bos_token" - ADD_EOS = "tokenizer.ggml.add_eos_token" - ADD_PREFIX = "tokenizer.ggml.add_space_prefix" - HF_JSON = "tokenizer.huggingface.json" - RWKV = "tokenizer.rwkv.world" - CHAT_TEMPLATE = "tokenizer.chat_template" + MODEL = "tokenizer.ggml.model" + LIST = "tokenizer.ggml.tokens" + TOKEN_TYPE = "tokenizer.ggml.token_type" + TOKEN_TYPE_COUNT = "tokenizer.ggml.token_type_count" # for BERT-style token types + SCORES = "tokenizer.ggml.scores" + MERGES = "tokenizer.ggml.merges" + BOS_ID = "tokenizer.ggml.bos_token_id" + EOS_ID = "tokenizer.ggml.eos_token_id" + UNK_ID = "tokenizer.ggml.unknown_token_id" + SEP_ID = "tokenizer.ggml.seperator_token_id" + PAD_ID = "tokenizer.ggml.padding_token_id" + ADD_BOS = "tokenizer.ggml.add_bos_token" + ADD_EOS = "tokenizer.ggml.add_eos_token" + ADD_PREFIX = "tokenizer.ggml.add_space_prefix" + HF_JSON = "tokenizer.huggingface.json" + RWKV = "tokenizer.rwkv.world" + CHAT_TEMPLATE = "tokenizer.chat_template" # @@ -122,6 +124,7 @@ class MODEL_TENSOR(IntEnum): ATTN_OUT = auto() ATTN_NORM = auto() ATTN_NORM_2 = auto() + ATTN_OUT_NORM = auto() ATTN_ROT_EMBD = auto() FFN_GATE_INP = auto() FFN_NORM = auto() @@ -134,6 +137,7 @@ class MODEL_TENSOR(IntEnum): FFN_UP_EXP = auto() ATTN_Q_NORM = auto() ATTN_K_NORM = auto() + LAYER_OUT_NORM = auto() MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { @@ -178,6 +182,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd", MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm", MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm", + MODEL_TENSOR.ATTN_OUT_NORM: "blk.{bid}.attn_output_norm", MODEL_TENSOR.FFN_GATE_INP: 
"blk.{bid}.ffn_gate_inp", MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm", MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", @@ -187,6 +192,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate.{xid}", MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", + MODEL_TENSOR.LAYER_OUT_NORM: "blk.{bid}.layer_output_norm", } MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { @@ -262,17 +268,18 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { ], MODEL_ARCH.BERT: [ MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_EMBD_NORM, MODEL_TENSOR.TOKEN_TYPES, MODEL_TENSOR.POS_EMBD, MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_OUT_NORM, MODEL_TENSOR.ATTN_Q, MODEL_TENSOR.ATTN_K, MODEL_TENSOR.ATTN_V, MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.LAYER_OUT_NORM, ], MODEL_ARCH.MPT: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 16808196e..7af58a46c 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -357,6 +357,9 @@ class GGUFWriter: def add_layer_norm_rms_eps(self, value: float) -> None: self.add_float32(Keys.Attention.LAYERNORM_RMS_EPS.format(arch=self.arch), value) + def add_causal_attention(self, value: bool) -> None: + self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) + def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) @@ -387,6 +390,9 @@ class GGUFWriter: def add_token_types(self, types: Sequence[TokenType] | Sequence[int]) -> None: self.add_array(Keys.Tokenizer.TOKEN_TYPE, types) + def add_token_type_count(self, value: int) -> None: + self.add_uint32(Keys.Tokenizer.TOKEN_TYPE_COUNT, value) + def add_token_scores(self, scores: Sequence[float]) -> None: self.add_array(Keys.Tokenizer.SCORES, scores) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 4f16d8504..c7ba1420e 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -30,6 +30,7 @@ class TensorNameMap: # Normalization of token embeddings MODEL_TENSOR.TOKEN_EMBD_NORM: ( "word_embeddings_layernorm", # bloom + "embeddings.LayerNorm", # bert ), # Position embeddings @@ -54,7 +55,6 @@ class TensorNameMap: "transformer.ln_f", # gpt2 gpt-j falcon "model.norm", # llama-hf baichuan internlm2 "norm", # llama-pth - "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt "ln_f", # refact bloom qwen gpt2 "language_model.encoder.final_layernorm", # persimmon @@ -79,7 +79,6 @@ class TensorNameMap: "transformer.h.{bid}.ln_mlp", # falcon40b "model.layers.{bid}.input_layernorm", # llama-hf "layers.{bid}.attention_norm", # llama-pth - "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi "h.{bid}.ln_1", # gpt2 @@ -155,6 +154,11 @@ class TensorNameMap: "model.layers.{bid}.attention.wo", # internlm2 ), + # Attention output norm + MODEL_TENSOR.ATTN_OUT_NORM: ( + "encoder.layer.{bid}.attention.output.LayerNorm", # bert + ), + # Rotary embeddings MODEL_TENSOR.ATTN_ROT_EMBD: ( "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf @@ -171,7 +175,6 @@ class TensorNameMap: "transformer.blocks.{bid}.norm_2", # mpt "model.layers.{bid}.post_attention_layernorm", # llama-hf "layers.{bid}.ffn_norm", # llama-pth - "encoder.layer.{bid}.output.LayerNorm", # bert 
"language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi "h.{bid}.ln_2", # gpt2 @@ -266,6 +269,10 @@ class TensorNameMap: MODEL_TENSOR.ROPE_FREQS: ( "language_model.encoder.layers.{bid}.self_attention.rotary_emb.inv_freq", # persimmon ), + + MODEL_TENSOR.LAYER_OUT_NORM: ( + "encoder.layer.{bid}.output.LayerNorm", # bert + ) } mapping: dict[str, tuple[MODEL_TENSOR, str]] diff --git a/llama.cpp b/llama.cpp index 3f39a67fb..d1ee26ce2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -196,6 +196,7 @@ enum llm_arch { LLM_ARCH_STARCODER, LLM_ARCH_PERSIMMON, LLM_ARCH_REFACT, + LLM_ARCH_BERT, LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, @@ -220,6 +221,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_STARCODER, "starcoder" }, { LLM_ARCH_PERSIMMON, "persimmon" }, { LLM_ARCH_REFACT, "refact" }, + { LLM_ARCH_BERT, "bert" }, { LLM_ARCH_BLOOM, "bloom" }, { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, @@ -261,6 +263,7 @@ enum llm_kv { LLM_KV_ATTENTION_VALUE_LENGTH, LLM_KV_ATTENTION_LAYERNORM_EPS, LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, + LLM_KV_ATTENTION_CAUSAL, LLM_KV_ROPE_DIMENSION_COUNT, LLM_KV_ROPE_FREQ_BASE, @@ -273,6 +276,7 @@ enum llm_kv { LLM_KV_TOKENIZER_MODEL, LLM_KV_TOKENIZER_LIST, LLM_KV_TOKENIZER_TOKEN_TYPE, + LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, LLM_KV_TOKENIZER_SCORES, LLM_KV_TOKENIZER_MERGES, LLM_KV_TOKENIZER_BOS_ID, @@ -316,6 +320,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" }, { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" }, { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" }, + { LLM_KV_ATTENTION_CAUSAL, "%s.attention.causal" }, { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" }, { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" }, @@ -328,6 +333,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" }, { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" }, { LLM_KV_TOKENIZER_TOKEN_TYPE, "tokenizer.ggml.token_type" }, + { LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, "tokenizer.ggml.token_type_count" }, { LLM_KV_TOKENIZER_SCORES, "tokenizer.ggml.scores" }, { LLM_KV_TOKENIZER_MERGES, "tokenizer.ggml.merges" }, { LLM_KV_TOKENIZER_BOS_ID, "tokenizer.ggml.bos_token_id" }, @@ -355,6 +361,7 @@ struct LLM_KV { enum llm_tensor { LLM_TENSOR_TOKEN_EMBD, LLM_TENSOR_TOKEN_EMBD_NORM, + LLM_TENSOR_TOKEN_TYPES, LLM_TENSOR_POS_EMBD, LLM_TENSOR_OUTPUT, LLM_TENSOR_OUTPUT_NORM, @@ -536,6 +543,23 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_BERT, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, + { LLM_TENSOR_TOKEN_TYPES, "token_types" }, + { LLM_TENSOR_POS_EMBD, "position_embd" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_BLOOM, { @@ -1440,6 +1464,11 @@ static llama_state g_state; // available llama models enum e_model { MODEL_UNKNOWN, + MODEL_17M, + MODEL_22M, + MODEL_33M, + MODEL_109M, + MODEL_335M, MODEL_0_5B, MODEL_1B, MODEL_2B, @@ -1481,6 +1510,7 @@ struct llama_hparams { uint32_t n_ff; uint32_t n_expert = 0; uint32_t n_expert_used = 0; + uint32_t n_vocab_type = 0; // for 
BERT-style token types float f_norm_eps; float f_norm_rms_eps; @@ -1493,6 +1523,8 @@ struct llama_hparams { float f_clamp_kqv; float f_max_alibi_bias; + bool causal_attn = true; + bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1720,6 +1752,7 @@ struct llama_model { llama_vocab vocab; struct ggml_tensor * tok_embd; + struct ggml_tensor * type_embd; struct ggml_tensor * pos_embd; struct ggml_tensor * tok_norm; struct ggml_tensor * tok_norm_b; @@ -1850,6 +1883,7 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] + struct ggml_tensor * inp_sum; // F32 [1, n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -2829,6 +2863,7 @@ static const char * llama_model_vocab_type_name(enum llama_vocab_type type){ switch (type) { case LLAMA_VOCAB_TYPE_SPM: return "SPM"; case LLAMA_VOCAB_TYPE_BPE: return "BPE"; + case LLAMA_VOCAB_TYPE_WPM: return "WPM"; default: return "unknown"; } } @@ -3000,6 +3035,26 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_BERT: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + + switch (hparams.n_layer) { + case 3: + model.type = e_model::MODEL_17M; break; // bge-micro + case 6: + model.type = e_model::MODEL_22M; break; // MiniLM-L6 + case 12: + switch (hparams.n_embd) { + case 384: model.type = e_model::MODEL_33M; break; // MiniLM-L12, bge-small + case 768: model.type = e_model::MODEL_109M; break; // bge-base + } break; + case 24: + model.type = e_model::MODEL_335M; break; // bge-large + } + } break; case LLM_ARCH_BLOOM: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3204,6 +3259,16 @@ static void llm_load_vocab( vocab.special_unk_id = -1; vocab.special_sep_id = -1; vocab.special_pad_id = -1; + } else if (tokenizer_name == "bert") { + vocab.type = LLAMA_VOCAB_TYPE_WPM; + + // default special tokens + vocab.special_bos_id = 101; + vocab.special_eos_id = 102; + vocab.special_unk_id = 100; + vocab.special_sep_id = -1; + vocab.special_pad_id = -1; + vocab.add_space_prefix = false; } else { LLAMA_LOG_WARN("%s: unknown tokenizer: '%s'", __func__, tokenizer_name.c_str()); LLAMA_LOG_WARN("%s: using default tokenizer: 'llama'", __func__); @@ -3232,6 +3297,8 @@ static void llm_load_vocab( // determine the newline token: LLaMA "<0x0A>" == 10 == '\n', Falcon 193 == '\n' if (vocab.type == LLAMA_VOCAB_TYPE_SPM) { vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + } else if (vocab.type == LLAMA_VOCAB_TYPE_WPM) { + vocab.linefeed_id = vocab.special_pad_id; } else { const std::vector ids = llama_tokenize_internal(vocab, "\u010A", false); GGML_ASSERT(!ids.empty() && "model vocab missing newline token"); @@ -3569,6 +3636,7 @@ static bool llm_load_tensors( const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); const int64_t n_embd_gqa = n_embd_v_gqa; const int64_t n_vocab = hparams.n_vocab; + const int64_t n_vocab_type = hparams.n_vocab_type; const int64_t n_ff = hparams.n_ff; GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); @@ -3783,11 +3851,50 @@ static bool llm_load_tensors( layer.attn_k_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}); } } break; - case LLM_ARCH_BLOOM: + case LLM_ARCH_BERT: { model.tok_embd = 
ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); - model.tok_norm = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); - model.tok_norm_b = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); + model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); + + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + } + } break; + case LLM_ARCH_BLOOM: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); // output { @@ -4739,6 +4846,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; + const bool causal_attn; const llm_build_cb & cb; @@ -4782,6 +4890,7 @@ struct llm_build_context { kv_head (worst_case ? 
n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), + causal_attn (hparams.causal_attn), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5625,6 +5734,100 @@ struct llm_build_context { return gf; } + struct ggml_cgraph * build_bert() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + // get input vectors with right size + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + struct ggml_tensor * inp_sum = ggml_view_1d(ctx0, lctx.inp_sum, n_tokens, 0); + + // construct input embeddings (token, type, position) + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + // token types are hardcoded to zero ("Sentence A") + struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); + inpL = ggml_add(ctx0, inpL, type_row0); + inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + cb(inpL, "inp_embd", -1); + + // embed layer norm + inpL = llm_build_norm(ctx0, inpL, hparams, model.tok_norm, model.tok_norm_b, LLM_NORM, cb, -1); + cb(inpL, "inp_norm", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); // [n_kv, n_tokens] + + // iterate layers + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * cur = inpL; + + // self-attention + { + struct ggml_tensor * Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, cur), model.layers[il].bk); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, cur), model.layers[il].bv); + cb(Vcur, "Vcur", il); + + // seems like we just need to do this for Q? 
+ Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + // re-add the layer input + cur = ggml_add(ctx0, cur, inpL); + + // attention layer norm + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_norm, model.layers[il].attn_norm_b, LLM_NORM, cb, il); + + struct ggml_tensor * ffn_inp = cur; + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + + // attentions bypass the intermediate layer + cur = ggml_add(ctx0, cur, ffn_inp); + + // output layer norm + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, LLM_NORM, cb, il); + + // input for next layer + inpL = cur; + } + + // final output + cur = inpL; + + // pooling + cur = ggml_mul_mat(ctx0, inp_sum, ggml_cont(ctx0, ggml_transpose(ctx0, cur))); + cb(cur, "result_embed", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + struct ggml_cgraph * build_bloom() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -7060,7 +7263,8 @@ static struct ggml_cgraph * llama_build_graph( for (int i = 0; i < n_kv; ++i) { float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || + (llm.causal_attn && lctx.kv_self.cells[i].pos > pos)) { f = -INFINITY; } else { f = 0; @@ -7081,6 +7285,15 @@ static struct ggml_cgraph * llama_build_graph( data[i] = lctx.kv_self.cells[i].delta; } } + + { + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + for (int i = 0; i < batch.n_tokens; ++i) { + data[i] = 1.0f/float(batch.n_tokens); + } + } } llm.init(); @@ -7110,6 +7323,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_refact(); } break; + case LLM_ARCH_BERT: + { + result = llm.build_bert(); + } break; case LLM_ARCH_BLOOM: { result = llm.build_bloom(); @@ -7269,13 +7486,18 @@ static int llama_decode_internal( // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; - GGML_ASSERT(strcmp(res->name, "result_output") == 0); - - // the embeddings could be the second to last tensor, or the third to last tensor struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; - if (strcmp(embeddings->name, "result_norm") != 0) { - embeddings = gf->nodes[gf->n_nodes - 3]; - GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + if (strcmp(res->name, "result_output") == 0) { + // the embeddings could be the second to last tensor, or the third to last tensor + if (strcmp(embeddings->name, "result_norm") != 0) { + embeddings = gf->nodes[gf->n_nodes - 3]; + GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + } + } else if (strcmp(res->name, "result_embed") == 0) { + embeddings = res; + res = nullptr; + } else { + GGML_ASSERT(false); } // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs); @@ -7344,7 +7566,7 @@ static int llama_decode_internal( // extract logits // TODO: do not compute 
and extract logits if only embeddings are needed // need to update the graphs to skip "result_output" - { + if (res) { auto & logits_out = lctx.logits; #ifndef NDEBUG @@ -7388,9 +7610,11 @@ static int llama_decode_internal( if (!lctx.embedding.empty()) { auto & embedding_out = lctx.embedding; + const int64_t embed_pos = res ? n_embd * (n_tokens-1) : 0; + embedding_out.resize(n_embd); ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); - ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embed_pos*sizeof(float), n_embd*sizeof(float)); ggml_backend_synchronize(embeddings_backend); } @@ -7454,6 +7678,9 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { GGML_ASSERT(false); return unicode_to_bytes_bpe(token_data.text); } + case LLAMA_VOCAB_TYPE_WPM: { + GGML_ASSERT(false); + } default: GGML_ASSERT(false); } @@ -7466,6 +7693,7 @@ static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; return vocab.token_to_id.at(buf); } + case LLAMA_VOCAB_TYPE_WPM: case LLAMA_VOCAB_TYPE_BPE: { return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); } @@ -7936,12 +8164,212 @@ private: llm_bigram_bpe::queue work_queue; }; -typedef enum FRAGMENT_BUFFER_VARIANT_TYPE{ +struct llm_tokenizer_wpm { + llm_tokenizer_wpm(const llama_vocab & vocab): vocab(vocab) {} + + void tokenize(const std::string & text, std::vector & output) { + auto * token_map = &vocab.token_to_id; + + // normalize and split by whitespace + std::vector words = preprocess(text); + + // bos token prepended already + + // find the longest tokens that form the words + for (const std::string &word : words) { + // skip empty words + if (word.size() == 0) { + continue; + } + + // prepend phantom space + std::string word1 = "\xe2\x96\x81" + word; + int n = word1.size(); + + // we're at the start of a new word + int i = 0; + bool match_any = false; + + // move through character position in word + while (i < n) { + // loop through possible match length + bool match = false; + for (int j = n; j > i; j--) { + auto it = token_map->find(word1.substr(i, j - i)); + if (it != token_map->end()) { + output.push_back(it->second); + match = true; + match_any = true; + i = j; + break; + } + } + + // must be an unknown character + if (!match) { + i++; + } + } + + // we didn't find any matches for this word + if (!match_any) { + output.push_back(vocab.special_unk_id); + } + } + + // append eos token + output.push_back(vocab.special_eos_id); + } + + std::vector preprocess(const std::string & text) { + std::string ori_str = normalize(text); + uint64_t ori_size = ori_str.size(); + + // single punct / single symbol / single digit + // baseline: add whitespace on the left and right of punct and chinese characters + std::vector words; + std::string new_str = ""; + uint64_t i = 0; + while (i < ori_size) { + int utf_char_len = utf8_len(ori_str[i]); + if ((utf_char_len == 1) && ispunct(ori_str[i])) { + new_str += " "; + new_str += ori_str[i]; + new_str += " "; + i += 1; + } + else if ((utf_char_len == 3) && is_chinese_char(ori_str.substr(i, 3))) { + new_str += " "; + new_str += ori_str.substr(i, 3); + new_str += " "; + i += 3; + } + else { + new_str += ori_str[i]; + i += 1; + } + } + + // split by whitespace + uint64_t l = 0; + 
uint64_t r = 0; + while (r < new_str.size()) { + // if is whitespace + if (isspace(new_str[r])) { + if (r > l) words.push_back(new_str.substr(l, (r - l))); + l = r + 1; + r = l; + } + else { + r += 1; + } + } + if (r > l) { + words.push_back(new_str.substr(l, (r - l))); + } + return words; + } + + std::string normalize(const std::string & text) { + // TODO: handle chinese characters? https://github.com/huggingface/tokenizers/blob/ef5f50605ddf9f8caef1598c0e4853862b9707a7/tokenizers/src/normalizers/bert.rs#L98 + std::string text2 = strip_accents(text); + for (size_t i = 0; i < text2.size(); i += utf8_len(text2[i])) { + char c = text2[i]; + if (c >= 'A' && c <= 'Z') { + text2[i] = c - 'A' + 'a'; + } + } + return text2; + } + + bool is_chinese_char(const std::string & str) { + int len = str.length(); + unsigned int codepoint = 0; + int num_bytes = 0; + int i = 0; + unsigned char ch = static_cast(str[i]); + if (ch <= 0x7f) { + codepoint = ch; + num_bytes = 1; + } else if ((ch >> 5) == 0x06) { + codepoint = ch & 0x1f; + num_bytes = 2; + } else if ((ch >> 4) == 0x0e) { + codepoint = ch & 0x0f; + num_bytes = 3; + } else if ((ch >> 3) == 0x1e) { + codepoint = ch & 0x07; + num_bytes = 4; + } + for (int j = 1; j < num_bytes; ++j) { + if (i + j >= len) { + return false; // incomplete UTF-8 character + } + unsigned char next_ch = static_cast(str[i + j]); + if ((next_ch >> 6) != 0x02) { + return false; // invalid trailing byte + } + codepoint = (codepoint << 6) | (next_ch & 0x3f); + } + if ((codepoint >= 0x4E00 && codepoint <= 0x9FFF) || + (codepoint >= 0x3400 && codepoint <= 0x4DBF) || + (codepoint >= 0x20000 && codepoint <= 0x2A6DF) || + (codepoint >= 0x2A700 && codepoint <= 0x2B73F) || + (codepoint >= 0x2B740 && codepoint <= 0x2B81F) || + (codepoint >= 0x2B920 && codepoint <= 0x2CEAF) || // this should be 0x2B820 but in hf rust code it is 0x2B920 + (codepoint >= 0xF900 && codepoint <= 0xFAFF) || + (codepoint >= 0x2F800 && codepoint <= 0x2FA1F) || + (codepoint >= 0x3000 && codepoint <= 0x303F) || + (codepoint >= 0xFF00 && codepoint <= 0xFFEF)) { + return true; // NOLINT + } + return false; + } + + std::string strip_accents(const std::string & input_string) { + std::string resultString; + std::map accent_map = { + {"À", 'A'}, {"Á", 'A'}, {"Â", 'A'}, {"Ã", 'A'}, {"Ä", 'A'}, {"Å", 'A'}, + {"à", 'a'}, {"á", 'a'}, {"â", 'a'}, {"ã", 'a'}, {"ä", 'a'}, {"å", 'a'}, + {"È", 'E'}, {"É", 'E'}, {"Ê", 'E'}, {"Ë", 'E'}, {"è", 'e'}, {"é", 'e'}, + {"ê", 'e'}, {"ë", 'e'}, {"Ì", 'I'}, {"Í", 'I'}, {"Î", 'I'}, {"Ï", 'I'}, + {"ì", 'i'}, {"í", 'i'}, {"î", 'i'}, {"ï", 'i'}, {"Ò", 'O'}, {"Ó", 'O'}, + {"Ô", 'O'}, {"Õ", 'O'}, {"Ö", 'O'}, {"ò", 'o'}, {"ó", 'o'}, {"ô", 'o'}, + {"õ", 'o'}, {"ö", 'o'}, {"Ù", 'U'}, {"Ú", 'U'}, {"Û", 'U'}, {"Ü", 'U'}, + {"ù", 'u'}, {"ú", 'u'}, {"û", 'u'}, {"ü", 'u'}, {"Ý", 'Y'}, {"ý", 'y'}, + {"Ç", 'C'}, {"ç", 'c'}, {"Ñ", 'N'}, {"ñ", 'n'}, + }; + + for (size_t i = 0; i < input_string.length();) { + int len = utf8_len(input_string[i]); + std::string curChar = input_string.substr(i, len); + auto iter = accent_map.find(curChar); + if (iter != accent_map.end()) { + resultString += iter->second; + } else { + resultString += curChar; + } + i += len; + } + + return resultString; + } + + static size_t utf8_len(char src) { + const size_t lookup[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4}; + uint8_t highbits = static_cast(src) >> 4; + return lookup[highbits]; + } + + const llama_vocab & vocab; +}; + +typedef enum FRAGMENT_BUFFER_VARIANT_TYPE { FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN, 
FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT } FRAGMENT_BUFFER_VARIANT_TYPE; -struct fragment_buffer_variant{ +struct fragment_buffer_variant { fragment_buffer_variant(llama_vocab::id _token) : type(FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN), @@ -7971,8 +8399,7 @@ struct fragment_buffer_variant{ // #define PRETOKENIZERDEBUG -static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list & buffer) -{ +static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list & buffer) { // for each special token for (const auto & st: vocab.special_tokens_cache) { const auto & special_token = st.first; @@ -8090,10 +8517,8 @@ static std::vector llama_tokenize_internal(const llama_vocab & switch (vocab.type) { case LLAMA_VOCAB_TYPE_SPM: { - for (const auto & fragment: fragment_buffer) - { - if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) - { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { // without adding this leading whitespace, we do not get the same results as the original tokenizer // TODO: It's likely possible to get rid of this string copy entirely @@ -8113,19 +8538,15 @@ static std::vector llama_tokenize_internal(const llama_vocab & llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); tokenizer.tokenize(raw_text, output); - } - else // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) - { + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) output.push_back(fragment.token); } } } break; case LLAMA_VOCAB_TYPE_BPE: { - for (const auto & fragment: fragment_buffer) - { - if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) - { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG @@ -8133,9 +8554,23 @@ static std::vector llama_tokenize_internal(const llama_vocab & #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) + output.push_back(fragment.token); } - else // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) - { + } + } break; + case LLAMA_VOCAB_TYPE_WPM: + { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { + auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); + +#ifdef PRETOKENIZERDEBUG + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); +#endif + llm_tokenizer_wpm tokenizer(vocab); + tokenizer.tokenize(raw_text, output); + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) output.push_back(fragment.token); } } @@ -10799,7 +11234,7 @@ struct llama_context * llama_new_context_with_model( // graph inputs { ggml_init_params init_params = { - /* .mem_size */ ggml_tensor_overhead()*5, + /* .mem_size */ ggml_tensor_overhead()*7, /* .mem_buffer */ nullptr, /* .no_alloc */ true, }; @@ -10810,12 +11245,14 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); + ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, 1, cparams.n_batch); 
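For reference, the inp_sum tensor created just above is filled with 1.0f/n_tokens at graph-build time and is what build_bert() multiplies against the transposed hidden states, so the "pooling" step is simply a mean over the token embeddings. A minimal standalone sketch of that arithmetic follows; mean_pool is a hypothetical helper name used only for illustration.

// mean pooling over tokens, equivalent to mul_mat with a row of 1/n_tokens weights
#include <vector>
#include <cstdio>

static std::vector<float> mean_pool(const std::vector<float> & hidden, int n_tokens, int n_embd) {
    std::vector<float> out(n_embd, 0.0f);
    const float w = 1.0f / float(n_tokens);   // same value written into inp_sum
    for (int t = 0; t < n_tokens; ++t) {
        for (int e = 0; e < n_embd; ++e) {
            out[e] += w * hidden[t*n_embd + e];
        }
    }
    return out;
}

int main() {
    // two tokens, three embedding dimensions
    std::vector<float> hidden = { 1.0f, 2.0f, 3.0f,
                                  3.0f, 4.0f, 5.0f };
    std::vector<float> pooled = mean_pool(hidden, 2, 3);
    printf("%.1f %.1f %.1f\n", pooled[0], pooled[1], pooled[2]); // 2.0 3.0 4.0
}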
ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); ggml_set_name(ctx->inp_pos, "inp_pos"); ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask"); ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); + ggml_set_name(ctx->inp_sum, "inp_sum"); ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true)); @@ -11746,6 +12183,7 @@ static std::string llama_decode_text(const std::string & text) { int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length) { if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { + case LLAMA_VOCAB_TYPE_WPM: case LLAMA_VOCAB_TYPE_SPM: { // NOTE: we accept all unsupported token types, // suppressing them like CONTROL tokens. diff --git a/llama.h b/llama.h index cec4158bc..367e8f1a1 100644 --- a/llama.h +++ b/llama.h @@ -61,6 +61,7 @@ extern "C" { enum llama_vocab_type { LLAMA_VOCAB_TYPE_SPM = 0, // SentencePiece LLAMA_VOCAB_TYPE_BPE = 1, // Byte Pair Encoding + LLAMA_VOCAB_TYPE_WPM = 2, // WordPiece }; enum llama_token_type { From 3bdc4cd0f595a6096cca4a64aa75ffa8a3503465 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sun, 11 Feb 2024 19:08:39 +0100 Subject: [PATCH 573/811] CUDA: mul_mat_vec_q tiling, refactor mul mat logic (#5434) * CUDA: mul_mat_vec_q tiling, refactor mul mat logic Co-authored-by: slaren --------- Co-authored-by: slaren --- ggml-cuda.cu | 265 +++++++++++++++++++++++++++++---------------------- 1 file changed, 149 insertions(+), 116 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 5053757e6..96976f248 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -150,8 +150,8 @@ #define CUDA_USE_TENSOR_CORES #endif -// max batch size to use MMQ kernels when tensor cores are available -#define MMQ_MAX_BATCH_SIZE 32 +#define MMVQ_MAX_BATCH_SIZE 8 // max batch size to use MMVQ kernels +#define MMQ_MAX_BATCH_SIZE 32 // max batch size to use MMQ kernels when tensor cores are available #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -5310,51 +5310,59 @@ template static __global__ void #endif // __CUDA_ARCH__ >= CC_VOLTA } -#define MMVQ_NWARPS_NVIDIA 4 -#define MMVQ_NWARPS_AMD_RDNA2 1 -#define MMVQ_NWARPS_AMD_OLD 4 - -template +template #if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) -__launch_bounds__(nwarps*WARP_SIZE, 1) // tells the compiler to use as many registers as it wants +// tell the compiler to use as many registers as it wants, see nwarps definition below +__launch_bounds__((ncols_y <= 4 ? 4 : 2)*WARP_SIZE, 1) #endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) static __global__ void mul_mat_vec_q( const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, - const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) { + const int ncols_x, const int nrows_x, const int nrows_y, const int nrows_dst) { - const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par; +#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && (defined(RDNA2) || defined(RDNA3)) + constexpr int nwarps = 1; + constexpr int rows_per_cuda_block = 1; +#else + constexpr int nwarps = ncols_y <= 4 ? 4 : 2; + constexpr int rows_per_cuda_block = ncols_y == 1 ? 
1 : 2; +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && !defined(RDNA2) && !defined(RDNA3) - const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; - const int row = blockIdx.x; - - const int blocks_per_row_x = ncols_x / qk; - const int blocks_per_col_y = nrows_y / QK8_1; - const int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi; + const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; + const int row0 = rows_per_cuda_block*blockIdx.x; + const int blocks_per_row_x = ncols_x / qk; + const int blocks_per_col_y = nrows_y / QK8_1; + constexpr int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi; // partial sum for each thread - float tmp[ncols_y_template != 0 ? ncols_y_template : 8] = {0.0f}; + float tmp[ncols_y][rows_per_cuda_block] = {0.0f}; const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = tid / (qi/vdr); i < blocks_per_row_x; i += blocks_per_iter) { - const int ibx = row*blocks_per_row_x + i; // x block index + for (int kbx = tid / (qi/vdr); kbx < blocks_per_row_x; kbx += blocks_per_iter) { + const int kby = kbx * (qk/QK8_1); // y block index that aligns with kbx - const int iby = i * (qk/QK8_1); // y block index that aligns with ibx - - const int iqs = vdr * (tid % (qi/vdr)); // x block quant index when casting the quants to int + // x block quant index when casting the quants to int + const int kqs = vdr * (tid % (qi/vdr)); #pragma unroll for (int j = 0; j < ncols_y; ++j) { - tmp[j] += vec_dot_q_cuda(&x[ibx], &y[j*blocks_per_col_y + iby], iqs); +#pragma unroll + for (int i = 0; i < rows_per_cuda_block; ++i) { + tmp[j][i] += vec_dot_q_cuda( + &x[kbx + (row0 + i)*blocks_per_row_x], &y[j*blocks_per_col_y + kby], kqs); + } } } - __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y_template != 0 ? ncols_y_template : 8][WARP_SIZE]; + __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y][rows_per_cuda_block][WARP_SIZE]; if (threadIdx.y > 0) { #pragma unroll for (int j = 0; j < ncols_y; ++j) { - tmp_shared[threadIdx.y-1][j][threadIdx.x] = tmp[j]; +#pragma unroll + for (int i = 0; i < rows_per_cuda_block; ++i) { + tmp_shared[threadIdx.y-1][j][i][threadIdx.x] = tmp[j][i]; + } } } __syncthreads(); @@ -5366,13 +5374,16 @@ static __global__ void mul_mat_vec_q( #pragma unroll for (int j = 0; j < ncols_y; ++j) { #pragma unroll - for (int i = 0; i < nwarps-1; ++i) { - tmp[j] += tmp_shared[i][j][threadIdx.x]; + for (int i = 0; i < rows_per_cuda_block; ++i) { +#pragma unroll + for (int l = 0; l < nwarps-1; ++l) { + tmp[j][i] += tmp_shared[l][j][i][threadIdx.x]; + } + tmp[j][i] = warp_reduce_sum(tmp[j][i]); } - tmp[j] = warp_reduce_sum(tmp[j]); - if (threadIdx.x == 0) { - dst[j*nrows_dst + row] = tmp[j]; + if (threadIdx.x < rows_per_cuda_block) { + dst[j*nrows_dst + row0 + threadIdx.x] = tmp[j][threadIdx.x]; } } } @@ -6851,65 +6862,75 @@ static void mul_mat_vec_q_cuda( const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, const int nrows_dst, cudaStream_t stream) { GGML_ASSERT(ncols_x % qk == 0); - GGML_ASSERT(ncols_y <= 4); + GGML_ASSERT(ncols_y <= MMVQ_MAX_BATCH_SIZE); int id; CUDA_CHECK(cudaGetDevice(&id)); - int nwarps; - if (g_device_caps[id].cc >= CC_OFFSET_AMD) { - nwarps = g_device_caps[id].cc >= CC_RDNA2 ? 
MMVQ_NWARPS_AMD_RDNA2 : MMVQ_NWARPS_AMD_OLD; - } else { - nwarps = MMVQ_NWARPS_NVIDIA; - } + int64_t nwarps = 1; + int64_t rows_per_cuda_block = 1; - const dim3 block_nums(nrows_x, 1, 1); + if (g_device_caps[id].cc < CC_RDNA2) { // NVIDIA and AMD older than RDNA2 + switch(ncols_y) { + case 1: + nwarps = 4; + rows_per_cuda_block = 1; + break; + case 2: + case 3: + case 4: + nwarps = 4; + rows_per_cuda_block = 2; + break; + case 5: + case 6: + case 7: + case 8: + nwarps = 2; + rows_per_cuda_block = 2; + break; + default: + GGML_ASSERT(false); + break; + } + } + const int64_t nblocks = (nrows_x + rows_per_cuda_block - 1) / rows_per_cuda_block; + const dim3 block_nums(nblocks, 1, 1); const dim3 block_dims(WARP_SIZE, nwarps, 1); - switch (nwarps) { - case 1: switch(ncols_y) { - case 1: - mul_mat_vec_q<1, 1, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 2: - mul_mat_vec_q<1, 2, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 3: - mul_mat_vec_q<1, 3, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 4: - mul_mat_vec_q<1, 4, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - default: - GGML_ASSERT(false); - break; - } break; - case 4: switch(ncols_y) { - case 1: - mul_mat_vec_q<4, 1, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 2: - mul_mat_vec_q<4, 2, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 3: - mul_mat_vec_q<4, 3, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - case 4: - mul_mat_vec_q<4, 4, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst); - break; - default: - GGML_ASSERT(false); - break; - } break; - + switch (ncols_y) { + case 1: + mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 2: + mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 3: + mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 4: + mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 5: + mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 6: + mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 7: + mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; + case 8: + mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst); + break; default: GGML_ASSERT(false); break; @@ -9735,7 +9756,7 @@ static __global__ void k_compute_batched_ptrs( ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; } -static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { 
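For reference, the mul_mat_vec_q_cuda launch logic above picks nwarps and rows_per_cuda_block from ncols_y on NVIDIA and pre-RDNA2 AMD (RDNA2/RDNA3 keep nwarps = 1 and one row per block), and each CUDA block then covers rows_per_cuda_block rows of the quantized matrix. A small host-side sketch of that selection; pick_mmvq_launch and mmvq_launch are illustrative names only.

// mirrors the switch(ncols_y) above for the non-RDNA2 path
#include <cstdio>

struct mmvq_launch { int nwarps; int rows_per_cuda_block; long long nblocks; };

static mmvq_launch pick_mmvq_launch(int ncols_y, long long nrows_x) {
    // valid for ncols_y in 1..8 (MMVQ_MAX_BATCH_SIZE)
    const int nwarps              = ncols_y <= 4 ? 4 : 2;
    const int rows_per_cuda_block = ncols_y == 1 ? 1 : 2;
    const long long nblocks = (nrows_x + rows_per_cuda_block - 1) / rows_per_cuda_block;
    return { nwarps, rows_per_cuda_block, nblocks };
}

int main() {
    for (int ncols_y = 1; ncols_y <= 8; ++ncols_y) {
        const mmvq_launch l = pick_mmvq_launch(ncols_y, 4096);
        printf("ncols_y=%d -> nwarps=%d rows/block=%d nblocks=%lld\n",
               ncols_y, l.nwarps, l.rows_per_cuda_block, l.nblocks);
    }
}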
GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); @@ -9893,39 +9914,69 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 int64_t min_compute_capability = INT_MAX; + bool any_pascal_with_slow_fp16 = false; if (split) { ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; auto & tensor_split = buft_ctx->tensor_split; for (int id = 0; id < g_device_count; ++id) { - if (min_compute_capability > g_device_caps[id].cc && tensor_split[id] < (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) { + // skip devices that are not going to do any work: + if (tensor_split[id] >= (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) { + continue; + } + + if (min_compute_capability > g_device_caps[id].cc) { min_compute_capability = g_device_caps[id].cc; } + if (g_device_caps[id].cc == 610) { + any_pascal_with_slow_fp16 = true; + } } } else { - min_compute_capability = g_device_caps[g_main_device].cc; + min_compute_capability = g_device_caps[g_main_device].cc; + any_pascal_with_slow_fp16 = g_device_caps[g_main_device].cc == 610; } + // check data types and tensor shapes for custom matrix multiplication kernels: + bool use_dequantize_mul_mat_vec = (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 + && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->ne[1] == 1; + + bool use_mul_mat_vec_q = ggml_is_quantized(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 + && src1->ne[1] <= MMVQ_MAX_BATCH_SIZE; + + bool use_mul_mat_q = ggml_cuda_supports_mmq(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32; + #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) const bool fp16_performance_good = min_compute_capability >= CC_RDNA1; - bool use_mul_mat_q = ggml_is_quantized(src0->type); + #ifdef CUDA_USE_TENSOR_CORES use_mul_mat_q = use_mul_mat_q && min_compute_capability < CC_RDNA3; #endif // CUDA_USE_TENSOR_CORES #else - const bool fp16_performance_good = min_compute_capability >= CC_VOLTA; - bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); + // fp16 performance is good on Volta or newer and on P100 (compute capability 6.0) + const bool fp16_performance_good = min_compute_capability >= CC_PASCAL && !any_pascal_with_slow_fp16; + + // mmvq and mmq need the __dp4a instruction which on NVIDIA is only available for CC >= 6.1 + use_mul_mat_vec_q = use_mul_mat_vec_q && min_compute_capability >= MIN_CC_DP4A; + use_mul_mat_q = use_mul_mat_q && min_compute_capability >= MIN_CC_DP4A; + #ifdef CUDA_USE_TENSOR_CORES // when tensor cores are available, use them for large batch size // ref: https://github.com/ggerganov/llama.cpp/pull/3776 - use_mul_mat_q = use_mul_mat_q && !(fp16_performance_good && src1->ne[1] > MMQ_MAX_BATCH_SIZE); + use_mul_mat_q = use_mul_mat_q && (!fp16_performance_good || src1->ne[1] <= MMQ_MAX_BATCH_SIZE); #endif // CUDA_USE_TENSOR_CORES #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - use_mul_mat_q = use_mul_mat_q && ggml_cuda_supports_mmq(src0->type); + // if mmvq is available it's a better choice than dmmv: +#ifndef GGML_CUDA_FORCE_DMMV + use_dequantize_mul_mat_vec = use_dequantize_mul_mat_vec && !use_mul_mat_vec_q; +#endif // GGML_CUDA_FORCE_DMMV // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); @@ -9943,33 
+9994,15 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_mul_mat_vec_nc(src0, src1, dst); } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { // KQ + KQV multi-batch - ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); - } else if (src0->type == GGML_TYPE_F32) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); - } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { - if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->type == GGML_TYPE_F32) { -#ifdef GGML_CUDA_FORCE_DMMV - const bool use_mul_mat_vec_q = false; -#else - const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); -#endif // GGML_CUDA_FORCE_DMMV - - if (use_mul_mat_vec_q) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); - } else { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); - } - } else { - if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && src1->type == GGML_TYPE_F32) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); - } else if (use_mul_mat_q) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); - } else { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); - } - } + ggml_cuda_mul_mat_batched_cublas(src0, src1, dst); + } else if (use_dequantize_mul_mat_vec) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); + } else if (use_mul_mat_vec_q) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); + } else if (use_mul_mat_q) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { - GGML_ASSERT(false); + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } } From 3b169441dfe8e420f88d1592708cc2a871daadb9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 12 Feb 2024 09:16:06 +0200 Subject: [PATCH 574/811] sync : ggml (#5452) * ggml-alloc : v3 (ggml/727) * ggml-alloc v3 ggml-ci * fix ci ggml-ci * whisper : check for backend buffer allocation failures * whisper : avoid leaks when initialization fails * cleanup ggml-ci * style fixes ggml-ci * sync : ggml * update llama.cpp, clip.cpp, export-lora.cpp * update finetune.cpp, train-text-from-scratch.cpp ggml-ci * ggml-backend : reduce alignment to 32 to match gguf and fix mmap --------- Co-authored-by: slaren --- examples/export-lora/export-lora.cpp | 19 +- examples/finetune/finetune.cpp | 147 +- examples/llava/clip.cpp | 152 +- .../train-text-from-scratch.cpp | 112 +- ggml-alloc.c | 1373 +++++++++-------- ggml-alloc.h | 110 +- ggml-backend.c | 492 +++--- ggml-backend.h | 15 +- ggml.c | 28 +- ggml.h | 18 +- llama.cpp | 181 +-- scripts/sync-ggml.last | 2 +- 12 files changed, 1287 insertions(+), 1362 deletions(-) diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 4cd5d99bb..2f7be8a13 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -337,24 +337,14 @@ static bool apply_lora(struct ggml_tensor * tensor, struct lora_data * lora, int params.mem_buffer = NULL; params.no_alloc = true; struct ggml_context * ctx = NULL; - struct ggml_allocr * alloc = NULL; - struct ggml_cgraph * gf = NULL; + struct ggml_gallocr * alloc = NULL; 
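For reference, the allocator migration in this and the following hunks replaces the old ggml_allocr measure-then-allocate two-pass flow with a single ggml_gallocr pass: build the graph on a no_alloc context, call ggml_gallocr_alloc_graph once, fill the inputs, then compute. A minimal sketch of that pattern, assuming the ggml headers from this tree; the toy add graph and tensor shapes are illustrative only.

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
    struct ggml_init_params params = {
        /*.mem_size   =*/ ggml_tensor_overhead()*8 + ggml_graph_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true,                  // tensor data comes from the gallocr
    };
    struct ggml_context * ctx = ggml_init(params);

    struct ggml_tensor * a = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 4);
    struct ggml_tensor * b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 4);
    ggml_set_input(a);                           // keep inputs out of the reuse pool
    ggml_set_input(b);

    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, ggml_add(ctx, a, b));

    // one-pass allocation replaces the old measure + rebuild + alloc dance
    ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type());
    ggml_gallocr_alloc_graph(alloc, gf);

    const float av[4] = {1, 2, 3, 4}, bv[4] = {10, 20, 30, 40};
    ggml_backend_tensor_set(a, av, 0, ggml_nbytes(a));
    ggml_backend_tensor_set(b, bv, 0, ggml_nbytes(b));

    struct ggml_cplan cplan = ggml_graph_plan(gf, /*n_threads =*/ 1);
    std::vector<uint8_t> work(cplan.work_size);
    cplan.work_data = work.data();
    ggml_graph_compute(gf, &cplan);

    const float * out = (const float *) gf->nodes[gf->n_nodes - 1]->data;
    printf("%.0f %.0f %.0f %.0f\n", out[0], out[1], out[2], out[3]); // 11 22 33 44

    ggml_gallocr_free(alloc);
    ggml_free(ctx);
}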
+ struct ggml_cgraph * gf = NULL; ctx = ggml_init(params); - alloc = ggml_allocr_new_measure(tensor_alignment); + alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = build_graph_lora(ctx, tensor, lora_a, lora_b, scaling); - size_t alloc_size = ggml_allocr_alloc_graph(alloc, gf); - ggml_allocr_free(alloc); - ggml_free(ctx); - static std::vector data_compute; - data_compute.resize(alloc_size + tensor_alignment); - - ctx = ggml_init(params); - alloc = ggml_allocr_new(data_compute.data(), data_compute.size(), tensor_alignment); - gf = build_graph_lora(ctx, tensor, lora_a, lora_b, scaling); - ggml_allocr_alloc_graph(alloc, gf); - ggml_allocr_free(alloc); + ggml_gallocr_alloc_graph(alloc, gf); struct ggml_cplan cplan = ggml_graph_plan(gf, n_threads); static std::vector data_work; @@ -363,6 +353,7 @@ static bool apply_lora(struct ggml_tensor * tensor, struct lora_data * lora, int ggml_graph_compute(gf, &cplan); + ggml_gallocr_free(alloc); ggml_free(ctx); return true; } diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b7e19c5fe..b11c56020 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" #include "llama.h" #include "common.h" #include "train.h" @@ -13,8 +14,6 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif -static const size_t tensor_alignment = 32; - struct my_llama_hparams { uint32_t n_vocab = 32000; uint32_t n_ctx = 512; @@ -128,7 +127,7 @@ struct my_llama_lora_layer { struct my_llama_lora { struct ggml_context * ctx = NULL; - std::vector data; + ggml_backend_buffer_t data; my_llama_lora_hparams hparams; @@ -372,63 +371,6 @@ static void set_param_lora(struct my_llama_lora * lora) { } } -static void alloc_lora(struct ggml_allocr * alloc, struct my_llama_lora * lora) { - ggml_allocr_alloc(alloc, lora->tok_embeddings_a); - ggml_allocr_alloc(alloc, lora->tok_embeddings_b); - ggml_allocr_alloc(alloc, lora->norm_a); - ggml_allocr_alloc(alloc, lora->norm_b); - ggml_allocr_alloc(alloc, lora->output_a); - ggml_allocr_alloc(alloc, lora->output_b); - for (uint32_t i = 0; i < lora->layers.size(); ++i) { - auto & layer = lora->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm_a); - ggml_allocr_alloc(alloc, layer.attention_norm_b); - ggml_allocr_alloc(alloc, layer.wq_a); - ggml_allocr_alloc(alloc, layer.wq_b); - ggml_allocr_alloc(alloc, layer.wk_a); - ggml_allocr_alloc(alloc, layer.wk_b); - ggml_allocr_alloc(alloc, layer.wv_a); - ggml_allocr_alloc(alloc, layer.wv_b); - ggml_allocr_alloc(alloc, layer.wo_a); - ggml_allocr_alloc(alloc, layer.wo_b); - ggml_allocr_alloc(alloc, layer.ffn_norm_a); - ggml_allocr_alloc(alloc, layer.ffn_norm_b); - ggml_allocr_alloc(alloc, layer.w1_a); - ggml_allocr_alloc(alloc, layer.w1_b); - ggml_allocr_alloc(alloc, layer.w2_a); - ggml_allocr_alloc(alloc, layer.w2_b); - ggml_allocr_alloc(alloc, layer.w3_a); - ggml_allocr_alloc(alloc, layer.w3_b); - } - ggml_allocr_alloc(alloc, lora->tok_embeddings_a->grad); - ggml_allocr_alloc(alloc, lora->tok_embeddings_b->grad); - ggml_allocr_alloc(alloc, lora->norm_a->grad); - ggml_allocr_alloc(alloc, lora->norm_b->grad); - ggml_allocr_alloc(alloc, lora->output_a->grad); - ggml_allocr_alloc(alloc, lora->output_b->grad); - for (uint32_t i = 0; i < lora->layers.size(); ++i) { - auto & layer = lora->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm_a->grad); - ggml_allocr_alloc(alloc, layer.attention_norm_b->grad); - ggml_allocr_alloc(alloc, 
layer.wq_a->grad); - ggml_allocr_alloc(alloc, layer.wq_b->grad); - ggml_allocr_alloc(alloc, layer.wk_a->grad); - ggml_allocr_alloc(alloc, layer.wk_b->grad); - ggml_allocr_alloc(alloc, layer.wv_a->grad); - ggml_allocr_alloc(alloc, layer.wv_b->grad); - ggml_allocr_alloc(alloc, layer.wo_a->grad); - ggml_allocr_alloc(alloc, layer.wo_b->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm_a->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm_b->grad); - ggml_allocr_alloc(alloc, layer.w1_a->grad); - ggml_allocr_alloc(alloc, layer.w1_b->grad); - ggml_allocr_alloc(alloc, layer.w2_a->grad); - ggml_allocr_alloc(alloc, layer.w2_b->grad); - ggml_allocr_alloc(alloc, layer.w3_a->grad); - ggml_allocr_alloc(alloc, layer.w3_b->grad); - } -} - static void init_lora(const struct my_llama_model * model, struct my_llama_lora * lora) { const auto & lparams = lora->hparams; @@ -522,18 +464,8 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora set_param_lora(lora); - // measure data size - size_t size = 0; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { - size += GGML_PAD(ggml_nbytes(t), tensor_alignment); - } - - // allocate data - struct ggml_allocr * alloc = NULL; - lora->data.resize(size + tensor_alignment); - alloc = ggml_allocr_new(lora->data.data(), lora->data.size(), tensor_alignment); - alloc_lora(alloc, lora); - ggml_allocr_free(alloc); + // allocate data for lora tensors + lora->data = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cpu_buffer_type()); } static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, float std, float min, float max) { @@ -579,7 +511,7 @@ static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, fl static struct ggml_tensor * llama_build_lora_finetune_graphs( struct my_llama_model * model, struct my_llama_lora * lora, - struct ggml_allocr * alloc, + ggml_gallocr_t alloc, struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, @@ -590,7 +522,8 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( const int n_tokens, const int n_batch, const bool enable_flash_attn, - const bool enable_checkpointing) { + const bool enable_checkpointing, + const bool measure_only) { ggml_set_scratch(ctx, { 0, 0, nullptr, }); const int n_past = 0; @@ -622,13 +555,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // KQ_pos - contains the positions struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, N); - ggml_allocr_alloc(alloc, KQ_pos); - if (!ggml_allocr_is_measure(alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < N; ++i) { - data[i] = n_past + i; - } - } + ggml_set_input(KQ_pos); // rope has so much parameters that we make a custom function for it auto rope = [ctx, KQ_pos, n_rot, n_ctx, rope_freq_base, rope_freq_scale] @@ -780,7 +707,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // input gradient ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); - ggml_allocr_alloc(alloc, t36->grad); + ggml_set_input(t36->grad); // KQ_pos ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); @@ -805,11 +732,23 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // note: they will be freed in reverse order for (unsigned int i = 0; i < checkpoints.size(); ++i) { if (checkpoints[i]->data == NULL && checkpoints[i]->view_src == NULL) { - ggml_allocr_alloc(alloc, 
checkpoints[i]); + ggml_set_input(checkpoints[i]); } } - ggml_allocr_alloc_graph(alloc, gb); + if (measure_only) { + ggml_gallocr_reserve(alloc, gb); + } else { + ggml_gallocr_alloc_graph(alloc, gb); + + // set KQ_pos + { + int * data = (int *) KQ_pos->data; + for (int i = 0; i < N; ++i) { + data[i] = n_past + i; + } + } + } // remove the additional nodes and leafs for (int i = n_leafs_before; i < gb->n_leafs; ++i) { @@ -1663,7 +1602,7 @@ int main(int argc, char ** argv) { printf("%s: seen train_samples %llu\n", __func__, (long long unsigned) train->train_samples); printf("%s: seen train_tokens %llu\n", __func__, (long long unsigned) train->train_tokens); printf("%s: completed train_epochs %llu\n", __func__, (long long unsigned) train->train_epochs); - printf("%s: lora_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(lora.ctx) + lora.data.size()), (float) (ggml_used_mem(lora.ctx) + lora.data.size()) / (1024.0f*1024.0f)); + printf("%s: lora_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(lora.ctx) + ggml_backend_buffer_get_size(lora.data)), (float) (ggml_used_mem(lora.ctx) + ggml_backend_buffer_get_size(lora.data)) / (1024.0f*1024.0f)); if (params.only_write_lora) { save_train_files_data save_data; @@ -1690,10 +1629,6 @@ int main(int argc, char ** argv) { int n_vocab = model.hparams.n_vocab; int n_batch = params.common.n_batch; - - std::vector mem_input_data; - std::vector mem_compute_data; - // context for input tensors without their data struct ggml_init_params ctx_input_params = { ggml_tensor_overhead() * 2, // mem_size @@ -1706,17 +1641,11 @@ int main(int argc, char ** argv) { struct ggml_tensor * tokens_input = ggml_new_tensor_2d(ctx_input, GGML_TYPE_I32, n_tokens, n_batch); struct ggml_tensor * target_probs = ggml_new_tensor_3d(ctx_input, GGML_TYPE_F32, n_vocab, n_tokens, n_batch); - // measure required memory for input tensors - size_t max_input_size = GGML_PAD(ggml_nbytes(tokens_input), tensor_alignment) + - GGML_PAD(ggml_nbytes(target_probs), tensor_alignment) + - tensor_alignment; - printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); - // allocate input tensors - mem_input_data.resize(max_input_size); - ggml_allocr_t alloc_inps = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc_inps, tokens_input); - ggml_allocr_alloc(alloc_inps, target_probs); + // measure required memory for input tensors + ggml_backend_buffer_t input_data = ggml_backend_alloc_ctx_tensors_from_buft(ctx_input, ggml_backend_cpu_buffer_type()); + size_t max_input_size = ggml_backend_buffer_get_size(input_data); + printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1743,7 +1672,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - ggml_allocr_t alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1756,14 +1685,15 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, 
params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + true ); - size_t max_compute_size = ggml_allocr_max_size(alloc) + tensor_alignment; + size_t max_compute_size = ggml_gallocr_get_buffer_size(alloc, 0); // FIXME: this will still allocate the buffer if (max_compute_size < best_compute_size) { best_compute_size = max_compute_size; best_order = gf->order; } - ggml_allocr_free(alloc); + ggml_gallocr_free(alloc); ggml_free(ctx_compute); } size_t max_compute_size = best_compute_size; @@ -1774,9 +1704,8 @@ int main(int argc, char ** argv) { "invalid"); // allocate compute tensors - mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); - ggml_allocr_t alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1789,11 +1718,9 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + false ); - ggml_allocr_free(alloc); - ggml_allocr_free(alloc_inps); - // tokenize data std::vector train_tokens; @@ -1908,6 +1835,8 @@ int main(int argc, char ** argv) { ggml_free(ctx_work); ggml_free(ctx_compute); ggml_free(ctx_input); + ggml_gallocr_free(alloc); + int64_t t1 = ggml_time_ms(); printf("%s: total training time: ", __func__); diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 9129052a2..ccd0d85ad 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -367,7 +367,7 @@ struct clip_ctx { ggml_backend_buffer_t params_buffer = NULL; ggml_backend_buffer_t compute_buffer = NULL; ggml_backend_t backend = NULL; - ggml_allocr * compute_alloc = NULL; + ggml_gallocr_t compute_alloc = NULL; }; static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32_batch * imgs) { @@ -405,31 +405,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 struct ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * inp_raw = ggml_new_tensor_4d(ctx0, GGML_TYPE_F32, image_size, image_size, 3, batch_size); - ggml_allocr_alloc(ctx->compute_alloc, inp_raw); - - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - float * data = (float *)malloc(ggml_nbytes(inp_raw)); - - for (size_t i = 0; i < imgs->size; i++) { - const int nx = imgs->data[i].nx; - const int ny = imgs->data[i].ny; - GGML_ASSERT(nx == image_size && ny == image_size); - - const int n = nx * ny; - - for (int b = 0; b < batch_size; b++) { - for (int k = 0; k < 3; k++) { - for (int y = 0; y < ny; y++) { - for (int x = 0; x < nx; x++) { - data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; - } - } - } - } - } - ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); - free(data); - } + ggml_set_name(inp_raw, "inp_raw"); + ggml_set_input(inp_raw); struct ggml_tensor * inp = ggml_conv_2d(ctx0, model.patch_embeddings, inp_raw, patch_size, patch_size, 0, 0, 1, 1); @@ -438,13 +415,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 // concat class_embeddings and patch_embeddings struct ggml_tensor * embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); - ggml_allocr_alloc(ctx->compute_alloc, embeddings); 
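For reference, the inp_raw fill loop being moved here converts clip's interleaved RGB pixel buffer (3 values per pixel) into one contiguous plane per channel, which is the layout the graph input expects. A standalone sketch of that conversion, without the batch dimension; interleaved_to_planar is a hypothetical helper name.

#include <vector>
#include <cstdio>

static std::vector<float> interleaved_to_planar(const std::vector<float> & rgb, int nx, int ny) {
    const int n = nx * ny;
    std::vector<float> planar(3 * n);
    for (int k = 0; k < 3; k++) {               // one plane per channel
        for (int y = 0; y < ny; y++) {
            for (int x = 0; x < nx; x++) {
                planar[k*n + y*nx + x] = rgb[3*(y*nx + x) + k];
            }
        }
    }
    return planar;
}

int main() {
    // 2x1 image: pixel0 = (r0,g0,b0), pixel1 = (r1,g1,b1)
    std::vector<float> rgb = { 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f };
    std::vector<float> planar = interleaved_to_planar(rgb, 2, 1);
    for (float v : planar) printf("%.1f ", v);  // 0.1 0.4 0.2 0.5 0.3 0.6
    printf("\n");
}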
- if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - void* zero_mem = malloc(ggml_nbytes(embeddings)); - memset(zero_mem, 0, ggml_nbytes(embeddings)); - ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); - free(zero_mem); - } + ggml_set_name(embeddings, "embeddings"); + ggml_set_input(embeddings); embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); @@ -453,15 +425,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); struct ggml_tensor * positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_positions); - ggml_allocr_alloc(ctx->compute_alloc, positions); - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - int* positions_data = (int*)malloc(ggml_nbytes(positions)); - for (int i = 0; i < num_positions; i++) { - positions_data[i] = i; - } - ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); - free(positions_data); - } + ggml_set_name(positions, "positions"); + ggml_set_input(positions); embeddings = ggml_add(ctx0, embeddings, ggml_get_rows(ctx0, model.position_embeddings, positions)); @@ -560,15 +525,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings = ggml_reshape_2d(ctx0, embeddings, embeddings->ne[0], embeddings->ne[1]); struct ggml_tensor * patches = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_patches); - ggml_allocr_alloc(ctx->compute_alloc, patches); - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - int* patches_data = (int*)malloc(ggml_nbytes(patches)); - for (int i = 0; i < num_patches; i++) { - patches_data[i] = i + 1; - } - ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); - free(patches_data); - } + ggml_set_name(patches, "patches"); + ggml_set_input(patches); // shape [1, 576, 1024] // ne is whcn, ne = [1024, 576, 1, 1] @@ -809,7 +767,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // data - size_t buffer_size = 0; + size_t model_size = 0; { for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); @@ -817,7 +775,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { enum ggml_type type = gguf_get_tensor_type(ctx, i); struct ggml_tensor * cur = ggml_get_tensor(meta, name); size_t tensor_size = ggml_nbytes(cur); - buffer_size += tensor_size; + model_size += tensor_size; if (verbosity >= 3) { printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu, shape:[%" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %" PRIu64 "], type = %s\n", __func__, i, ggml_n_dims(cur), cur->name, tensor_size, offset, cur->ne[0], cur->ne[1], cur->ne[2], cur->ne[3], ggml_type_name(type)); @@ -825,8 +783,6 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } } - buffer_size += n_tensors * 128 /* CLIP PADDING */; - clip_ctx * new_clip = new clip_ctx; // update projector type @@ -886,12 +842,12 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: text_encoder: %d\n", __func__, new_clip->has_text_encoder); printf("%s: vision_encoder: %d\n", __func__, new_clip->has_vision_encoder); printf("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector); - printf("%s: model size: %.2f MB\n", __func__, buffer_size / 1024.0 / 1024.0); + printf("%s: model size: %.2f MB\n", __func__, model_size / 1024.0 / 
1024.0); printf("%s: metadata size: %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0); } } - printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, buffer_size / (1024.0 * 1024.0), n_tensors); + printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, model_size / (1024.0 * 1024.0), n_tensors); // load tensors { @@ -925,12 +881,10 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // alloc memory and offload data - new_clip->params_buffer = ggml_backend_alloc_buffer(new_clip->backend, buffer_size); - ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer); + new_clip->params_buffer = ggml_backend_alloc_ctx_tensors(new_clip->ctx_data, new_clip->backend); for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx_data, name); - ggml_allocr_alloc(alloc, cur); const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); if (!fin) { @@ -949,7 +903,6 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ggml_backend_tensor_set(cur, read_buf.data(), 0, num_bytes); } } - ggml_allocr_free(alloc); fin.close(); } @@ -1077,15 +1030,12 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // measure mem requirement and allocate { new_clip->buf_compute_meta.resize(GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead()); - new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend); + new_clip->compute_alloc = ggml_gallocr_new(ggml_backend_get_default_buffer_type(new_clip->backend)); clip_image_f32_batch batch; batch.size = 1; ggml_cgraph * gf = clip_image_build_graph(new_clip, &batch); - size_t compute_memory_buffer_size = ggml_allocr_alloc_graph(new_clip->compute_alloc, gf); - ggml_allocr_free(new_clip->compute_alloc); - new_clip->compute_buffer = ggml_backend_alloc_buffer(new_clip->backend, compute_memory_buffer_size); - new_clip->compute_alloc = ggml_allocr_new_from_buffer(new_clip->compute_buffer); - + ggml_gallocr_reserve(new_clip->compute_alloc, gf); + size_t compute_memory_buffer_size = ggml_gallocr_get_buffer_size(new_clip->compute_alloc, 0); printf("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0); } @@ -1267,12 +1217,72 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima GGML_ASSERT(batch_size == 1); // TODO: support multiple images } - // reset alloc buffer to clean the memory from previous invocations - ggml_allocr_reset(ctx->compute_alloc); - // build the inference graph ggml_cgraph * gf = clip_image_build_graph(ctx, imgs); - ggml_allocr_alloc_graph(ctx->compute_alloc, gf); + ggml_gallocr_alloc_graph(ctx->compute_alloc, gf); + + // set inputs + const auto & model = ctx->vision_model; + const auto & hparams = model.hparams; + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + const int num_positions = num_patches + 1; + + { + struct ggml_tensor * inp_raw = ggml_graph_get_tensor(gf, "inp_raw"); + float * data = (float *)malloc(ggml_nbytes(inp_raw)); + + for (size_t i = 0; i < imgs->size; i++) { + const int nx = imgs->data[i].nx; + const int ny = imgs->data[i].ny; + GGML_ASSERT(nx == image_size && ny == image_size); + + const int n = nx * ny; + + for 
(int b = 0; b < batch_size; b++) { + for (int k = 0; k < 3; k++) { + for (int y = 0; y < ny; y++) { + for (int x = 0; x < nx; x++) { + data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; + } + } + } + } + } + ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); + free(data); + } + + { + struct ggml_tensor * embeddings = ggml_graph_get_tensor(gf, "embeddings"); + + void* zero_mem = malloc(ggml_nbytes(embeddings)); + memset(zero_mem, 0, ggml_nbytes(embeddings)); + ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); + free(zero_mem); + } + + { + struct ggml_tensor * positions = ggml_graph_get_tensor(gf, "positions"); + + int* positions_data = (int*)malloc(ggml_nbytes(positions)); + for (int i = 0; i < num_positions; i++) { + positions_data[i] = i; + } + ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); + free(positions_data); + } + + { + struct ggml_tensor * patches = ggml_graph_get_tensor(gf, "patches"); + int* patches_data = (int*)malloc(ggml_nbytes(patches)); + for (int i = 0; i < num_patches; i++) { + patches_data[i] = i + 1; + } + ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); + free(patches_data); + } if (ggml_backend_is_cpu(ctx->backend)) { ggml_backend_cpu_set_n_threads(ctx->backend, n_threads); diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index eee9d4de3..2e2a8ce08 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" #include "common.h" #include "train.h" #include "llama.h" @@ -19,8 +20,6 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif -static const size_t tensor_alignment = 32; - struct my_llama_hparams { uint32_t n_vocab = 32000; uint32_t n_ctx = 512; @@ -58,7 +57,7 @@ struct my_llama_layer { struct my_llama_model { struct ggml_context * ctx = NULL; - std::vector data; + ggml_backend_buffer_t data = NULL; my_llama_hparams hparams; @@ -147,39 +146,6 @@ static void set_param_model(struct my_llama_model * model) { } } -static void alloc_model(struct ggml_allocr * alloc, struct my_llama_model * model) { - ggml_allocr_alloc(alloc, model->tok_embeddings); - ggml_allocr_alloc(alloc, model->norm); - ggml_allocr_alloc(alloc, model->output); - for (uint32_t i = 0; i < model->layers.size(); ++i) { - auto & layer = model->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm); - ggml_allocr_alloc(alloc, layer.wq); - ggml_allocr_alloc(alloc, layer.wk); - ggml_allocr_alloc(alloc, layer.wv); - ggml_allocr_alloc(alloc, layer.wo); - ggml_allocr_alloc(alloc, layer.ffn_norm); - ggml_allocr_alloc(alloc, layer.w1); - ggml_allocr_alloc(alloc, layer.w2); - ggml_allocr_alloc(alloc, layer.w3); - } - ggml_allocr_alloc(alloc, model->tok_embeddings->grad); - ggml_allocr_alloc(alloc, model->norm->grad); - ggml_allocr_alloc(alloc, model->output->grad); - for (uint32_t i = 0; i < model->layers.size(); ++i) { - auto & layer = model->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm->grad); - ggml_allocr_alloc(alloc, layer.wq->grad); - ggml_allocr_alloc(alloc, layer.wk->grad); - ggml_allocr_alloc(alloc, layer.wv->grad); - ggml_allocr_alloc(alloc, layer.wo->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm->grad); - ggml_allocr_alloc(alloc, layer.w1->grad); - ggml_allocr_alloc(alloc, layer.w2->grad); - 
ggml_allocr_alloc(alloc, layer.w3->grad); - } -} - static void init_model(struct my_llama_model * model) { const auto & hparams = model->hparams; @@ -252,17 +218,8 @@ static void init_model(struct my_llama_model * model) { set_param_model(model); - // measure data size - size_t size = 0; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { - size += GGML_PAD(ggml_nbytes(t), tensor_alignment); - } - // allocate data - struct ggml_allocr * alloc = NULL; - model->data.resize(size + tensor_alignment); - alloc = ggml_allocr_new(model->data.data(), model->data.size(), tensor_alignment); - alloc_model(alloc, model); + model->data = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cpu_buffer_type()); } static void randomize_model(struct my_llama_model * model, int seed, float mean, float std, float min, float max) { @@ -297,7 +254,7 @@ static void randomize_model(struct my_llama_model * model, int seed, float mean, static struct ggml_tensor * llama_build_train_graphs( struct my_llama_model * model, - struct ggml_allocr * alloc, + ggml_gallocr_t alloc, struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, @@ -308,7 +265,8 @@ static struct ggml_tensor * llama_build_train_graphs( const int n_tokens, const int n_batch, const bool enable_flash_attn, - const bool enable_checkpointing) { + const bool enable_checkpointing, + const bool measure_only) { ggml_set_scratch(ctx, { 0, 0, nullptr, }); const int n_past = 0; @@ -334,13 +292,7 @@ static struct ggml_tensor * llama_build_train_graphs( // KQ_pos - contains the positions struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, N); - ggml_allocr_alloc(alloc, KQ_pos); - if (!ggml_allocr_is_measure(alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < N; ++i) { - data[i] = n_past + i; - } - } + ggml_set_input(KQ_pos); // rope has so much parameters that we make a custom function for it auto rope = [ctx, KQ_pos, n_rot, n_ctx, rope_freq_base, rope_freq_scale] @@ -448,21 +400,31 @@ static struct ggml_tensor * llama_build_train_graphs( // KQ_pos ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); - - ggml_allocr_alloc(alloc, t36->grad); + ggml_set_input(t36->grad); // allocating checkpoints in one block to reduce memory fragmentation // note: they will be freed in reverse order for (int i = 0; i < (int) checkpoints.size(); ++i) { if (checkpoints[i]->data == NULL && checkpoints[i]->view_src == NULL) { - ggml_allocr_alloc(alloc, checkpoints[i]); + ggml_set_input(checkpoints[i]); } } //int n_leafs_after = gb->n_leafs; //int n_nodes_after = gb->n_nodes; + if (measure_only) { + // FIXME: will still allocate + ggml_gallocr_reserve(alloc, gb); + } else { + ggml_gallocr_alloc_graph(alloc, gb); - ggml_allocr_alloc_graph(alloc, gb); + if (!measure_only) { + int * data = (int *) KQ_pos->data; + for (int i = 0; i < N; ++i) { + data[i] = n_past + i; + } + } + } // remove the additional nodes and leafs for (int i = n_leafs_before; i < gb->n_leafs; ++i) { @@ -1046,7 +1008,7 @@ int main(int argc, char ** argv) { printf("%s: seen train_samples %llu\n", __func__, (long long unsigned) train->train_samples); printf("%s: seen train_tokens %llu\n", __func__, (long long unsigned) train->train_tokens); printf("%s: completed train_epochs %llu\n", __func__, (long long unsigned) train->train_epochs); - printf("%s: model_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(model.ctx) + 
model.data.size()), (float) (ggml_used_mem(model.ctx) + model.data.size()) / (1024.0f*1024.0f)); + printf("%s: model_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(model.ctx) + ggml_backend_buffer_get_size(model.data)), (float) (ggml_used_mem(model.ctx) + ggml_backend_buffer_get_size(model.data)) / (1024.0f*1024.0f)); if (params.only_write_model) { save_train_files_data save_data; @@ -1073,11 +1035,6 @@ int main(int argc, char ** argv) { int n_vocab = model.hparams.n_vocab; int n_batch = params.common.n_batch; - std::vector mem_input_data; - std::vector mem_compute_data; - - ggml_allocr * alloc = NULL; - // context for input tensors without their data struct ggml_init_params ctx_input_params = { ggml_tensor_overhead() * 2, // mem_size @@ -1091,16 +1048,10 @@ int main(int argc, char ** argv) { struct ggml_tensor * target_probs = ggml_new_tensor_3d(ctx_input, GGML_TYPE_F32, n_vocab, n_tokens, n_batch); // measure required memory for input tensors - size_t max_input_size = GGML_PAD(ggml_nbytes(tokens_input), tensor_alignment) + - GGML_PAD(ggml_nbytes(target_probs), tensor_alignment) + - tensor_alignment; - printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); - // allocate input tensors - mem_input_data.resize(max_input_size); - alloc = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc, tokens_input); - ggml_allocr_alloc(alloc, target_probs); + ggml_backend_buffer_t input_data = ggml_backend_alloc_ctx_tensors_from_buft(ctx_input, ggml_backend_cpu_buffer_type()); + size_t max_input_size = ggml_backend_buffer_get_size(input_data); + printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1127,7 +1078,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1140,9 +1091,10 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + true ); - size_t max_compute_size = ggml_allocr_max_size(alloc) + tensor_alignment; + size_t max_compute_size = ggml_gallocr_get_buffer_size(alloc, 0); // FIXME: this will still allocate the buffer if (max_compute_size < best_compute_size) { best_compute_size = max_compute_size; best_order = gf->order; @@ -1157,9 +1109,8 @@ int main(int argc, char ** argv) { "invalid"); // allocate compute tensors - mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1172,7 +1123,8 @@ int main(int argc, char ** argv) { &logits, tokens_input, 
target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + false ); std::vector train_tokens; diff --git a/ggml-alloc.c b/ggml-alloc.c index f9be6e1cb..c28c37c4f 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -17,397 +17,11 @@ //#define AT_PRINTF(...) fprintf(stderr, __VA_ARGS__) #define AT_PRINTF(...) -// TODO: GGML_PAD ? -static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { - assert(alignment && !(alignment & (alignment - 1))); // power of 2 - size_t align = (alignment - (((uintptr_t)buffer + offset) % alignment)) % alignment; - return offset + align; -} -struct free_block { - void * addr; - size_t size; -}; - -struct ggml_tallocr { - struct ggml_backend_buffer * buffer; - bool buffer_owned; - void * base; - size_t alignment; - - int n_free_blocks; - struct free_block free_blocks[MAX_FREE_BLOCKS]; - - size_t max_size; - - bool measure; - -#ifdef GGML_ALLOCATOR_DEBUG - struct ggml_tensor * allocated_tensors[1024]; -#endif -}; - -#ifdef GGML_ALLOCATOR_DEBUG -static void add_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i] == NULL) { - alloc->allocated_tensors[i] = tensor; - return; - } - } - GGML_ASSERT(!"out of allocated_tensors"); -} -static void remove_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i] == tensor || - (alloc->allocated_tensors[i] != NULL && alloc->allocated_tensors[i]->data == tensor->data)) { - alloc->allocated_tensors[i] = NULL; - return; - } - } - printf("tried to free tensor %s not found\n", tensor->name); - GGML_ASSERT(!"tensor not found"); -} -#endif - -// check if a tensor is allocated by this buffer -static bool ggml_tallocr_is_own(ggml_tallocr_t alloc, const struct ggml_tensor * tensor) { - return tensor->buffer == alloc->buffer && (!tensor->view_src || tensor->view_src->buffer == alloc->buffer); -} - -static bool ggml_is_view(struct ggml_tensor * t) { +static bool ggml_is_view(const struct ggml_tensor * t) { return t->view_src != NULL; } -void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - GGML_ASSERT(!ggml_is_view(tensor)); // views generally get data pointer from one of their sources - GGML_ASSERT(tensor->data == NULL); // avoid allocating tensor which already has memory allocated - - size_t size = ggml_backend_buffer_get_alloc_size(alloc->buffer, tensor); - size = aligned_offset(NULL, size, alloc->alignment); - - AT_PRINTF("%s: allocating %s (%zu bytes) - ", __func__, tensor->name, size); - - size_t max_avail = 0; - - // find the best fitting free block besides the last block - int best_fit_block = -1; - size_t best_fit_size = SIZE_MAX; - for (int i = 0; i < alloc->n_free_blocks - 1; i++) { - struct free_block * block = &alloc->free_blocks[i]; - max_avail = MAX(max_avail, block->size); - if (block->size >= size && block->size <= best_fit_size) { - best_fit_block = i; - best_fit_size = block->size; - } - } - - if (best_fit_block == -1) { - // the last block is our last resort - struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; - max_avail = MAX(max_avail, block->size); - if (block->size >= size) { - best_fit_block = alloc->n_free_blocks - 1; - } else { - fprintf(stderr, "%s: not enough space in the buffer to allocate %s (needed %zu, largest block available %zu)\n", - __func__, tensor->name, size, max_avail); - GGML_ASSERT(!"not 
enough space in the buffer"); - return; - } - } - - struct free_block * block = &alloc->free_blocks[best_fit_block]; - void * addr = block->addr; - block->addr = (char*)block->addr + size; - block->size -= size; - if (block->size == 0) { - // remove block if empty - alloc->n_free_blocks--; - for (int j = best_fit_block; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - - AT_PRINTF("block %d, addr %p\n", best_fit_block, addr); - - tensor->data = addr; - tensor->buffer = alloc->buffer; - if (!alloc->measure) { - ggml_backend_buffer_init_tensor(alloc->buffer, tensor); - } - -#ifdef GGML_ALLOCATOR_DEBUG - add_allocated_tensor(alloc, tensor); - size_t cur_max = (char*)addr - (char*)alloc->base + size; - if (cur_max > alloc->max_size) { - printf("max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i]) { - printf("%s (%.2f MB) ", alloc->allocated_tensors[i]->name, ggml_nbytes(alloc->allocated_tensors[i]) / 1024.0 / 1024.0); - } - } - printf("\n"); - } -#endif - - alloc->max_size = MAX(alloc->max_size, (char*)addr - (char*)alloc->base + size); -} - -// this is a very naive implementation, but for our case the number of free blocks should be very small -static void ggml_tallocr_free_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - if (ggml_tallocr_is_own(alloc, tensor) == false) { - // the tensor was not allocated in this buffer - // this can happen because the graph allocator will try to free weights and other tensors from different buffers - // the easiest way to deal with this is just to ignore it - // AT_PRINTF("ignoring %s (their buffer: %p, our buffer: %p)\n", tensor->name, (void *)tensor->buffer, (void *)alloc->buffer); - return; - } - - void * ptr = tensor->data; - - size_t size = ggml_backend_buffer_get_alloc_size(alloc->buffer, tensor); - size = aligned_offset(NULL, size, alloc->alignment); - AT_PRINTF("%s: freeing %s at %p (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, ptr, size, alloc->n_free_blocks); - -#ifdef GGML_ALLOCATOR_DEBUG - remove_allocated_tensor(alloc, tensor); -#endif - - // see if we can merge with an existing block - for (int i = 0; i < alloc->n_free_blocks; i++) { - struct free_block * block = &alloc->free_blocks[i]; - // check if ptr is at the end of the block - if ((char*)block->addr + block->size == ptr) { - block->size += size; - // check if we can merge with the next block - if (i < alloc->n_free_blocks - 1 && (char*)block->addr + block->size == alloc->free_blocks[i+1].addr) { - block->size += alloc->free_blocks[i+1].size; - alloc->n_free_blocks--; - for (int j = i+1; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - return; - } - // check if ptr is at the beginning of the block - if ((char*)ptr + size == block->addr) { - block->addr = ptr; - block->size += size; - // check if we can merge with the previous block - if (i > 0 && (char*)alloc->free_blocks[i-1].addr + alloc->free_blocks[i-1].size == block->addr) { - alloc->free_blocks[i-1].size += block->size; - alloc->n_free_blocks--; - for (int j = i; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - return; - } - } - // otherwise, add a new block - GGML_ASSERT(alloc->n_free_blocks < MAX_FREE_BLOCKS && "out of free blocks"); - // insert the new block in the correct position to keep the array sorted by address (to make merging blocks faster) - int insert_pos = 0; - while (insert_pos < 
alloc->n_free_blocks && alloc->free_blocks[insert_pos].addr < ptr) { - insert_pos++; - } - // shift all blocks from insert_pos onward to make room for the new block - for (int i = alloc->n_free_blocks; i > insert_pos; i--) { - alloc->free_blocks[i] = alloc->free_blocks[i-1]; - } - // insert the new block - alloc->free_blocks[insert_pos].addr = ptr; - alloc->free_blocks[insert_pos].size = size; - alloc->n_free_blocks++; -} - -void ggml_tallocr_reset(ggml_tallocr_t alloc) { - alloc->n_free_blocks = 1; - size_t align_offset = aligned_offset(alloc->base, 0, alloc->alignment); - alloc->free_blocks[0].addr = (char *)alloc->base + align_offset; - - if (alloc->measure) { - alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows - } else { - alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; - ggml_backend_buffer_reset(alloc->buffer); - } -} - -ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment) { - struct ggml_backend_buffer * buffer = ggml_backend_cpu_buffer_from_ptr(data, size); - - ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); - - *alloc = (struct ggml_tallocr) { - /*.buffer = */ buffer, - /*.buffer_owned = */ true, - /*.base = */ ggml_backend_buffer_get_base(buffer), - /*.alignment = */ alignment, - /*.n_free_blocks = */ 0, - /*.free_blocks = */ {{0}}, - /*.max_size = */ 0, - /*.measure = */ false, -#ifdef GGML_ALLOCATOR_DEBUG - /*.allocated_tensors = */ {0}, -#endif - }; - - ggml_tallocr_reset(alloc); - - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment) { - ggml_tallocr_t alloc = ggml_tallocr_new((void *)0x1000, SIZE_MAX/2, alignment); - alloc->measure = true; - - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft) { - // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, 1); - - // TODO: move alloc initialization to a common ggml_tallocr_new_impl function - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); - alloc->buffer_owned = true; - alloc->measure = true; - ggml_tallocr_reset(alloc); - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { - return ggml_tallocr_new_measure_from_buft(ggml_backend_get_default_buffer_type(backend)); -} - -ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size) { - // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, size); - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); - alloc->buffer_owned = true; - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { - return ggml_tallocr_new_from_buft(ggml_backend_get_default_buffer_type(backend), size); -} - -ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer) { - ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); - - *alloc = (struct ggml_tallocr) { - /*.buffer = */ buffer, - /*.buffer_owned = */ false, - /*.base = */ ggml_backend_buffer_get_base(buffer), - /*.alignment = */ ggml_backend_buffer_get_alignment(buffer), - /*.n_free_blocks = */ 0, - /*.free_blocks = */ {{0}}, - /*.max_size = */ 0, - /*.measure = */ false, -#ifdef GGML_ALLOCATOR_DEBUG - /*.allocated_tensors = */ {0}, 
-#endif - }; - - ggml_tallocr_reset(alloc); - - return alloc; -} - -struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t alloc) { - return alloc->buffer; -} - -void ggml_tallocr_free(ggml_tallocr_t alloc) { - if (alloc == NULL) { - return; - } - - if (alloc->buffer_owned) { - ggml_backend_buffer_free(alloc->buffer); - } - free(alloc); -} - -bool ggml_tallocr_is_measure(ggml_tallocr_t alloc) { - return alloc->measure; -} - -size_t ggml_tallocr_max_size(ggml_tallocr_t alloc) { - // FIXME: changes in the tensor sizes compared to the measure graph may cause allocations to fail - // to avoid this, we add a 10% margin to the buffer size - return alloc->max_size + alloc->max_size/10; -} - -// graph allocator - -struct hash_node { - int n_children; - int n_views; -}; - -struct ggml_gallocr { - ggml_tallocr_t talloc; - struct ggml_hash_set hash_set; - struct hash_node * hash_values; - size_t hash_values_size; - ggml_tallocr_t * hash_allocs; - int * parse_seq; - int parse_seq_len; -}; - -ggml_gallocr_t ggml_gallocr_new(void) { - ggml_gallocr_t galloc = (ggml_gallocr_t)malloc(sizeof(struct ggml_gallocr)); - - *galloc = (struct ggml_gallocr) { - /*.talloc = */ NULL, - /*.hash_set = */ {0}, - /*.hash_values = */ NULL, - /*.hash_values_size = */ 0, - /*.hash_allocs = */ NULL, - /*.parse_seq = */ NULL, - /*.parse_seq_len = */ 0, - }; - - return galloc; -} - -void ggml_gallocr_free(ggml_gallocr_t galloc) { - if (galloc == NULL) { - return; - } - - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); - } - if (galloc->hash_values != NULL) { - free(galloc->hash_values); - } - if (galloc->hash_allocs != NULL) { - free(galloc->hash_allocs); - } - if (galloc->parse_seq != NULL) { - free(galloc->parse_seq); - } - free(galloc); -} - -void ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n) { - free(galloc->parse_seq); - galloc->parse_seq = malloc(sizeof(int) * n); - - for (int i = 0; i < n; i++) { - galloc->parse_seq[i] = list[i]; - } - galloc->parse_seq_len = n; -} - -static struct hash_node * hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { - size_t i = ggml_hash_find_or_insert(galloc->hash_set, t); - return &galloc->hash_values[i]; -} - static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml_tensor * b) { if (a->type != b->type) { return false; @@ -447,106 +61,511 @@ static bool ggml_op_can_inplace(enum ggml_op op) { } } -static ggml_tallocr_t node_tallocr(ggml_gallocr_t galloc, struct ggml_tensor * node) { - if (galloc->talloc != NULL) { - return galloc->talloc; - } - - return galloc->hash_allocs[ggml_hash_find_or_insert(galloc->hash_set, node)]; +// TODO: GGML_PAD ? 
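+// returns the smallest offset >= `offset` such that `buffer + offset` is aligned to `alignment` (asserted to be a power of two)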
+static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { + assert(alignment && !(alignment & (alignment - 1))); // power of 2 + size_t align = (alignment - (((uintptr_t)buffer + offset) % alignment)) % alignment; + return offset + align; } -static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool update_backend) { - ggml_tallocr_t alloc = node_tallocr(galloc, view); +// tallocr +struct ggml_tallocr { + ggml_backend_buffer_t buffer; + void * base; + size_t alignment; + size_t offset; +}; - GGML_ASSERT(view->view_src != NULL && view->view_src->data != NULL); - if (update_backend) { - view->backend = view->view_src->backend; +ggml_tallocr_t ggml_tallocr_new(ggml_backend_buffer_t buffer) { + ggml_tallocr_t talloc = malloc(sizeof(struct ggml_tallocr)); + if (talloc == NULL) { + return NULL; } - // views are initialized in the alloc buffer rather than the view_src buffer - view->buffer = alloc->buffer; - view->data = (char *)view->view_src->data + view->view_offs; - assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->buft == alloc->buffer->buft); + void * base = ggml_backend_buffer_get_base(buffer); + size_t align = ggml_backend_buffer_get_alignment(buffer); - if (!alloc->measure) { - ggml_backend_buffer_init_tensor(alloc->buffer, view); - } + assert(align && !(align & (align - 1))); // power of 2 + + *talloc = (struct ggml_tallocr) { + /*.buffer = */ buffer, + /*.base = */ base, + /*.alignment = */ align, + /*.offset = */ aligned_offset(base, 0, align), + }; + return talloc; } -static void allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { - ggml_tallocr_t alloc = node_tallocr(galloc, node); +void ggml_tallocr_free(ggml_tallocr_t talloc) { + free(talloc); +} - if (node->data == NULL) { - if (ggml_is_view(node)) { - init_view(galloc, node, true); +void ggml_tallocr_alloc(ggml_tallocr_t talloc, struct ggml_tensor * tensor) { + size_t size = ggml_backend_buffer_get_alloc_size(talloc->buffer, tensor); + size = GGML_PAD(size, talloc->alignment); + + if (talloc->offset + size > ggml_backend_buffer_get_size(talloc->buffer)) { + fprintf(stderr, "%s: not enough space in the buffer to allocate %s (needed %zu, available %zu)\n", + __func__, tensor->name, size, ggml_backend_buffer_get_size(talloc->buffer) - talloc->offset); + GGML_ASSERT(!"not enough space in the buffer"); + return; + } + + void * addr = (char *)ggml_backend_buffer_get_base(talloc->buffer) + talloc->offset; + talloc->offset += size; + + assert(((uintptr_t)addr % talloc->alignment) == 0); + + ggml_backend_tensor_alloc(talloc->buffer, tensor, addr); +} + +// dynamic tensor allocator + +struct free_block { + size_t offset; + size_t size; +}; + +struct ggml_dyn_tallocr { + size_t alignment; + int n_free_blocks; + struct free_block free_blocks[MAX_FREE_BLOCKS]; + size_t max_size; + +#ifdef GGML_ALLOCATOR_DEBUG + struct { + const struct ggml_tensor * tensor; + size_t offset; + } allocated_tensors[1024]; +#endif +}; + +#ifdef GGML_ALLOCATOR_DEBUG +static void add_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].tensor == NULL) { + alloc->allocated_tensors[i].tensor = tensor; + alloc->allocated_tensors[i].offset = offset; + return; + } + } + GGML_ASSERT(!"out of allocated_tensors"); +} +static void remove_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if 
(alloc->allocated_tensors[i].offset == offset) { + alloc->allocated_tensors[i].tensor = NULL; + return; + } + } + fprintf(stderr, "tried to free tensor %s not found\n", tensor->name); + GGML_ASSERT(!"tensor not found"); +} +#endif + +static size_t ggml_dyn_tallocr_alloc(struct ggml_dyn_tallocr * alloc, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: allocating %s (%zu bytes) - ", __func__, tensor->name, size); + + size_t max_avail = 0; + + // find the best fitting free block besides the last block + int best_fit_block = -1; + size_t best_fit_size = SIZE_MAX; + for (int i = 0; i < alloc->n_free_blocks - 1; i++) { + struct free_block * block = &alloc->free_blocks[i]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size && block->size <= best_fit_size) { + best_fit_block = i; + best_fit_size = block->size; + } + } + + if (best_fit_block == -1) { + // the last block is our last resort + struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size) { + best_fit_block = alloc->n_free_blocks - 1; } else { - // see if we can reuse a parent's buffer (inplace) - if (ggml_op_can_inplace(node->op)) { - for (int i = 0; i < GGML_MAX_SRC; i++) { - struct ggml_tensor * parent = node->src[i]; - if (parent == NULL) { - break; - } + // this should never happen + fprintf(stderr, "%s: not enough space in the buffer to allocate %zu bytes, largest block available %zu bytes\n", + __func__, size, max_avail); + GGML_ASSERT(!"not enough space in the buffer"); + GGML_UNREACHABLE(); + } + } - // if the node's data is external, then we cannot re-use it - if (ggml_tallocr_is_own(alloc, parent) == false) { - AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); - continue; - } + struct free_block * block = &alloc->free_blocks[best_fit_block]; + size_t offset = block->offset; + block->offset = offset + size; + block->size -= size; + if (block->size == 0) { + // remove block if empty + alloc->n_free_blocks--; + for (int j = best_fit_block; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } - struct hash_node * p_hn = hash_get(galloc, parent); - if (parent->data != NULL && p_hn->n_children == 1 && p_hn->n_views == 0 && ggml_are_same_layout(node, parent)) { - if (ggml_is_view(parent)) { - struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(galloc, view_src); - if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { - // TODO: the offset of the view parent must be kept to ensure that the op doesn't overwrite - // the parent's data that it will need later (same layout requirement). the problem is that then - // we cannot free the tensor because the original address of the allocation is lost. 
- // adding a view_src pointer to the tensor would solve this and simplify the code dealing with views - // for now, we only reuse the parent's data if the offset is zero (view_src->data == parent->data) - AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); - node->view_src = view_src; - view_src_hn->n_views += 1; - init_view(galloc, node, false); - return; - } - } else { - AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); - node->view_src = parent; - p_hn->n_views += 1; - init_view(galloc, node, false); + AT_PRINTF("block %d, offset %zu\n", best_fit_block, offset); + +#ifdef GGML_ALLOCATOR_DEBUG + add_allocated_tensor(alloc, offset, tensor); + size_t cur_max = offset + size; + if (cur_max > alloc->max_size) { + // sort allocated_tensors by offset + for (int i = 0; i < 1024; i++) { + for (int j = i + 1; j < 1024; j++) { + if (alloc->allocated_tensors[i].offset > alloc->allocated_tensors[j].offset) { + const struct ggml_tensor * tmp_tensor = alloc->allocated_tensors[i].tensor; + size_t tmp_offset = alloc->allocated_tensors[i].offset; + alloc->allocated_tensors[i].tensor = alloc->allocated_tensors[j].tensor; + alloc->allocated_tensors[i].offset = alloc->allocated_tensors[j].offset; + alloc->allocated_tensors[j].tensor = tmp_tensor; + alloc->allocated_tensors[j].offset = tmp_offset; + } + } + } + fprintf(stderr, "max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].tensor) { + fprintf(stderr, "%s [%zx-%zx] (%.2f MB) ", alloc->allocated_tensors[i].tensor->name, + alloc->allocated_tensors[i].offset, + alloc->allocated_tensors[i].offset + ggml_nbytes(alloc->allocated_tensors[i].tensor), + ggml_nbytes(alloc->allocated_tensors[i].tensor) / 1024.0 / 1024.0); + } + } + fprintf(stderr, "\n"); + } +#endif + + alloc->max_size = MAX(alloc->max_size, offset + size); + + return offset; + + GGML_UNUSED(tensor); +} + +// this is a very naive implementation, but for our case the number of free blocks should be very small +static void ggml_dyn_tallocr_free_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: freeing %s at %zu (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, offset, size, alloc->n_free_blocks); + +#ifdef GGML_ALLOCATOR_DEBUG + remove_allocated_tensor(alloc, offset, tensor); +#endif + + // see if we can merge with an existing block + for (int i = 0; i < alloc->n_free_blocks; i++) { + struct free_block * block = &alloc->free_blocks[i]; + // check if ptr is at the end of the block + if (block->offset + block->size == offset) { + block->size += size; + // check if we can merge with the next block + if (i < alloc->n_free_blocks - 1 && block->offset + block->size == alloc->free_blocks[i+1].offset) { + block->size += alloc->free_blocks[i+1].size; + alloc->n_free_blocks--; + for (int j = i+1; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } + return; + } + // check if ptr is at the beginning of the block + if (offset + size == block->offset) { + block->offset = offset; + block->size += size; + // check if we can merge with the previous block + if (i > 0 && alloc->free_blocks[i-1].offset + alloc->free_blocks[i-1].size == block->offset) { + alloc->free_blocks[i-1].size += block->size; + alloc->n_free_blocks--; + for (int j = i; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = 
alloc->free_blocks[j+1]; + } + } + return; + } + } + // otherwise, add a new block + GGML_ASSERT(alloc->n_free_blocks < MAX_FREE_BLOCKS && "out of free blocks"); + // insert the new block in the correct position to keep the array sorted by address (to make merging blocks faster) + int insert_pos = 0; + while (insert_pos < alloc->n_free_blocks && alloc->free_blocks[insert_pos].offset < offset) { + insert_pos++; + } + // shift all blocks from insert_pos onward to make room for the new block + for (int i = alloc->n_free_blocks; i > insert_pos; i--) { + alloc->free_blocks[i] = alloc->free_blocks[i-1]; + } + // insert the new block + alloc->free_blocks[insert_pos].offset = offset; + alloc->free_blocks[insert_pos].size = size; + alloc->n_free_blocks++; + + GGML_UNUSED(tensor); +} + +static void ggml_dyn_tallocr_reset(struct ggml_dyn_tallocr * alloc) { + alloc->n_free_blocks = 1; + alloc->free_blocks[0].offset = 0; + alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows + alloc->max_size = 0; +} + +static struct ggml_dyn_tallocr * ggml_dyn_tallocr_new(size_t alignment) { + struct ggml_dyn_tallocr * alloc = (struct ggml_dyn_tallocr *)malloc(sizeof(struct ggml_dyn_tallocr)); + + *alloc = (struct ggml_dyn_tallocr) { + /*.alignment = */ alignment, + /*.n_free_blocks = */ 0, + /*.free_blocks = */ {{0}}, + /*.max_size = */ 0, +#ifdef GGML_ALLOCATOR_DEBUG + /*.allocated_tensors = */ {{0}}, +#endif + }; + + ggml_dyn_tallocr_reset(alloc); + + return alloc; +} + +static void ggml_dyn_tallocr_free(struct ggml_dyn_tallocr * alloc) { + free(alloc); +} + +static size_t ggml_dyn_tallocr_max_size(struct ggml_dyn_tallocr * alloc) { + return alloc->max_size; +} + + +///////////////////////////////////// + +// graph allocator + +struct hash_node { + int n_children; + int n_views; + int buffer_id; + size_t offset; // offset within the buffer + bool allocated; +}; + +// +struct tensor_alloc { + size_t offset; + size_t size_max; // 0 = pre-allocated, unused, or view +}; + +struct node_alloc { + int buffer_id; + struct tensor_alloc dst; + struct tensor_alloc src[GGML_MAX_SRC]; +}; + +struct ggml_gallocr { + ggml_backend_buffer_type_t * bufts; // [n_buffers] + ggml_backend_buffer_t * buffers; // [n_buffers] + struct ggml_dyn_tallocr ** buf_tallocs; // [n_buffers] + int n_buffers; + + struct ggml_hash_set hash_set; + struct hash_node * hash_values; // [hash_set.size] + + struct node_alloc * node_allocs; // [n_nodes] + int n_nodes; +}; + +ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { + ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(sizeof(struct ggml_gallocr), 1); + GGML_ASSERT(galloc != NULL); + + galloc->bufts = calloc(sizeof(ggml_backend_buffer_type_t) * n_bufs, 1); + GGML_ASSERT(galloc->bufts != NULL); + + galloc->buffers = calloc(sizeof(ggml_backend_buffer_t) * n_bufs, 1); + GGML_ASSERT(galloc->buffers != NULL); + + galloc->buf_tallocs = calloc(sizeof(struct ggml_dyn_tallocr *) * n_bufs, 1); + GGML_ASSERT(galloc->buf_tallocs != NULL); + + for (int i = 0; i < n_bufs; i++) { + galloc->bufts[i] = bufts[i]; + galloc->buffers[i] = NULL; + size_t alignment = ggml_backend_buft_get_alignment(bufts[i]); + galloc->buf_tallocs[i] = ggml_dyn_tallocr_new(alignment); + } + galloc->n_buffers = n_bufs; + + return galloc; +} + +ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft) { + return ggml_gallocr_new_n(&buft, 1); +} + +void ggml_gallocr_free(ggml_gallocr_t galloc) { + if (galloc == NULL) { + return; + } + 
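+    // free the backend buffers and the per-buffer dynamic allocators owned by this graph allocator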
+ for (int i = 0; i < galloc->n_buffers; i++) { + if (galloc->buffers != NULL) { + ggml_backend_buffer_free(galloc->buffers[i]); + } + if (galloc->buf_tallocs != NULL) { + ggml_dyn_tallocr_free(galloc->buf_tallocs[i]); + } + } + + free(galloc->hash_set.keys); + free(galloc->hash_values); + free(galloc->bufts); + free(galloc->buffers); + free(galloc->buf_tallocs); + free(galloc->node_allocs); + free(galloc); +} + +typedef struct ggml_gallocr * ggml_gallocr_t; + +static struct hash_node * ggml_gallocr_hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { + size_t i = ggml_hash_find_or_insert(galloc->hash_set, t); + return &galloc->hash_values[i]; +} + +static bool ggml_gallocr_is_own(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_set_node_offset(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id, size_t offset) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + hn->allocated = true; +} + +static bool ggml_gallocr_is_allocated(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return t->data != NULL || ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + + if (!ggml_gallocr_is_allocated(galloc, node) && !ggml_is_view(node)) { + hn->allocated = true; + assert(hn->offset == 0); + + // try to reuse a parent's buffer (inplace) + if (ggml_op_can_inplace(node->op)) { + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * parent = node->src[i]; + if (parent == NULL) { + break; + } + + // if the node's data is external, then we cannot re-use it + if (!ggml_gallocr_is_own(galloc, parent)) { + AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); + continue; + } + + // outputs cannot be reused + if (parent->flags & GGML_TENSOR_FLAG_OUTPUT || (parent->view_src != NULL && parent->view_src->flags & GGML_TENSOR_FLAG_OUTPUT)) { + AT_PRINTF("not reusing parent %s for %s as it is an output\n", parent->name, node->name); + continue; + } + + if (!ggml_are_same_layout(node, parent)) { + AT_PRINTF("not reusing parent %s for %s as layouts are different\n", parent->name, node->name); + continue; + } + + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + if (p_hn->n_children == 1 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { + AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); + assert(view_src_hn->offset == p_hn->offset); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + view_src_hn->allocated = false; return; } + } else { + AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + return; } } } - ggml_tallocr_alloc(alloc, node); } + // allocate tensor from the buffer + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + size_t size = 
ggml_backend_buft_get_alloc_size(buft, node); + size_t offset = ggml_dyn_tallocr_alloc(alloc, size, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + return; } } -static void free_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { - ggml_tallocr_t alloc = node_tallocr(galloc, node); +static void ggml_gallocr_free_node(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id) { + // graph outputs are never freed + if (node->flags & GGML_TENSOR_FLAG_OUTPUT) { + AT_PRINTF("not freeing output %s\n", node->name); + return; + } - ggml_tallocr_free_tensor(alloc, node); + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + size_t offset = hn->offset; + size_t size = ggml_backend_buft_get_alloc_size(buft, node); + ggml_dyn_tallocr_free_tensor(alloc, offset, size, node); + hn->allocated = false; } -static void ggml_tallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * gf) { - const int * parse_seq = galloc->parse_seq; - int parse_seq_len = galloc->parse_seq_len; +static int get_node_buffer_id(const int * node_buffer_ids, int i) { + return node_buffer_ids ? node_buffer_ids[i] : 0; +} + +static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids) { + // clear hash tables + memset(galloc->hash_set.keys, 0, galloc->hash_set.size * sizeof(struct ggml_tensor *)); + memset(galloc->hash_values, 0, galloc->hash_set.size * sizeof(struct hash_node)); + + // allocate all graph inputs first to avoid overwriting them + for (int i = 0; i < graph->n_nodes; i++) { + if (graph->nodes[i]->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + if (graph->nodes[i]->src[j] == NULL) { + break; + } + if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); + } + } + } // count number of children and views - for (int i = 0; i < gf->n_nodes; i++) { - struct ggml_tensor * node = gf->nodes[i]; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view(node)) { struct ggml_tensor * view_src = node->view_src; - hash_get(galloc, view_src)->n_views += 1; - if (node->buffer == NULL && node->data != NULL) { - // view of a pre-allocated tensor, didn't call init_view() yet - init_view(galloc, node, true); - } + ggml_gallocr_hash_get(galloc, view_src)->n_views += 1; } for (int j = 0; j < GGML_MAX_SRC; j++) { @@ -554,227 +573,283 @@ static void ggml_tallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr if (parent == NULL) { break; } - hash_get(galloc, parent)->n_children += 1; - if (ggml_is_view(parent) && parent->buffer == NULL && parent->data != NULL) { - init_view(galloc, parent, true); - } + ggml_gallocr_hash_get(galloc, parent)->n_children += 1; } } // allocate tensors - // if we have parse_seq then we allocate nodes following the list, and we only free nodes at barriers - int last_barrier_pos = 0; - int n_nodes = parse_seq_len ? 
parse_seq_len : gf->n_nodes; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + int buffer_id = get_node_buffer_id(node_buffer_ids, i); - for (int ind = 0; ind < n_nodes; ind++) { - // allocate a node if there is no parse_seq or this is not a barrier - if (parse_seq_len == 0 || parse_seq[ind] != -1) { - int i = parse_seq_len ? parse_seq[ind] : ind; - struct ggml_tensor * node = gf->nodes[i]; - - // allocate parents (leafs) - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - allocate_node(galloc, parent); + // allocate parents (only leafs need to be allocated at this point) + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; } - - // allocate node - allocate_node(galloc, node); - - AT_PRINTF("exec: %s (%s) <= ", ggml_op_name(node->op), node->name); - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - AT_PRINTF("%s", parent->name); - if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { - AT_PRINTF(", "); - } - } - AT_PRINTF("\n"); + ggml_gallocr_allocate_node(galloc, parent, buffer_id); } + // allocate node + ggml_gallocr_allocate_node(galloc, node, buffer_id); + + AT_PRINTF("exec: %s (%s) <= ", ggml_op_desc(node), node->name); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + AT_PRINTF("%s", parent->name); + if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { + AT_PRINTF(", "); + } + } + AT_PRINTF("\n"); + // update parents - // update immediately if there is no parse_seq - // update only at barriers if there is parse_seq - if ((parse_seq_len == 0) || parse_seq[ind] == -1) { - int update_start = parse_seq_len ? last_barrier_pos : ind; - int update_end = parse_seq_len ? ind : ind + 1; - for (int i = update_start; i < update_end; i++) { - int node_i = parse_seq_len ? 
parse_seq[i] : i; - struct ggml_tensor * node = gf->nodes[node_i]; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + p_hn->n_children -= 1; - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - struct hash_node * p_hn = hash_get(galloc, parent); - p_hn->n_children -= 1; - - //AT_PRINTF("parent %s: %d children, %d views\n", parent->name, parent->n_children, parent->n_views); - - if (p_hn->n_children == 0 && p_hn->n_views == 0) { - if (ggml_is_view(parent)) { - struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(galloc, view_src); - view_src_hn->n_views -= 1; - AT_PRINTF("view_src %s: %d children, %d views\n", view_src->name, view_src_hn->n_children, view_src_hn->n_views); - if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0) { - free_node(galloc, view_src); - } - } - else { - free_node(galloc, parent); - } + AT_PRINTF("parent %s: %d children, %d views, allocated: %d\n", + parent->name, p_hn->n_children, p_hn->n_views, p_hn->allocated); + + if (p_hn->n_children == 0 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + view_src_hn->n_views -= 1; + AT_PRINTF("view_src %s: %d children, %d views\n", + view_src->name, view_src_hn->n_children, view_src_hn->n_views); + if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0 && view_src_hn->allocated) { + ggml_gallocr_free_node(galloc, view_src, buffer_id); } } + else if (p_hn->allocated) { + ggml_gallocr_free_node(galloc, parent, buffer_id); + } } AT_PRINTF("\n"); - if (parse_seq_len) { - last_barrier_pos = ind + 1; + } + } +} + +bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids) { + size_t hash_size = graph->visited_hash_table.size; + + // initialize hash table + if (galloc->hash_set.size < hash_size) { + free(galloc->hash_set.keys); + free(galloc->hash_values); + galloc->hash_set.size = hash_size; + galloc->hash_set.keys = calloc(sizeof(struct ggml_tensor *), hash_size); + galloc->hash_values = calloc(sizeof(struct hash_node), hash_size); + GGML_ASSERT(galloc->hash_set.keys != NULL); + GGML_ASSERT(galloc->hash_values != NULL); + } else { + // reset hash table + memset(galloc->hash_set.keys, 0, sizeof(struct ggml_tensor *) * galloc->hash_set.size); + memset(galloc->hash_values, 0, sizeof(struct hash_node) * galloc->hash_set.size); + } + + // reset allocators + for (int i = 0; i < galloc->n_buffers; i++) { + ggml_dyn_tallocr_reset(galloc->buf_tallocs[i]); + } + + // allocate in hash table + ggml_gallocr_alloc_graph_impl(galloc, graph, node_buffer_ids); + + // set the node_allocs from the hash table + if (galloc->n_nodes < graph->n_nodes) { + free(galloc->node_allocs); + galloc->node_allocs = calloc(sizeof(struct node_alloc), graph->n_nodes); + GGML_ASSERT(galloc->node_allocs != NULL); + } + galloc->n_nodes = graph->n_nodes; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + node_alloc->buffer_id = get_node_buffer_id(node_buffer_ids, i); + if (node->view_src || node->data) { + node_alloc->dst.offset = SIZE_MAX; + node_alloc->dst.size_max = 0; + } else { + struct hash_node * hn = 
ggml_gallocr_hash_get(galloc, node); + node_alloc->dst.offset = hn->offset; + node_alloc->dst.size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], node); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (!src || src->view_src || src->data) { + node_alloc->src[j].offset = SIZE_MAX; + node_alloc->src[j].size_max = 0; + } else { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, src); + node_alloc->src[j].offset = hn->offset; + node_alloc->src[j].size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], src); } } } -} -size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph) { - size_t hash_size = graph->visited_hash_table.size; + // reallocate buffers if needed + for (int i = 0; i < galloc->n_buffers; i++) { + size_t cur_size = galloc->buffers[i] ? ggml_backend_buffer_get_size(galloc->buffers[i]) : 0; + size_t new_size = ggml_dyn_tallocr_max_size(galloc->buf_tallocs[i]); - // check if the hash table is initialized and large enough - if (galloc->hash_set.size < hash_size) { - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); + if (new_size > cur_size) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating %s buffer from size %.02f MiB to %.02f MiB\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), cur_size / 1024.0 / 1024.0, new_size / 1024.0 / 1024.0); +#endif + ggml_backend_buffer_free(galloc->buffers[i]); + galloc->buffers[i] = ggml_backend_buft_alloc_buffer(galloc->bufts[i], new_size); + if (galloc->buffers[i] == NULL) { + fprintf(stderr, "%s: failed to allocate %s buffer of size %zu\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), new_size); + return false; + } } - if (galloc->hash_values != NULL) { - free(galloc->hash_values); + } + + return true; +} + +bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph *graph) { + return ggml_gallocr_reserve_n(galloc, graph, NULL); +} + +static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * node_alloc, struct tensor_alloc * tensor_alloc) { + assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); + + if (node->view_src != NULL) { + if (node->buffer == NULL) { + assert(tensor_alloc->offset == SIZE_MAX); + if (node->view_src->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + ggml_backend_view_init(galloc->buffers[node_alloc->buffer_id], node); } - galloc->hash_set.keys = malloc(sizeof(struct ggml_tensor *) * hash_size); - galloc->hash_set.size = hash_size; - galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); + } else { + if (node->data == NULL) { + assert(tensor_alloc->offset != SIZE_MAX); + assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); + void * base = ggml_backend_buffer_get_base(galloc->buffers[node_alloc->buffer_id]); + void * addr = (char *)base + tensor_alloc->offset; + ggml_backend_tensor_alloc(galloc->buffers[node_alloc->buffer_id], node, addr); + } else { + if (node->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + +#ifndef NDEBUG + size_t offset = + (char *)node->data - + (char *)ggml_backend_buffer_get_base(node->buffer); + size_t size = ggml_backend_buffer_get_alloc_size(node->buffer, node); + assert(tensor_alloc->offset == SIZE_MAX || offset == 
tensor_alloc->offset); + assert(tensor_alloc->offset == SIZE_MAX || size <= tensor_alloc->size_max); +#endif + } + } +} + +static bool ggml_gallocr_node_needs_realloc(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * nalloc, struct tensor_alloc * talloc) { + ggml_backend_buffer_type_t buft = galloc->bufts[nalloc->buffer_id]; + size_t node_size = (node->data || node->view_src) ? 0 : ggml_backend_buft_get_alloc_size(buft, node); + return talloc->size_max >= node_size; +} + +static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (galloc->n_nodes != graph->n_nodes) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of nodes\n", __func__); +#endif + return true; } - // reset hash table - memset(galloc->hash_set.keys, 0, sizeof(struct ggml_tensor *) * hash_size); - memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; - galloc->talloc = talloc; - ggml_tallocr_alloc_graph_impl(galloc, graph); - galloc->talloc = NULL; + if (!ggml_gallocr_node_needs_realloc(galloc, node, node_alloc, &node_alloc->dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: node %s is not valid\n", __func__, node->name); +#endif + return true; + } - size_t max_size = ggml_tallocr_max_size(talloc); - - return max_size; -} - -void ggml_gallocr_alloc_graph_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, struct ggml_hash_set hash_set, ggml_tallocr_t * hash_node_talloc) { - const size_t hash_size = hash_set.size; - - GGML_ASSERT(hash_size >= (size_t)(graph->n_nodes + graph->n_leafs)); - - galloc->talloc = NULL; - - // alloc hash_values if needed - if (galloc->hash_values == NULL || galloc->hash_values_size < hash_size) { - free(galloc->hash_values); - galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); - galloc->hash_values_size = hash_size; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + if (!ggml_gallocr_node_needs_realloc(galloc, src, node_alloc, &node_alloc->src[j])) { +#ifndef NDEBUG + fprintf(stderr, "%s: src %d (%s) of node %s is not valid\n", __func__, j, src->name, node->name); +#endif + return true; + } + } } - // free hash_set.keys if needed - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); - } - galloc->hash_set = hash_set; - - // reset hash values - memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); - - galloc->hash_allocs = hash_node_talloc; - - ggml_tallocr_alloc_graph_impl(galloc, graph); - - // remove unowned resources - galloc->hash_set.keys = NULL; - galloc->hash_allocs = NULL; + return false; } -// legacy API wrapper - -struct ggml_allocr { - ggml_tallocr_t talloc; - ggml_gallocr_t galloc; -}; - -static ggml_allocr_t ggml_allocr_new_impl(ggml_tallocr_t talloc) { - ggml_allocr_t alloc = (ggml_allocr_t)malloc(sizeof(struct ggml_allocr)); - *alloc = (struct ggml_allocr) { - /*.talloc = */ talloc, - /*.galloc = */ ggml_gallocr_new(), - }; - return alloc; -} - -ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment) { - return ggml_allocr_new_impl(ggml_tallocr_new(data, size, alignment)); -} - -ggml_allocr_t ggml_allocr_new_measure(size_t alignment) { - return ggml_allocr_new_impl(ggml_tallocr_new_measure(alignment)); -} - -ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer) { - return 
ggml_allocr_new_impl(ggml_tallocr_new_from_buffer(buffer)); -} - -ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size) { - return ggml_allocr_new_impl(ggml_tallocr_new_from_backend(backend, size)); -} - -ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend) { - return ggml_allocr_new_impl(ggml_tallocr_new_measure_from_backend(backend)); -} - -struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc) { - return ggml_tallocr_get_buffer(alloc->talloc); -} - -void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n) { - ggml_gallocr_set_parse_seq(alloc->galloc, list, n); -} - -void ggml_allocr_free(ggml_allocr_t alloc) { - if (alloc == NULL) { - return; +bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (ggml_gallocr_needs_realloc(galloc, graph)) { + if (galloc->n_buffers == 1) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating buffers automatically\n", __func__); +#endif + if (!ggml_gallocr_reserve(galloc, graph)) { + return false; + } + } else { +#ifndef NDEBUG + fprintf(stderr, "%s: cannot reallocate multi buffer graph automatically, call reserve\n", __func__); +#endif + return false; + } } - ggml_gallocr_free(alloc->galloc); - ggml_tallocr_free(alloc->talloc); - free(alloc); + // reset buffers + for (int i = 0; i < galloc->n_buffers; i++) { + // zero size buffers are not allocated + if (galloc->buffers[i] != NULL) { + ggml_backend_buffer_reset(galloc->buffers[i]); + } + } + + // allocate the graph tensors from the previous assignments + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); + } + ggml_gallocr_init_tensor(galloc, node, node_alloc, &node_alloc->dst); + } + + return true; } -bool ggml_allocr_is_measure(ggml_allocr_t alloc) { - return ggml_tallocr_is_measure(alloc->talloc); -} +size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id) { + GGML_ASSERT(buffer_id >= 0 && buffer_id < galloc->n_buffers); -void ggml_allocr_reset(ggml_allocr_t alloc) { - ggml_tallocr_reset(alloc->talloc); -} - -void ggml_allocr_alloc(ggml_allocr_t alloc, struct ggml_tensor * tensor) { - ggml_tallocr_alloc(alloc->talloc, tensor); -} - -size_t ggml_allocr_max_size(ggml_allocr_t alloc) { - return ggml_tallocr_max_size(alloc->talloc); -} - -size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph) { - return ggml_gallocr_alloc_graph(alloc->galloc, alloc->talloc, graph); + if (galloc->buffers[buffer_id] == NULL) { + return 0; + } + return ggml_backend_buffer_get_size(galloc->buffers[buffer_id]); } // utils @@ -795,17 +870,17 @@ static bool alloc_tensor_range(struct ggml_context * ctx, return false; } - ggml_tallocr_t tallocr = ggml_tallocr_new_from_buffer(buffer); + struct ggml_tallocr * tallocr = ggml_tallocr_new(buffer); for (struct ggml_tensor * t = first; t != last; t = ggml_get_next_tensor(ctx, t)) { if (t->data == NULL) { if (t->view_src == NULL) { ggml_tallocr_alloc(tallocr, t); - } else { + } else if (t->buffer == NULL) { ggml_backend_view_init(buffer, t); } } else { - if (t->view_src != NULL) { + if (t->view_src != NULL && t->buffer == NULL) { // view of a pre-allocated tensor ggml_backend_view_init(buffer, t); } @@ -838,7 +913,6 @@ 
ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (this_size > max_size) { - // tensor is too large to fit in a single buffer fprintf(stderr, "%s: tensor %s is too large to fit in a %s buffer (tensor size: %zu, max buffer size: %zu)\n", __func__, t->name, ggml_backend_buft_name(buft), @@ -870,7 +944,6 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (n_buffers == 0) { - // all the tensors in the context are already allocated #ifndef NDEBUG fprintf(stderr, "%s: all tensors in the context are already allocated\n", __func__); #endif diff --git a/ggml-alloc.h b/ggml-alloc.h index 4e5997521..1d9085d15 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -6,88 +6,62 @@ extern "C" { #endif -struct ggml_backend; -struct ggml_backend_buffer; -struct ggml_backend_buffer_type; - -// -// Legacy API -// - -typedef struct ggml_allocr * ggml_allocr_t; - -// initialize allocator for use with CPU backend only -GGML_API ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment); -GGML_API ggml_allocr_t ggml_allocr_new_measure(size_t alignment); - -// initialize allocator for use with ggml-backend -GGML_API ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer); -GGML_API ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer -GGML_API ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend); - -GGML_API struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc); - -// tell the allocator to parse nodes following the order described in the list -// you should call this if your graph are optimized to execute out-of-order -GGML_API void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n); - -GGML_API void ggml_allocr_free (ggml_allocr_t alloc); -GGML_API bool ggml_allocr_is_measure (ggml_allocr_t alloc); -GGML_API void ggml_allocr_reset (ggml_allocr_t alloc); -GGML_API void ggml_allocr_alloc (ggml_allocr_t alloc, struct ggml_tensor * tensor); -GGML_API size_t ggml_allocr_max_size (ggml_allocr_t alloc); - -GGML_API size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph); - -// -// ggml-backend v2 API -// - -// Separate tensor and graph allocator objects -// This is necessary for multi-backend allocation because the graph allocator needs to use multiple tensor allocators -// The original API is kept as a wrapper around the new API +typedef struct ggml_backend_buffer_type * ggml_backend_buffer_type_t; +typedef struct ggml_backend_buffer * ggml_backend_buffer_t; +typedef struct ggml_backend * ggml_backend_t; // Tensor allocator typedef struct ggml_tallocr * ggml_tallocr_t; -GGML_API ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend); - -GGML_API struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t talloc); - -GGML_API void ggml_tallocr_free 
(ggml_tallocr_t talloc); -GGML_API bool ggml_tallocr_is_measure (ggml_tallocr_t talloc); -GGML_API void ggml_tallocr_reset (ggml_tallocr_t talloc); -GGML_API void ggml_tallocr_alloc (ggml_tallocr_t talloc, struct ggml_tensor * tensor); -GGML_API size_t ggml_tallocr_max_size (ggml_tallocr_t talloc); - +GGML_API ggml_tallocr_t ggml_tallocr_new(ggml_backend_buffer_t buffer); +GGML_API void ggml_tallocr_free(ggml_tallocr_t talloc); +GGML_API void ggml_tallocr_alloc(ggml_tallocr_t talloc, struct ggml_tensor * tensor); // Graph allocator +/* + Example usage: + ggml_gallocr_t galloc = ggml_gallocr_new(ggml_bacckend_cpu_buffer_type()); + + // optional: create a worst-case graph and reserve the buffers to avoid reallocations + ggml_gallocr_reserve(galloc, build_graph(max_batch)); + + // allocate the graph + struct ggml_cgraph * graph = build_graph(batch); + ggml_gallocr_alloc_graph(galloc, graph); + + printf("compute buffer size: %zu bytes\n", ggml_gallocr_get_buffer_size(galloc, 0)); + + // evaluate the graph + ggml_backend_graph_compute(backend, graph); +*/ + +// special tensor flags for use with the graph allocator: +// ggml_set_input(): all input tensors are allocated at the beginning of the graph in non-overlapping addresses +// ggml_set_output(): output tensors are never freed and never overwritten + typedef struct ggml_gallocr * ggml_gallocr_t; -GGML_API ggml_gallocr_t ggml_gallocr_new(void); -GGML_API void ggml_gallocr_free(ggml_gallocr_t galloc); +GGML_API ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft); +GGML_API ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs); +GGML_API void ggml_gallocr_free(ggml_gallocr_t galloc); -GGML_API void ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n); -GGML_API size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph); +// pre-allocate buffers from a measure graph - does not allocate or modify the graph +// call with a worst-case graph to avoid buffer reallocations +// not strictly required for single buffer usage: ggml_gallocr_alloc_graph will reallocate the buffers automatically if needed +// returns false if the buffer allocation failed +GGML_API bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph * graph); +GGML_API bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids); -// Allocate tensors from the allocators given by the hash table -GGML_API void ggml_gallocr_alloc_graph_n( - ggml_gallocr_t galloc, - struct ggml_cgraph * graph, - struct ggml_hash_set hash_set, - ggml_tallocr_t * hash_node_talloc); +// automatic reallocation if the topology changes when using a single buffer +// returns false if using multiple buffers and a re-allocation is needed (call ggml_gallocr_reserve_n first to set the node buffers) +GGML_API bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph); +GGML_API size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id); // Utils // Create a buffer and allocate all the tensors in a ggml_context -GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, struct ggml_backend_buffer_type * buft); -GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, struct ggml_backend * backend); +GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, ggml_backend_buffer_type_t buft); 
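Since the header above replaces the legacy allocator API with the buffer-type-aware `ggml_gallocr`, a concrete end-to-end example may help. The following is a minimal CPU-only sketch of the intended usage, expanding on the `Example usage` comment in the header; the tensor shapes, the context size, and the `build_graph()` helper are invented for illustration, and the new `ggml_set_input`/`ggml_set_output` flags (added further down in this patch) are used to mark the graph inputs and output:

```c
// Minimal sketch of the new ggml_gallocr flow (illustrative only; the shapes and
// the build_graph() helper are made up for this example).
#include <stdio.h>

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"

static struct ggml_cgraph * build_graph(struct ggml_context * ctx) {
    // tensors are created with no_alloc = true, so they carry no data yet;
    // ggml_gallocr_alloc_graph assigns their addresses later
    struct ggml_tensor * a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 64, 64);
    struct ggml_tensor * b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 64, 64);
    ggml_set_input(a);
    ggml_set_input(b);

    struct ggml_tensor * out = ggml_mul_mat(ctx, a, b);
    ggml_set_output(out);

    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, out);
    return gf;
}

int main(void) {
    struct ggml_init_params params = {
        /*.mem_size   =*/ ggml_tensor_overhead()*128 + ggml_graph_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true, // the graph allocator owns the tensor data
    };
    struct ggml_context * ctx     = ggml_init(params);
    ggml_backend_t        backend = ggml_backend_cpu_init();
    ggml_gallocr_t        galloc  = ggml_gallocr_new(ggml_backend_cpu_buffer_type());

    struct ggml_cgraph * gf = build_graph(ctx);

    // with a single buffer type, ggml_gallocr_reserve is optional: alloc_graph
    // (re)allocates the buffer automatically if needed
    if (!ggml_gallocr_alloc_graph(galloc, gf)) {
        fprintf(stderr, "failed to allocate graph\n");
        return 1;
    }
    printf("compute buffer size: %zu bytes\n", ggml_gallocr_get_buffer_size(galloc, 0));

    // ... fill the inputs with ggml_backend_tensor_set(), then evaluate:
    ggml_backend_graph_compute(backend, gf);

    ggml_gallocr_free(galloc);
    ggml_backend_free(backend);
    ggml_free(ctx);
    return 0;
}
```

Note that the reserve step is skipped here only because a single buffer type is used; with multiple buffer types, `ggml_gallocr_reserve_n` must be called first with explicit node buffer ids, as the header comments above state.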
+GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, ggml_backend_t backend); #ifdef __cplusplus } diff --git a/ggml-backend.c b/ggml-backend.c index 532da8eda..9ee81b766 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -475,6 +475,8 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU +static const size_t TENSOR_ALIGNMENT = 32; // required for mmap as gguf only guarantees 32-byte alignment + GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { return "CPU"; @@ -482,7 +484,14 @@ GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t } GGML_CALL static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { - return (void *)buffer->context; + uintptr_t data = (uintptr_t)buffer->context; + + // align the buffer + if (data % TENSOR_ALIGNMENT != 0) { + data = GGML_PAD(data, TENSOR_ALIGNMENT); + } + + return (void *)data; } GGML_CALL static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { @@ -540,8 +549,6 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { /* .reset = */ NULL, }; -static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 - GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU"; @@ -550,9 +557,11 @@ GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned - void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? - - GGML_ASSERT(data != NULL && "failed to allocate buffer"); + void * data = malloc(size); // TODO: use GGML_ALIGNED_MALLOC (move to ggml-impl.h) + if (data == NULL) { + fprintf(stderr, "%s: failed to allocate buffer of size %zu\n", __func__, size); + return NULL; + } return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); } @@ -766,6 +775,9 @@ static struct ggml_backend_i cpu_backend_i = { ggml_backend_t ggml_backend_cpu_init(void) { struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context)); + if (ctx == NULL) { + return NULL; + } ctx->n_threads = GGML_DEFAULT_N_THREADS; ctx->work_data = NULL; @@ -774,6 +786,10 @@ ggml_backend_t ggml_backend_cpu_init(void) { ctx->abort_callback_data = NULL; ggml_backend_t cpu_backend = malloc(sizeof(struct ggml_backend)); + if (cpu_backend == NULL) { + free(ctx); + return NULL; + } *cpu_backend = (struct ggml_backend) { /* .interface = */ cpu_backend_i, @@ -802,6 +818,7 @@ void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_ } GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { + GGML_ASSERT((uintptr_t)ptr % TENSOR_ALIGNMENT == 0 && "buffer pointer must be aligned"); return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } @@ -865,6 +882,8 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_multi_buffer_alloc_buffer(ggml_back ctx->n_buffers = n_buffers; ctx->buffers = (ggml_backend_buffer_t *) malloc(n_buffers * sizeof(ggml_backend_buffer_t)); + GGML_ASSERT(ctx->buffers != NULL); + size_t total_size = 0; for (size_t i = 0; i < n_buffers; i++) { ctx->buffers[i] = buffers[i]; @@ -886,6 +905,18 @@ GGML_CALL void 
ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, } } +// creates a copy of the tensor with the same memory layout +static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { + struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); + for (int i = 0; i < GGML_MAX_DIMS; i++) { + dup->nb[i] = tensor->nb[i]; + } + return dup; +} + +static bool ggml_is_view_op(enum ggml_op op) { + return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; +} // scheduler @@ -894,7 +925,7 @@ GGML_CALL void ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, #define GGML_MAX_SPLIT_INPUTS 16 struct ggml_backend_sched_split { - ggml_tallocr_t tallocr; + int backend_id; int i_start; int i_end; struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; @@ -909,15 +940,17 @@ struct ggml_backend_sched { int n_backends; ggml_backend_t backends[GGML_MAX_BACKENDS]; ggml_backend_buffer_type_t bufts[GGML_MAX_BACKENDS]; - ggml_tallocr_t tallocs[GGML_MAX_BACKENDS]; ggml_gallocr_t galloc; // hash keys of the nodes in the graph struct ggml_hash_set hash_set; - // hash values (arrays of [hash_set.size]) - ggml_tallocr_t * node_talloc; // tallocr assigned to each node (indirectly this is the backend) - struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // copies of each node for each destination backend + // hash values + int * tensor_backend_id; + struct ggml_tensor * (* tensor_copies)[GGML_MAX_BACKENDS]; + + int * node_backend_ids; // [n_nodes] + int n_nodes; // copy of the graph with modified inputs struct ggml_cgraph * graph; @@ -927,77 +960,46 @@ struct ggml_backend_sched { struct ggml_context * ctx; + ggml_backend_sched_eval_callback callback_eval; + void * callback_eval_user_data; + // align context_buffer to GGML_MEM_ALIGN #ifdef _MSC_VER __declspec(align(GGML_MEM_ALIGN)) #else __attribute__((aligned(GGML_MEM_ALIGN))) #endif - char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*sizeof(struct ggml_tensor) + sizeof(struct ggml_cgraph)]; - - ggml_backend_sched_eval_callback callback_eval; - void * callback_eval_user_data; + char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*2*sizeof(struct ggml_tensor) + sizeof(struct ggml_cgraph)]; }; #define hash_id(node) ggml_hash_find_or_insert(sched->hash_set, node) -#define node_allocr(node) sched->node_talloc[hash_id(node)] +#define tensor_backend_id(node) sched->tensor_backend_id[hash_id(node)] +#define tensor_backend(node) (tensor_backend_id(node) == -1 ? 
NULL : sched->backends[tensor_backend_id(node)]) -static bool ggml_is_view_op(enum ggml_op op) { - return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; -} - -// returns the priority of the backend, lower is better -static int sched_backend_prio(ggml_backend_sched_t sched, ggml_backend_t backend) { +// returns the priority of the backend, lower id is higher priority +static int ggml_backend_sched_backend_id(ggml_backend_sched_t sched, ggml_backend_t backend) { for (int i = 0; i < sched->n_backends; i++) { if (sched->backends[i] == backend) { return i; } } - return INT_MAX; + return -1; } -static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { - for (int i = 0; i < sched->n_backends; i++) { - if (sched->tallocs[i] == allocr) { - return i; - } - } - return INT_MAX; -} - -static ggml_tallocr_t sched_allocr_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { +static int ggml_backend_sched_backend_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { if (buffer == NULL) { - return NULL; - } - - // check if this is already allocate in a allocr buffer (from user manual allocations) - for (int i = 0; i < sched->n_backends; i++) { - if (ggml_tallocr_get_buffer(sched->tallocs[i]) == buffer) { - return sched->tallocs[i]; - } + return -1; } // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (ggml_backend_buft_supports_backend(buffer->buft, sched->backends[i])) { - return sched->tallocs[i]; + return i; } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); } -static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { - if (allocr == NULL) { - return NULL; - } - for (int i = 0; i < sched->n_backends; i++) { - if (sched->tallocs[i] == allocr) { - return sched->backends[i]; - } - } - GGML_UNREACHABLE(); -} - #if 0 static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug only #define SET_CAUSE(node, ...) 
sprintf(causes[hash_id(node)], __VA_ARGS__) @@ -1008,37 +1010,39 @@ static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_I #endif // returns the backend that should be used for the node based on the current locations -static ggml_tallocr_t sched_allocr_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { +static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * tensor) { + // TODO: use supports_op to check if the backend supports the op + // assign pre-allocated nodes to their backend // dst - ggml_tallocr_t cur_allocr = sched_allocr_from_buffer(sched, node->buffer); - if (cur_allocr != NULL) { + int cur_backend = ggml_backend_sched_backend_from_buffer(sched, tensor->buffer); + if (cur_backend != -1) { SET_CAUSE(node, "1.dst"); - return cur_allocr; + return cur_backend; } // view_src - if (node->view_src != NULL) { - cur_allocr = sched_allocr_from_buffer(sched, node->view_src->buffer); - if (cur_allocr != NULL) { + if (tensor->view_src != NULL) { + cur_backend = ggml_backend_sched_backend_from_buffer(sched, tensor->view_src->buffer); + if (cur_backend != -1) { SET_CAUSE(node, "1.vsrc"); - return cur_allocr; + return cur_backend; } } // assign nodes that use weights to the backend of the weights for (int i = 0; i < GGML_MAX_SRC; i++) { - const struct ggml_tensor * src = node->src[i]; + const struct ggml_tensor * src = tensor->src[i]; if (src == NULL) { break; } if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { - ggml_tallocr_t src_allocr = sched_allocr_from_buffer(sched, src->buffer); + int src_backend = ggml_backend_sched_backend_from_buffer(sched, src->buffer); // operations with weights are always run on the same backend as the weights SET_CAUSE(node, "1.wgt%d", i); - return src_allocr; + return src_backend; } } - return NULL; + return -1; } static char * fmt_size(size_t size) { @@ -1051,11 +1055,11 @@ static char * fmt_size(size_t size) { return buffer; } -static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +static void ggml_backend_sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { int cur_split = 0; for (int i = 0; i < graph->n_nodes; i++) { if (cur_split < sched->n_splits && i == sched->splits[cur_split].i_start) { - ggml_backend_t split_backend = get_allocr_backend(sched, sched->splits[cur_split].tallocr); + ggml_backend_t split_backend = sched->backends[sched->splits[cur_split].backend_id]; fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), sched->splits[cur_split].n_inputs); for (int j = 0; j < sched->splits[cur_split].n_inputs; j++) { @@ -1069,17 +1073,15 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - ggml_backend_t node_backend = node_allocr ? get_allocr_backend(sched, node_allocr) : NULL; // FIXME: + ggml_backend_t tensor_backend = tensor_backend(node); fprintf(stderr, "node #%3d (%10.10s): %20.20s (%5.5s) [%5.5s %8.8s]:", i, ggml_op_name(node->op), node->name, - fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", GET_CAUSE(node)); + fmt_size(ggml_nbytes(node)), tensor_backend ? 
ggml_backend_name(tensor_backend) : "NULL", GET_CAUSE(node)); for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - ggml_backend_t src_backend = src_allocr ? get_allocr_backend(sched, src_allocr) : NULL; + ggml_backend_t src_backend = tensor_backend(src); fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); } @@ -1087,23 +1089,13 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } } -// creates a copy of the tensor with the same memory layout -static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { - struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); - for (int i = 0; i < GGML_MAX_DIMS; i++) { - dup->nb[i] = tensor->nb[i]; - } - return dup; -} - - //#define DEBUG_PASS1 //#define DEBUG_PASS2 //#define DEBUG_PASS3 //#define DEBUG_PASS4 // assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend -static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { // reset splits sched->n_splits = 0; sched->is_reset = false; @@ -1125,28 +1117,28 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 1: assign backends to ops with pre-allocated inputs for (int i = 0; i < graph->n_leafs; i++) { struct ggml_tensor * leaf = graph->leafs[i]; - if (node_allocr(leaf) != NULL) { + if (tensor_backend_id(leaf) != -1) { // do not overwrite user assignments continue; } - node_allocr(leaf) = sched_allocr_from_cur(sched, leaf); + tensor_backend_id(leaf) = ggml_backend_sched_backend_id_from_cur(sched, leaf); } for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - if (node_allocr(node) != NULL) { + if (tensor_backend_id(node) != -1) { // do not overwrite user assignments continue; } - node_allocr(node) = sched_allocr_from_cur(sched, node); + tensor_backend_id(node) = ggml_backend_sched_backend_id_from_cur(sched, node); // src for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - if (node_allocr(src) == NULL) { - node_allocr(src) = sched_allocr_from_cur(sched, src); + if (tensor_backend_id(src) == -1) { + tensor_backend_id(src) = ggml_backend_sched_backend_id_from_cur(sched, src); } } } @@ -1161,22 +1153,22 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.1 expand gpu up { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = graph->n_nodes - 1; i >= 0; i--) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + if (tensor_backend_id == sched->n_backends - 1) { // skip cpu (lowest prio backend) - cur_allocr = NULL; + cur_backend_id = -1; } else { - cur_allocr = node_allocr; + cur_backend_id = tensor_backend_id; } } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.1"); } } @@ -1184,22 +1176,22 @@ static void 
sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.2 expand gpu down { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + if (tensor_backend_id == sched->n_backends - 1) { // skip cpu (lowest prio backend) - cur_allocr = NULL; + cur_backend_id = -1; } else { - cur_allocr = node_allocr; + cur_backend_id = tensor_backend_id; } } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.2"); } } @@ -1207,17 +1199,17 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.3 expand rest up { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = graph->n_nodes - 1; i >= 0; i--) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - cur_allocr = node_allocr; + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + cur_backend_id = tensor_backend_id; } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.3"); } } @@ -1225,17 +1217,17 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.4 expand rest down { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - cur_allocr = node_allocr; + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + cur_backend_id = tensor_backend_id; } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.4"); } } @@ -1247,9 +1239,9 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 3: assign backends to remaining src from dst and view_src for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t cur_allocr = node_allocr(node); - if (node->view_src != NULL && cur_allocr == NULL) { - cur_allocr = node_allocr(node) = node_allocr(node->view_src); + int cur_backend_id = tensor_backend_id(node); + if (node->view_src != NULL && cur_backend_id == -1) { + cur_backend_id = tensor_backend_id(node) = tensor_backend_id(node->view_src); SET_CAUSE(node, "3.vsrc"); } for (int j = 0; j < GGML_MAX_SRC; j++) { @@ -1257,14 +1249,14 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr == NULL) { + int src_backend_id = tensor_backend_id(src); + if (src_backend_id == -1) { if (src->view_src != NULL) { // views are always on the same backend as the source - node_allocr(src) = node_allocr(src->view_src); + tensor_backend_id(src) = tensor_backend_id(src->view_src); SET_CAUSE(src, "3.vsrc"); } else { - node_allocr(src) = cur_allocr; + tensor_backend_id(src) = cur_backend_id; SET_CAUSE(src, "3.cur"); } } @@ -1281,15 +1273,14 @@ static void 
sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (!ggml_is_view_op(node->op)) { - sched->splits[0].tallocr = node_allocr(node); + sched->splits[0].backend_id = tensor_backend_id(node); break; } } sched->splits[0].i_start = 0; sched->splits[0].n_inputs = 0; memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK - ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; - size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); + int cur_backend_id = sched->splits[0].backend_id; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1297,19 +1288,18 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g continue; } - ggml_tallocr_t node_allocr = node_allocr(node); + int tensor_backend_id = tensor_backend_id(node); - GGML_ASSERT(node_allocr != NULL); // all nodes should be assigned by now + GGML_ASSERT(tensor_backend_id != -1); // all nodes should be assigned by now - if (node_allocr != cur_allocr) { + if (tensor_backend_id != cur_backend_id) { sched->splits[cur_split].i_end = i; cur_split++; GGML_ASSERT(cur_split < GGML_MAX_SPLITS); - sched->splits[cur_split].tallocr = node_allocr; + sched->splits[cur_split].backend_id = tensor_backend_id; sched->splits[cur_split].i_start = i; sched->splits[cur_split].n_inputs = 0; - cur_allocr = node_allocr; - cur_backend_id = sched_allocr_prio(sched, cur_allocr); + cur_backend_id = tensor_backend_id; } // find inputs that are not on the same backend @@ -1318,43 +1308,25 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - GGML_ASSERT(src_allocr != NULL); // all inputs should be assigned by now - if (src_allocr != node_allocr) { + int src_backend_id = tensor_backend_id(src); + assert(src_backend_id != -1); // all inputs should be assigned by now + if (src_backend_id != tensor_backend_id) { // create a copy of the input in the split's backend size_t id = hash_id(src); - if (sched->node_copies[id][cur_backend_id] == NULL) { - ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); + if (sched->tensor_copies[id][cur_backend_id] == NULL) { + ggml_backend_t backend = sched->backends[cur_backend_id]; struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); - sched->node_copies[id][cur_backend_id] = tensor_copy; - node_allocr(tensor_copy) = cur_allocr; + sched->tensor_copies[id][cur_backend_id] = tensor_copy; + tensor_backend_id(tensor_copy) = cur_backend_id; SET_CAUSE(tensor_copy, "4.cpy"); int n_inputs = sched->splits[cur_split].n_inputs++; GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); sched->splits[cur_split].inputs[n_inputs] = src; } - node->src[j] = sched->node_copies[id][cur_backend_id]; - -#if 0 - // check if the input is already in the split - bool found = false; - for (int k = 0; k < sched->splits[cur_split].n_inputs; k++) { - if (sched->splits[cur_split].inputs[k] == src) { - found = true; - break; - } - } - - if (!found) { - int n_inputs = sched->splits[cur_split].n_inputs++; - //printf("split %d input %d: %s (%s)\n", cur_split, n_inputs, src->name, ggml_backend_name(get_allocr_backend(sched, src_allocr))); - GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); - sched->splits[cur_split].inputs[n_inputs] = src; - } -#endif + node->src[j] = 
sched->tensor_copies[id][cur_backend_id]; } } } @@ -1369,30 +1341,30 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // sanity check: all sources should have the same backend as the node for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr == NULL) { + ggml_backend_t tensor_backend = tensor_backend(node); + if (tensor_backend == NULL) { fprintf(stderr, "!!!!!!! %s has no backend\n", node->name); } - if (node->view_src != NULL && node_allocr != node_allocr(node->view_src)) { + if (node->view_src != NULL && tensor_backend != tensor_backend(node->view_src)) { fprintf(stderr, "!!!!!!! %s has backend %s, view_src %s has backend %s\n", - node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", - node->view_src->name, node_allocr(node->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(node->view_src))) : "NULL"); + node->name, tensor_backend ? ggml_backend_name(tensor_backend) : "NULL", + node->view_src->name, tensor_backend(node->view_src) ? ggml_backend_name(tensor_backend(node->view_src)) : "NULL"); } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != node_allocr /* && src_backend != NULL */) { // ignore nulls for now + ggml_backend_t src_backend = tensor_backend(src); + if (src_backend != tensor_backend /* && src_backend != NULL */) { fprintf(stderr, "!!!! %s has backend %s, src %d (%s) has backend %s\n", - node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", - j, src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL"); + node->name, tensor_backend ? ggml_backend_name(tensor_backend) : "NULL", + j, src->name, src_backend ? ggml_backend_name(src_backend) : "NULL"); } - if (src->view_src != NULL && src_allocr != node_allocr(src->view_src)) { + if (src->view_src != NULL && src_backend != tensor_backend(src->view_src)) { fprintf(stderr, "!!!!!!! [src] %s has backend %s, view_src %s has backend %s\n", - src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL", - src->view_src->name, node_allocr(src->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(src->view_src))) : "NULL"); + src->name, src_backend ? ggml_backend_name(src_backend) : "NULL", + src->view_src->name, tensor_backend(src->view_src) ? 
ggml_backend_name(tensor_backend(src->view_src)) : "NULL"); } } } @@ -1406,32 +1378,45 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g struct ggml_backend_sched_split * split = &sched->splits[i]; split->graph = ggml_graph_view(graph, split->i_start, split->i_end); - // add inputs to the graph copy so that they are allocated by ggml-alloc at the start of the split for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_allocr_prio(sched, split->tallocr)]; + struct ggml_tensor * input_cpy = sched->tensor_copies[hash_id(input)][split->backend_id]; + // add a dependency to the input source so that it is not freed before the copy is done - GGML_ASSERT(input_cpy->src[0] == NULL || input_cpy->src[0] == input); - input_cpy->src[0] = input; + struct ggml_tensor * input_dep = ggml_view_tensor(sched->ctx, input); + sched->node_backend_ids[graph_copy->n_nodes] = tensor_backend_id(input); + graph_copy->nodes[graph_copy->n_nodes++] = input_dep; + + // add a dependency to the input copy so that it is allocated at the start of the split + sched->node_backend_ids[graph_copy->n_nodes] = split->backend_id; graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; } for (int j = split->i_start; j < split->i_end; j++) { + sched->node_backend_ids[graph_copy->n_nodes] = tensor_backend_id(graph->nodes[j]); graph_copy->nodes[graph_copy->n_nodes++] = graph->nodes[j]; } } sched->graph = graph_copy; } -static void sched_alloc_splits(ggml_backend_sched_t sched) { - ggml_gallocr_alloc_graph_n( - sched->galloc, - sched->graph, - sched->hash_set, - sched->node_talloc); +static bool ggml_backend_sched_alloc_splits(ggml_backend_sched_t sched) { + // ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids); + if (!ggml_gallocr_alloc_graph(sched->galloc, sched->graph)) { +#ifndef NDEBUG + fprintf(stderr, "ggml_backend_sched: failed to allocate graph, reserving\n"); +#endif + ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids); + if (!ggml_gallocr_alloc_graph(sched->galloc, sched->graph)) { + fprintf(stderr, "ggml_backend_sched: failed to allocate graph\n"); + return false; + } + } + + return true; } -static void sched_compute_splits(ggml_backend_sched_t sched) { +static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { uint64_t copy_us[GGML_MAX_BACKENDS] = {0}; uint64_t compute_us[GGML_MAX_BACKENDS] = {0}; @@ -1439,20 +1424,18 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { for (int i = 0; i < sched->n_splits; i++) { struct ggml_backend_sched_split * split = &splits[i]; - ggml_backend_t split_backend = get_allocr_backend(sched, split->tallocr); - int split_backend_id = sched_backend_prio(sched, split_backend); + int split_backend_id = split->backend_id; + ggml_backend_t split_backend = sched->backends[split_backend_id]; // copy the input tensors to the split backend uint64_t copy_start_us = ggml_time_us(); for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][split_backend_id]; + struct ggml_tensor * input_cpy = sched->tensor_copies[hash_id(input)][split_backend_id]; GGML_ASSERT(input->buffer != NULL); GGML_ASSERT(input_cpy->buffer != NULL); - // TODO: avoid this copy if it was already copied in a previous split, and the input didn't change - // this is important to avoid copying constants such as 
KQ_mask and inp_pos multiple times ggml_backend_tensor_copy_async(split_backend, input, input_cpy); } //ggml_backend_synchronize(split_backend); // necessary to measure copy time @@ -1468,7 +1451,9 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t compute_start_us = ggml_time_us(); if (!sched->callback_eval) { - ggml_backend_graph_compute(split_backend, &split->graph); + if (!ggml_backend_graph_compute(split_backend, &split->graph)) { + return false; + } //ggml_backend_synchronize(split_backend); // necessary to measure compute time } else { // similar to ggml_backend_compare_graph_backend @@ -1488,7 +1473,9 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { struct ggml_cgraph gv = ggml_graph_view(&split->graph, j0, j1 + 1); - ggml_backend_graph_compute(split_backend, &gv); + if (!ggml_backend_graph_compute(split_backend, &gv)) { + return false; + } if (need && !sched->callback_eval(t, false, sched->callback_eval_user_data)) { break; @@ -1510,19 +1497,8 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { } } #endif -} -static void sched_reset(ggml_backend_sched_t sched) { - for (int i = 0; i < sched->n_backends; i++) { - ggml_tallocr_reset(sched->tallocs[i]); - } - // reset state for the next run - size_t hash_size = sched->hash_set.size; - memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); - memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); - memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); - - sched->is_reset = true; + return true; } ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size) { @@ -1532,9 +1508,10 @@ ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_back struct ggml_backend_sched * sched = calloc(sizeof(struct ggml_backend_sched), 1); // initialize hash table - sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); - sched->node_talloc = calloc(sizeof(sched->node_talloc[0]) * sched->hash_set.size, 1); - sched->node_copies = calloc(sizeof(sched->node_copies[0]) * sched->hash_set.size, 1); + sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + sched->tensor_backend_id = calloc(sizeof(sched->tensor_backend_id[0]), sched->hash_set.size); + sched->tensor_copies = calloc(sizeof(sched->tensor_copies[0]), sched->hash_set.size); + sched->node_backend_ids = calloc(sizeof(sched->node_backend_ids[0]), graph_size); sched->n_backends = n_backends; for (int i = 0; i < n_backends; i++) { @@ -1542,14 +1519,9 @@ ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_back sched->bufts[i] = bufts ? 
bufts[i] : ggml_backend_get_default_buffer_type(backends[i]); } - sched->galloc = ggml_gallocr_new(); + sched->galloc = ggml_gallocr_new_n(sched->bufts, n_backends); - // init measure allocs for each backend - for (int i = 0; i < n_backends; i++) { - sched->tallocs[i] = ggml_tallocr_new_measure_from_buft(sched->bufts[i]); - } - - sched_reset(sched); + ggml_backend_sched_reset(sched); return sched; } @@ -1558,49 +1530,54 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { if (sched == NULL) { return; } - for (int i = 0; i < sched->n_backends; i++) { - ggml_tallocr_free(sched->tallocs[i]); - } ggml_gallocr_free(sched->galloc); ggml_free(sched->ctx); free(sched->hash_set.keys); - free(sched->node_talloc); - free(sched->node_copies); + free(sched->tensor_backend_id); + free(sched->tensor_copies); + free(sched->node_backend_ids); free(sched); } -void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { - GGML_ASSERT(ggml_tallocr_is_measure(sched->tallocs[0])); // can only be initialized once +void ggml_backend_sched_reset(ggml_backend_sched_t sched) { + // reset state for the next run + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); // NOLINT + memset(sched->tensor_backend_id, -1, sizeof(sched->tensor_backend_id[0]) * hash_size); + memset(sched->tensor_copies, 0, sizeof(sched->tensor_copies[0]) * hash_size); - sched_split_graph(sched, measure_graph); - sched_alloc_splits(sched); - - // allocate buffers and reset allocators - for (int i = 0; i < sched->n_backends; i++) { - size_t size = ggml_tallocr_max_size(sched->tallocs[i]); - ggml_tallocr_free(sched->tallocs[i]); - sched->tallocs[i] = ggml_tallocr_new_from_buft(sched->bufts[i], size); - } - - sched_reset(sched); + sched->is_reset = true; } -void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { + ggml_backend_sched_split_graph(sched, measure_graph); + + if (!ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids)) { + return false; + } + + ggml_backend_sched_reset(sched); + return true; +} + +bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); if (!sched->is_reset) { - sched_reset(sched); + ggml_backend_sched_reset(sched); } - sched_split_graph(sched, graph); - sched_alloc_splits(sched); - sched_compute_splits(sched); -} + ggml_backend_sched_split_graph(sched, graph); + if (!ggml_backend_sched_alloc_splits(sched)) { + return false; + } -void ggml_backend_sched_reset(ggml_backend_sched_t sched) { - sched_reset(sched); -} + if (!ggml_backend_sched_compute_splits(sched)) { + return false; + } + return true; +} void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data) { sched->callback_eval = callback; @@ -1611,37 +1588,30 @@ int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched) { return sched->n_splits; } -ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); +size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend) { + int backend_index = ggml_backend_sched_backend_id(sched, backend); 
GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - return sched->tallocs[backend_index]; -} - -ggml_backend_buffer_t ggml_backend_sched_get_buffer(ggml_backend_sched_t sched, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); - GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - return ggml_tallocr_get_buffer(sched->tallocs[backend_index]); + return ggml_gallocr_get_buffer_size(sched->galloc, backend_index); } void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); + int backend_index = ggml_backend_sched_backend_id(sched, backend); GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - node_allocr(node) = sched->tallocs[backend_index]; + tensor_backend_id(node) = backend_index; } ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node) { - ggml_tallocr_t allocr = node_allocr(node); - if (allocr == NULL) { + int backend_index = tensor_backend_id(node); + if (backend_index == -1) { return NULL; } - return get_allocr_backend(sched, allocr); + return sched->backends[backend_index]; } // utils void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - //GGML_ASSERT(tensor->data == NULL); // views of pre-allocated tensors may have the data set in ggml_new_tensor, but still need to be initialized by the backend GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); @@ -1665,7 +1635,7 @@ void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor ggml_backend_buffer_init_tensor(buffer, tensor); } -static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, +static struct ggml_tensor * graph_copy_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, struct ggml_context * ctx_allocated, struct ggml_context * ctx_unallocated, struct ggml_tensor * src) { GGML_ASSERT(src != NULL); @@ -1678,7 +1648,7 @@ static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, stru struct ggml_tensor * dst = ggml_dup_tensor_layout(src->data && !src->view_src ? 
ctx_allocated : ctx_unallocated, src); if (src->view_src != NULL) { - dst->view_src = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); + dst->view_src = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); dst->view_offs = src->view_offs; } dst->op = src->op; @@ -1691,14 +1661,14 @@ static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, stru if (s == NULL) { break; } - dst->src[i] = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); + dst->src[i] = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); } node_copies[id] = dst; return dst; } -static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { +static void graph_copy_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { size_t id = ggml_hash_find(hash_set, src); if (node_init[id]) { return; @@ -1707,7 +1677,7 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor struct ggml_tensor * dst = node_copies[id]; if (dst->view_src != NULL) { - graph_init_tensor(hash_set, node_copies, node_init, src->view_src); + graph_copy_init_tensor(hash_set, node_copies, node_init, src->view_src); ggml_backend_view_init(dst->view_src->buffer, dst); } else { @@ -1720,17 +1690,17 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor if (s == NULL) { break; } - graph_init_tensor(hash_set, node_copies, node_init, s); + graph_copy_init_tensor(hash_set, node_copies, node_init, s); } } struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph) { struct ggml_hash_set hash_set = { /* .size = */ graph->visited_hash_table.size, - /* .keys = */ calloc(sizeof(hash_set.keys[0]) * graph->visited_hash_table.size, 1) + /* .keys = */ calloc(sizeof(hash_set.keys[0]), graph->visited_hash_table.size) // NOLINT }; - struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]) * hash_set.size, 1); - bool * node_init = calloc(sizeof(node_init[0]) * hash_set.size, 1); + struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]), hash_set.size); // NOLINT + bool * node_init = calloc(sizeof(node_init[0]), hash_set.size); struct ggml_init_params params = { /* .mem_size = */ ggml_tensor_overhead()*hash_set.size + ggml_graph_overhead_custom(graph->size, false), @@ -1759,7 +1729,7 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // dup nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); + graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); } // allocate nodes @@ -1784,7 +1754,7 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // copy data and init views for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - graph_init_tensor(hash_set, node_copies, node_init, node); + graph_copy_init_tensor(hash_set, node_copies, node_init, node); } // build graph copy diff --git a/ggml-backend.h b/ggml-backend.h index 282b3a9b7..f13c69bff 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -130,11 +130,7 @@ extern "C" { // in build_graph: build_graph(...) 
{ - // allocating tensors in a specific backend (optional, recommended: pre-allocate inputs in a different buffer) - alloc_cpu = ggml_backend_sched_get_allocr(sched, backend_cpu); - ggml_allocr_alloc(alloc_cpu, tensor); - - // manually assigning nodes to a backend (optional, shouldn't be needed in most cases) + // manually assign nodes to a backend (optional, should not be needed in most cases) struct ggml_tensor * node = ggml_mul_mat(ctx, ...); ggml_backend_sched_set_node_backend(sched, node, backend_gpu); } @@ -164,20 +160,19 @@ extern "C" { GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); // Initialize backend buffers from a measure graph - GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + GGML_API bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); // Get the number of splits of the last graph GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); - GGML_API ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend); - GGML_API ggml_backend_buffer_t ggml_backend_sched_get_buffer (ggml_backend_sched_t sched, ggml_backend_t backend); + GGML_API size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend); GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); GGML_API ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); // Allocate and compute graph on the backend scheduler - GGML_API void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); - // Reset all assignments and allocators - must be called before using the sched allocators to allocate inputs + // Reset all assignments and allocators - must be called before changing the node backends GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); // Set a callback to be called for each resulting node during graph compute diff --git a/ggml.c b/ggml.c index e45b78d7e..d921d82fe 100644 --- a/ggml.c +++ b/ggml.c @@ -2649,7 +2649,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( /*.nb =*/ { 0, 0, 0, 0 }, /*.op =*/ GGML_OP_NONE, /*.op_params =*/ { 0 }, - /*.is_param =*/ false, + /*.flags =*/ 0, /*.grad =*/ NULL, /*.src =*/ { NULL }, /*.perf_runs =*/ 0, @@ -6551,7 +6551,7 @@ struct ggml_tensor * ggml_cross_entropy_loss_back( void ggml_set_param( struct ggml_context * ctx, struct ggml_tensor * tensor) { - tensor->is_param = true; + tensor->flags |= GGML_TENSOR_FLAG_PARAM; GGML_ASSERT(tensor->grad == NULL); tensor->grad = ggml_dup_tensor(ctx, tensor); @@ -15367,7 +15367,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( return NULL; } - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { return node; } @@ -15401,7 +15401,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( clone->op = node->op; clone->grad = node->grad; - clone->is_param = node->is_param; + clone->flags = node->flags; clone->extra = node->extra; for (int k = 0; k < GGML_MAX_DIMS; ++k) { clone->nb[k] = node->nb[k]; @@ -16433,7 +16433,7 @@ void ggml_build_backward_expand(struct ggml_context * ctx, struct 
ggml_cgraph * for (int i = 0; i < gf->n_nodes; i++) { struct ggml_tensor * node = gf->nodes[i]; - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("%s: found root node %p\n", __func__, (void *) node); ggml_build_forward_expand(gb, node->grad); } @@ -17918,7 +17918,7 @@ void ggml_graph_print(const struct ggml_cgraph * cgraph) { GGML_PRINT(" - %3d: [ %5" PRId64 ", %5" PRId64 ", %5" PRId64 "] %16s %s (%3d) cpu = %7.3f / %7.3f ms, wall = %7.3f / %7.3f ms\n", i, node->ne[0], node->ne[1], node->ne[2], - ggml_op_name(node->op), node->is_param ? "x" : node->grad ? "g" : " ", node->perf_runs, + ggml_op_name(node->op), (node->flags & GGML_TENSOR_FLAG_PARAM) ? "x" : node->grad ? "g" : " ", node->perf_runs, (double) node->perf_cycles / (double) ggml_cycles_per_ms(), (double) node->perf_cycles / (double) ggml_cycles_per_ms() / (double) node->perf_runs, (double) node->perf_time_us / 1000.0, @@ -18011,7 +18011,7 @@ void ggml_graph_dump_dot(const struct ggml_cgraph * gb, const struct ggml_cgraph continue; } - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { snprintf(color, sizeof(color), "yellow"); } else if (node->grad) { if (ggml_graph_find(gf, node)) { @@ -18185,7 +18185,7 @@ static enum ggml_opt_result ggml_opt_adam( int np = 0; int64_t nx = 0; for (int i = 0; i < gf->n_nodes; ++i) { - if (gf->nodes[i]->is_param) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); GGML_ASSERT(np < GGML_MAX_PARAMS); @@ -18548,7 +18548,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( int np = 0; int nx = 0; for (int i = 0; i < gf->n_nodes; ++i) { - if (gf->nodes[i]->is_param) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); GGML_ASSERT(np < GGML_MAX_PARAMS); @@ -19023,6 +19023,16 @@ enum ggml_opt_result ggml_opt_resume_g( //////////////////////////////////////////////////////////////////////////////// +void ggml_set_input(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_INPUT; +} + +void ggml_set_output(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_OUTPUT; +} + +//////////////////////////////////////////////////////////////////////////////// + void ggml_quantize_init(enum ggml_type type) { ggml_critical_section_start(); diff --git a/ggml.h b/ggml.h index 9cfec5bac..01cecc1e1 100644 --- a/ggml.h +++ b/ggml.h @@ -505,11 +505,17 @@ extern "C" { enum ggml_log_level { GGML_LOG_LEVEL_ERROR = 2, - GGML_LOG_LEVEL_WARN = 3, - GGML_LOG_LEVEL_INFO = 4, + GGML_LOG_LEVEL_WARN = 3, + GGML_LOG_LEVEL_INFO = 4, GGML_LOG_LEVEL_DEBUG = 5 }; + enum ggml_tensor_flag { + GGML_TENSOR_FLAG_INPUT = 1, + GGML_TENSOR_FLAG_OUTPUT = 2, + GGML_TENSOR_FLAG_PARAM = 4, + }; + // ggml object struct ggml_object { size_t offs; @@ -543,7 +549,7 @@ extern "C" { // op params - allocated as int32_t for alignment int32_t op_params[GGML_MAX_OP_PARAMS / sizeof(int32_t)]; - bool is_param; + int32_t flags; struct ggml_tensor * grad; struct ggml_tensor * src[GGML_MAX_SRC]; @@ -2092,6 +2098,12 @@ extern "C" { ggml_opt_callback callback, void * callback_data); + // + // tensor flags + // + GGML_API void ggml_set_input(struct ggml_tensor * tensor); + GGML_API void ggml_set_output(struct ggml_tensor * tensor); + // // quantization // diff --git a/llama.cpp b/llama.cpp index d1ee26ce2..a5b873a7b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1872,8 +1872,6 @@ struct llama_context { // memory buffers used to 
evaluate the model std::vector buf_compute_meta; ggml_backend_sched_t sched = nullptr; - // allocator for the input tensors - ggml_tallocr * alloc = nullptr; // input tensors ggml_backend_buffer_t buf_input = nullptr; @@ -7199,12 +7197,10 @@ struct llm_build_context { static struct ggml_cgraph * llama_build_graph( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + bool worst_case) { const auto & model = lctx.model; - // check if we should build the worst-case graph (for memory measurement) - const bool worst_case = ggml_tallocr_is_measure(lctx.alloc); - // this callback allows us to apply custom logic to each tensor (e.g. ggml-alloc, offloading, etc.) llm_build_cb cb = [&](struct ggml_tensor * cur, const char * name, int il) { if (il >= 0) { @@ -7225,77 +7221,6 @@ static struct ggml_cgraph * llama_build_graph( struct llm_build_context llm(lctx, batch, cb, worst_case); - // - // set input data - // - - if (!ggml_tallocr_is_measure(lctx.alloc)) { - if (batch.token) { - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_tokens, batch.token, 0, n_tokens*ggml_element_size(lctx.inp_tokens)); - } - - if (batch.embd) { - const int64_t n_embd = llm.n_embd; - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_embd, batch.embd, 0, n_tokens*n_embd*ggml_element_size(lctx.inp_embd)); - } - - if (batch.pos) { - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_pos, batch.pos, 0, n_tokens*ggml_element_size(lctx.inp_pos)); - } - - { - const int64_t n_kv = llm.n_kv; - const int64_t n_tokens = batch.n_tokens; - - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_KQ_mask->buffer)); - float * data = (float *) lctx.inp_KQ_mask->data; - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || - (llm.causal_attn && lctx.kv_self.cells[i].pos > pos)) { - f = -INFINITY; - } else { - f = 0; - } - data[h*(n_kv*n_tokens) + j*n_kv + i] = f; - } - } - } - } - - if (llm.do_rope_shift) { - const int64_t n_ctx = llm.n_ctx; - - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); - int32_t * data = (int32_t *) lctx.inp_K_shift->data; - - for (int i = 0; i < n_ctx; ++i) { - data[i] = lctx.kv_self.cells[i].delta; - } - } - - { - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; - - for (int i = 0; i < batch.n_tokens; ++i) { - data[i] = 1.0f/float(batch.n_tokens); - } - } - } - llm.init(); switch (model.arch) { @@ -7384,6 +7309,83 @@ static struct ggml_cgraph * llama_build_graph( return result; } +static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { + // + // set input data + // + + const auto & hparams = lctx.model.hparams; + const auto & cparams = lctx.cparams; + const auto & kv_self = lctx.kv_self; + + if (batch.token) { + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_tokens, batch.token, 0, n_tokens*ggml_element_size(lctx.inp_tokens)); + } + + if (batch.embd) { + const int64_t n_embd = hparams.n_embd; + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_embd, batch.embd, 0, n_tokens*n_embd*ggml_element_size(lctx.inp_embd)); + } + + if (batch.pos) { + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_pos, batch.pos, 0, 
n_tokens*ggml_element_size(lctx.inp_pos)); + } + + { + const int64_t n_kv = kv_self.n; + const int64_t n_tokens = batch.n_tokens; + + assert(ggml_backend_buffer_is_host(lctx.inp_KQ_mask->buffer)); + + float * data = (float *) lctx.inp_KQ_mask->data; + + for (int h = 0; h < 1; ++h) { + for (int j = 0; j < n_tokens; ++j) { + const llama_pos pos = batch.pos[j]; + const llama_seq_id seq_id = batch.seq_id[j][0]; + + for (int i = 0; i < n_kv; ++i) { + float f; + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + f = -INFINITY; + } else { + f = 0; + } + data[h*(n_kv*n_tokens) + j*n_kv + i] = f; + } + } + } + } + + + { + assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + for (int i = 0; i < batch.n_tokens; ++i) { + data[i] = 1.0f/float(batch.n_tokens); + } + } + + if (kv_self.has_shift) { + const int64_t n_ctx = cparams.n_ctx; + + assert(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); + + int32_t * data = (int32_t *) lctx.inp_K_shift->data; + + for (int i = 0; i < n_ctx; ++i) { + data[i] = lctx.kv_self.cells[i].delta; + } + } +} + // decode a batch of tokens by evaluating the transformer // // - lctx: llama context @@ -7482,7 +7484,7 @@ static int llama_decode_internal( ggml_backend_sched_reset(lctx.sched); ggml_backend_sched_set_eval_callback(lctx.sched, lctx.cparams.cb_eval, lctx.cparams.cb_eval_user_data); - ggml_cgraph * gf = llama_build_graph(lctx, batch); + ggml_cgraph * gf = llama_build_graph(lctx, batch, false); // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; @@ -7527,6 +7529,9 @@ static int llama_decode_internal( if (lctx.backend_cpu != nullptr) { ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); } + + llama_set_inputs(lctx, batch); + ggml_backend_sched_graph_compute(lctx.sched, gf); // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); @@ -11278,23 +11283,27 @@ struct llama_context * llama_new_context_with_model( ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); ctx->sched = ggml_backend_sched_new(ctx->backends.data(), backend_buft.data(), ctx->backends.size(), LLAMA_MAX_NODES); - ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); int n_past = cparams.n_ctx - n_tokens; llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph - ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); + ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0), true); // initialize scheduler with the worst-case graph - ggml_backend_sched_init_measure(ctx->sched, gf); - ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); + if (!ggml_backend_sched_reserve(ctx->sched, gf)) { + LLAMA_LOG_ERROR("%s: failed to allocate compute buffers\n", __func__); + llama_free(ctx); + return nullptr; + } - for (ggml_backend_t backend : ctx->backends) { - ggml_backend_buffer_t buf = ggml_backend_sched_get_buffer(ctx->sched, backend); + for (size_t i = 0; i < ctx->backends.size(); i++) { + ggml_backend_t backend = ctx->backends[i]; + ggml_backend_buffer_type_t buft = backend_buft[i]; + size_t size = ggml_backend_sched_get_buffer_size(ctx->sched, backend); LLAMA_LOG_INFO("%s: %10s compute 
buffer size = %8.2f MiB\n", __func__, - ggml_backend_buffer_name(buf), - ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); + ggml_backend_buft_name(buft), + size / 1024.0 / 1024.0); } // note: the number of splits during measure is higher than during inference due to the kv shift diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 6ae75bc31..7a23ab162 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -2c7cf49810d523b9632da393a9e8270b60bf3b24 +5070f078a67c18c11736e78316ab715ca9afde16 From 4a46d2b7923be83d6019251671ee63aa1fa0d6bc Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Mon, 12 Feb 2024 09:38:44 +0100 Subject: [PATCH 575/811] llava : remove prog parameter from ArgumentParser (#5457) * llava: remove prog parameter from ArgumentParser This commit removes the `prog` parameter from `ArgumentParser` so that it uses the default value which is the name of the script. The motivation for this change is that currently the usage output looks like this: ```console $ python examples/llava/convert-image-encoder-to-gguf.py --help usage: convert_hf_to_gguf.py [-h] ... ``` And with this change it will look like this: ```console $ python examples/llava/convert-image-encoder-to-gguf.py --help usage: convert-image-encoder-to-gguf.py [-h] ... ``` Signed-off-by: Daniel Bevenius * ci: add W503 to flake8 ignore list This commit adds W503 to the ignore list for flake8. This is done to avoid the following error: W503 line break before binary operator Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- .github/workflows/python-lint.yml | 2 +- examples/llava/convert-image-encoder-to-gguf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index 56d17b66c..ea0a05ea1 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -16,5 +16,5 @@ jobs: - name: flake8 Lint uses: py-actions/flake8@v2 with: - ignore: "E203,E211,E221,E225,E231,E241,E251,E261,E266,E501,E701,E704" + ignore: "E203,E211,E221,E225,E231,E241,E251,E261,E266,E501,E701,E704,W503" exclude: "examples/*,examples/*/**,*/**/__init__.py" diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index f5a3c9b46..e204b56be 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -71,7 +71,7 @@ def bytes_to_unicode(): return dict(zip(bs, cs)) -ap = argparse.ArgumentParser(prog="convert_hf_to_gguf.py") +ap = argparse.ArgumentParser() ap.add_argument("-m", "--model-dir", help="Path to model directory cloned from HF Hub", required=True) ap.add_argument("--use-f32", action="store_true", default=False, help="Use f32 instead of f16") ap.add_argument("--text-only", action="store_true", required=False, From 43fe07c1a4f3a58612e1d9543f7c6b556710f5d0 Mon Sep 17 00:00:00 2001 From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> Date: Mon, 12 Feb 2024 20:22:05 +0530 Subject: [PATCH 576/811] ggml-sycl: Replace 3d ops with macro (#5458) * use macro * use macro * fix format --- ggml-sycl.cpp | 75 ++++++++++----------------------------------------- 1 file changed, 14 insertions(+), 61 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index dd562a898..cd4b3a1e1 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -11578,11 +11578,8 @@ static dpct::err0 ggml_sycl_cpy_tensor_2d(void *dst, } char * dst_ptr = (char *) dst; - const int64_t ne0 = 
src->ne[0]; - const int64_t nb0 = src->nb[0]; - const int64_t nb1 = src->nb[1]; - const int64_t nb2 = src->nb[2]; - const int64_t nb3 = src->nb[3]; + GGML_TENSOR_LOCALS_1(int64_t, ne, src, ne); + GGML_TENSOR_LOCALS(int64_t, nb, src, nb); const enum ggml_type type = src->type; const int64_t ts = ggml_type_size(type); const int64_t bs = ggml_blck_size(type); @@ -12426,9 +12423,7 @@ inline void ggml_sycl_op_alibi(const ggml_tensor *src0, const ggml_tensor *src1, GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; + GGML_TENSOR_LOCALS_3(int64_t, ne0, src0, ne); const int64_t nrows = ggml_nrows(src0); //const int n_past = ((int32_t *) dst->op_params)[0]; @@ -12758,15 +12753,9 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, ggml_sycl_op_mul_mat_t op, const bool convert_src1_to_q8_1) try { - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); const int64_t nrows1 = ggml_nrows(src1); GGML_ASSERT(ne03 == ne13); @@ -13337,23 +13326,13 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne00 = src0->ne[0]; GGML_UNUSED(ne00); - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne); - const int64_t nb01 = src0->nb[1]; - const int64_t nb02 = src0->nb[2]; GGML_UNUSED(nb02); - const int64_t nb03 = src0->nb[3]; GGML_UNUSED(nb03); + GGML_TENSOR_LOCALS(int64_t, nb0, src0, nb); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); - const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); - const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); + GGML_TENSOR_LOCALS(int64_t, nb1, src1, nb); const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); @@ -13655,23 +13634,15 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne00 = src00->ne[0]; GGML_UNUSED(ne00); - const int64_t ne01 = src00->ne[1]; - const int64_t ne02 = src00->ne[2]; - const int64_t ne03 = src00->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src00, ne); //const int64_t nb01 = src00->nb[1]; - const int64_t nb02 = src00->nb[2]; GGML_UNUSED(nb02); - const int64_t nb03 = src00->nb[3]; GGML_UNUSED(nb03); + GGML_TENSOR_LOCALS(int64_t, nb0, src00, nb); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); + GGML_TENSOR_LOCALS(int64_t, nb1, src1, nb); //const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); - const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); @@ -13940,25 +13911,7 @@ static void ggml_sycl_cpy(const 
ggml_tensor *src0, const ggml_tensor *src1, GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX); GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX); - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - - - const int64_t nb00 = src0->nb[0]; - const int64_t nb01 = src0->nb[1]; - const int64_t nb02 = src0->nb[2]; - const int64_t nb03 = src0->nb[3]; - - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - - - const int64_t nb10 = src1->nb[0]; - const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; - const int64_t nb13 = src1->nb[3]; + GGML_TENSOR_BINARY_OP_LOCALS; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; From dbd8828eb03b9aa8d0af7e4c533d3c2f5b38aba6 Mon Sep 17 00:00:00 2001 From: Lee <44310445+lx200916@users.noreply.github.com> Date: Tue, 13 Feb 2024 01:29:57 +0800 Subject: [PATCH 577/811] py : fix persimmon `n_rot` conversion (#5460) * convert : fix persimmon offical weight conversion to write correct n_rot. * Update convert-persimmon-to-gguf.py --------- Co-authored-by: Georgi Gerganov --- convert-persimmon-to-gguf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/convert-persimmon-to-gguf.py b/convert-persimmon-to-gguf.py index d2be805d1..def210531 100755 --- a/convert-persimmon-to-gguf.py +++ b/convert-persimmon-to-gguf.py @@ -88,7 +88,8 @@ def main(): gguf_writer.add_embedding_length(hidden_size) gguf_writer.add_block_count(block_count) gguf_writer.add_feed_forward_length(hparams.ffn_hidden_size) - gguf_writer.add_rope_dimension_count(hidden_size // head_count) + # ref: https://github.com/ggerganov/llama.cpp/pull/4889/commits/eea19039fc52ea2dbd1aab45b59ab4e3e29a3443 + gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2) gguf_writer.add_head_count(head_count) gguf_writer.add_head_count_kv(head_count_kv) gguf_writer.add_rope_freq_base(hparams.rotary_emb_base) From df334a11251b81fd0b6a0e51e7146e0ba9e973f2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 12 Feb 2024 19:54:29 +0200 Subject: [PATCH 578/811] swift : package no longer use ggml dependency (#5465) * Revert "swift : update Package.swift to use ggml as dependency (#4691)" This reverts commit ece9a45e8ffb73ad461c792720c2fec28b0137bc. 
* spm : add ggml headers --- Package.swift | 24 +++++++++++++++++++----- spm-headers/ggml-alloc.h | 1 + spm-headers/ggml-backend.h | 1 + spm-headers/ggml.h | 1 + 4 files changed, 22 insertions(+), 5 deletions(-) create mode 120000 spm-headers/ggml-alloc.h create mode 120000 spm-headers/ggml-backend.h create mode 120000 spm-headers/ggml.h diff --git a/Package.swift b/Package.swift index 37524edee..b24c9204a 100644 --- a/Package.swift +++ b/Package.swift @@ -13,17 +13,31 @@ let package = Package( products: [ .library(name: "llama", targets: ["llama"]), ], - dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", .branch("release")) - ], targets: [ .target( name: "llama", - dependencies: ["ggml"], path: ".", - exclude: ["ggml-metal.metal"], + exclude: [ + "cmake", + "examples", + "scripts", + "models", + "tests", + "CMakeLists.txt", + "ggml-cuda.cu", + "ggml-cuda.h", + "Makefile" + ], sources: [ + "ggml.c", "llama.cpp", + "ggml-alloc.c", + "ggml-backend.c", + "ggml-quants.c", + "ggml-metal.m", + ], + resources: [ + .process("ggml-metal.metal") ], publicHeadersPath: "spm-headers", cSettings: [ diff --git a/spm-headers/ggml-alloc.h b/spm-headers/ggml-alloc.h new file mode 120000 index 000000000..a49d385a1 --- /dev/null +++ b/spm-headers/ggml-alloc.h @@ -0,0 +1 @@ +../ggml-alloc.h \ No newline at end of file diff --git a/spm-headers/ggml-backend.h b/spm-headers/ggml-backend.h new file mode 120000 index 000000000..17c2cf14f --- /dev/null +++ b/spm-headers/ggml-backend.h @@ -0,0 +1 @@ +../ggml-backend.h \ No newline at end of file diff --git a/spm-headers/ggml.h b/spm-headers/ggml.h new file mode 120000 index 000000000..39215298f --- /dev/null +++ b/spm-headers/ggml.h @@ -0,0 +1 @@ +../ggml.h \ No newline at end of file From 099afc6274c859ca67146e725839f2d97a5ef313 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 12 Feb 2024 20:14:39 +0200 Subject: [PATCH 579/811] llama : fix quantization when tensors are missing (#5423) --- llama.cpp | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index a5b873a7b..d316d067b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -772,22 +772,37 @@ struct LLM_TN { llm_arch arch; std::string operator()(llm_tensor tensor) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return LLM_TENSOR_NAMES[arch].at(tensor); } std::string operator()(llm_tensor tensor, const std::string & suffix) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return LLM_TENSOR_NAMES[arch].at(tensor) + "." + suffix; } std::string operator()(llm_tensor tensor, int bid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid); } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid) + "." + suffix; } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid, int xid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid, xid) + "." 
+ suffix; } }; @@ -10227,6 +10242,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } ++qs.i_ffn_up; } + // if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; //} // IK: let's remove this, else Q2_K is almost the same as Q3_K_S @@ -10286,19 +10302,19 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K_S: - case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; + case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; case LLAMA_FTYPE_MOSTLY_Q3_K_XS: case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: - case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; + case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; case LLAMA_FTYPE_MOSTLY_Q4_K_S: - case LLAMA_FTYPE_MOSTLY_Q4_K_M: quantized_type = GGML_TYPE_Q4_K; break; + case LLAMA_FTYPE_MOSTLY_Q4_K_M: quantized_type = GGML_TYPE_Q4_K; break; case LLAMA_FTYPE_MOSTLY_Q5_K_S: - case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; - case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; - case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break; - case LLAMA_FTYPE_MOSTLY_IQ2_XS :quantized_type = GGML_TYPE_IQ2_XS; break; - case LLAMA_FTYPE_MOSTLY_IQ3_XXS:quantized_type = GGML_TYPE_IQ3_XXS; break; + case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; + case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; + case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } From 895407f31b358e3d9335e847d13f033491ec8a5b Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:07:57 +0200 Subject: [PATCH 580/811] ggml-quants : fix compiler warnings (shadow variable) (#5472) Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index b2a309bf8..f44377f45 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3819,15 +3819,15 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r /* Compute combined scale for the block */ const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. 
const __m256i off = _mm256_set1_epi8( 8 ); - bx = _mm256_sub_epi8( bx, off ); + qx = _mm256_sub_epi8( qx, off ); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); /* Multiply q with scale and accumulate */ acc = _mm256_fmadd_ps( d, q, acc ); @@ -4196,10 +4196,10 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r const __m256 d0d1 = _mm256_mul_ps( d0v, d1v ); // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes - const __m256i bx = bytes_from_nibbles_32(x[i].qs); - const __m256i by = _mm256_loadu_si256( (const __m256i *)y[i].qs ); + const __m256i qx = bytes_from_nibbles_32(x[i].qs); + const __m256i qy = _mm256_loadu_si256( (const __m256i *)y[i].qs ); - const __m256 xy = mul_sum_us8_pairs_float(bx, by); + const __m256 xy = mul_sum_us8_pairs_float(qx, qy); // Accumulate d0*d1*x*y #if defined(__AVX2__) @@ -4418,14 +4418,14 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r /* Compute combined scale for the block */ const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); bxhi = _mm256_andnot_si256(bxhi, _mm256_set1_epi8((char)0xF0)); - bx = _mm256_or_si256(bx, bxhi); + qx = _mm256_or_si256(qx, bxhi); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); /* Multiply q with scale and accumulate */ acc = _mm256_fmadd_ps(d, q, acc); @@ -4722,15 +4722,15 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); bxhi = _mm256_and_si256(bxhi, _mm256_set1_epi8(0x10)); - bx = _mm256_or_si256(bx, bxhi); + qx = _mm256_or_si256(qx, bxhi); const __m256 dy = _mm256_set1_ps(y[i].d); - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + const __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_us8_pairs_float(bx, by); + const __m256 q = mul_sum_us8_pairs_float(qx, qy); acc = _mm256_fmadd_ps(q, _mm256_mul_ps(dx, dy), acc); } @@ -4973,10 +4973,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { // Compute combined scale for the block const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = _mm256_loadu_si256((const __m256i *)x[i].qs); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qx = _mm256_loadu_si256((const __m256i *)x[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); // Multiply q with scale and accumulate #if defined(__AVX2__) From 99b8b43d7b185a6483f28cf798a2d968b2e16ca7 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 11:20:24 +0200 Subject: [PATCH 581/811] tests : disable moe test (#5473) --- tests/test-backend-ops.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index eb06123d2..9af8517d9 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -2129,14 +2129,13 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_pad()); test_cases.emplace_back(new test_leaky_relu()); + // these tests are disabled to save execution time, but they can be handy for debugging +#if 0 #if !defined(__SANITIZE_THREAD__) // FIXME: these tests use too much memory with thread sanitizer test_cases.emplace_back(new test_moe(8, 2, 1, 4096, 8*1024)); //test_cases.emplace_back(new test_moe(8, 2, 8, 4096, 14336)); #endif - - // these tests are disabled to save execution time, but they can be handy for debugging -#if 0 test_cases.emplace_back(new test_llama(1)); test_cases.emplace_back(new test_llama(2)); test_cases.emplace_back(new test_falcon(1)); From 49cc1f7d67de2da99f3ac185f9ff1319b7bf35f8 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 13:01:29 +0200 Subject: [PATCH 582/811] bert : add tests + fix quantization (#5475) * llama : do not quantize pos embd and token type tensors * ci : add BERT tests ggml-ci * ci : do not do BERT tests on low-perf nodes ggml-ci --- ci/run.sh | 46 ++++++++++++++++++++++++++++++++++++++++++++++ llama.cpp | 6 +++++- 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/ci/run.sh b/ci/run.sh index 82fe247a5..a4264d775 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -568,6 +568,50 @@ function gg_sum_open_llama_7b_v2 { #gg_printf '- shakespeare (q8_0 / f16 base lora):\n```\n%s\n```\n' "$(cat $OUT/${ci}-ppl-shakespeare-lora-q8_0-f16.log)" } +# bge-small + +function gg_run_embd_bge_small { + cd ${SRC} + + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/tokenizer.model + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/tokenizer_config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/special_tokens_map.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/pytorch_model.bin + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/sentence_bert_config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/vocab.txt + + path_models="../models-mnt/bge-small" + + rm -rf build-ci-release && mkdir build-ci-release && cd build-ci-release + + set -e + + (time cmake -DCMAKE_BUILD_TYPE=Release ${CMAKE_EXTRA} .. 
) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + + python3 ../convert-hf-to-gguf.py ${path_models} + + model_f16="${path_models}/ggml-model-f16.gguf" + model_q8_0="${path_models}/ggml-model-q8_0.gguf" + + ./bin/quantize ${model_f16} ${model_q8_0} q8_0 + + (time ./bin/embedding --model ${model_f16} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/embedding --model ${model_q8_0} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + + set +e +} + +function gg_sum_embd_bge_small { + gg_printf '### %s\n\n' "${ci}" + + gg_printf 'BGE Small (BERT):\n' + gg_printf '- status: %s\n' "$(cat $OUT/${ci}.exit)" + gg_printf '- f16: \n```\n%s\n```\n' "$(cat $OUT/${ci}-tg-f16.log)" + gg_printf '- q8_0:\n```\n%s\n```\n' "$(cat $OUT/${ci}-tg-q8_0.log)" +} + ## main if [ -z ${GG_BUILD_LOW_PERF} ]; then @@ -591,6 +635,8 @@ test $ret -eq 0 && gg_run ctest_debug test $ret -eq 0 && gg_run ctest_release if [ -z ${GG_BUILD_LOW_PERF} ]; then + test $ret -eq 0 && gg_run embd_bge_small + if [ -z ${GG_BUILD_VRAM_GB} ] || [ ${GG_BUILD_VRAM_GB} -ge 8 ]; then if [ -z ${GG_BUILD_CUDA} ]; then test $ret -eq 0 && gg_run open_llama_3b_v2 diff --git a/llama.cpp b/llama.cpp index d316d067b..6dce392df 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10444,7 +10444,11 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s quantize &= !params->only_copy; // do not quantize expert gating tensors - quantize &= name.find("ffn_gate_inp.weight") == std::string::npos; + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_FFN_GATE_INP, "weight"); + + // do not quantize positional embeddings and token types (BERT) + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_POS_EMBD, "weight"); + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_TOKEN_TYPES, "weight"); enum ggml_type new_type; void * new_data; From ad014bba97ef6ef6c3e2f78b2fc463e91ae94579 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 13 Feb 2024 12:38:37 +0100 Subject: [PATCH 583/811] make: add error message for bad CUDA version (#5444) * make: add error message for bad CUDA version * Update Makefile Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- Makefile | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Makefile b/Makefile index ba73f0637..0a2070b53 100644 --- a/Makefile +++ b/Makefile @@ -569,6 +569,14 @@ $(info I CC: $(shell $(CC) --version | head -n 1)) $(info I CXX: $(shell $(CXX) --version | head -n 1)) ifdef LLAMA_CUBLAS $(info I NVCC: $(shell $(NVCC) --version | tail -n 1)) +CUDA_VERSION := $(shell nvcc --version | grep -oP 'release (\K[0-9]+\.[0-9])') +ifeq ($(shell awk -v "v=$(CUDA_VERSION)" 'BEGIN { print (v < 11.7) }'),1) +ifndef CUDA_DOCKER_ARCH +ifndef CUDA_POWER_ARCH +$(error I ERROR: For CUDA versions < 11.7 a target CUDA architecture must be explicitly provided via CUDA_DOCKER_ARCH) +endif # CUDA_POWER_ARCH +endif # CUDA_DOCKER_ARCH +endif # eq ($(shell echo "$(CUDA_VERSION) < 11.7" | bc),1) endif # LLAMA_CUBLAS $(info ) From 03bf161eb6dea6400ee49c6dc6b69bdcfa9fd3fc Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Tue, 13 Feb 2024 06:06:58 -0600 Subject: [PATCH 584/811] llama : support batched embeddings (#5466) * batched embedding: pool outputs by sequence id. 
updated embedding example * bring back non-causal attention * embd : minor improvements * llama : minor --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 1 + examples/embedding/embedding.cpp | 146 +++++++++++++++++++++++-------- gguf-py/gguf/constants.py | 1 + gguf-py/gguf/gguf_writer.py | 3 + llama.cpp | 61 +++++++++---- llama.h | 5 ++ 6 files changed, 163 insertions(+), 54 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index cae1551a2..5adfdc143 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1648,6 +1648,7 @@ class BertModel(Model): self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) self.gguf_writer.add_causal_attention(False) + self.gguf_writer.add_pooling_layer(True) self.gguf_writer.add_file_type(self.ftype) def set_vocab(self): diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index 27376c8f0..b4688cf51 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -7,6 +7,51 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif +static std::vector split_lines(const std::string & s) { + std::string line; + std::vector lines; + std::stringstream ss(s); + while (std::getline(ss, line)) { + lines.push_back(line); + } + return lines; +} + +static void batch_add_seq(llama_batch & batch, const std::vector & tokens, int seq_id) { + for (size_t i = 0; i < tokens.size(); i++) { + llama_batch_add(batch, tokens[i], i, { seq_id }, false); + } +} + +static void normalize(float * vec, float * out, int n) { + float norm = 0; + for (int i = 0; i < n; i++) { + norm += vec[i] * vec[i]; + } + norm = sqrt(norm); + for (int i = 0; i < n; i++) { + out[i] = vec[i] / norm; + } +} + +static void batch_decode(llama_context * ctx, llama_batch & batch, float * output, int n_seq, int n_embd) { + // clear previous kv_cache values (irrelevant for embeddings) + llama_kv_cache_clear(ctx); + + // run model + fprintf(stderr, "%s: n_tokens = %d, n_seq = %d\n", __func__, batch.n_tokens, n_seq); + if (llama_decode(ctx, batch) < 0) { + fprintf(stderr, "%s : failed to decode\n", __func__); + } + + // normalize on copy + for (int k = 0; k < n_seq; k++) { + float * emb = llama_get_embeddings_ith(ctx, k); + float * out = output + k * n_embd; + normalize(emb, out, n_embd); + } +} + int main(int argc, char ** argv) { gpt_params params; @@ -55,59 +100,84 @@ int main(int argc, char ** argv) { fprintf(stderr, "%s\n", get_system_info(params).c_str()); } - int n_past = 0; + // split the prompt into lines + std::vector prompts = split_lines(params.prompt); - // tokenize the prompt - auto embd_inp = ::llama_tokenize(ctx, params.prompt, true); + // max batch size + const uint64_t n_batch = params.n_batch; + GGML_ASSERT(params.n_batch == params.n_ctx); + // tokenize the prompts and trim + std::vector> inputs; + for (const auto & prompt : prompts) { + auto inp = ::llama_tokenize(ctx, prompt, true); + if (inp.size() > n_batch) { + inp.resize(n_batch); + } + inputs.push_back(inp); + } + + // tokenization stats if (params.verbose_prompt) { - fprintf(stderr, "\n"); - fprintf(stderr, "%s: prompt: '%s'\n", __func__, params.prompt.c_str()); - fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); - for (int i = 0; i < (int) embd_inp.size(); i++) { - fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_piece(ctx, embd_inp[i]).c_str()); + for (int i = 0; i < (int) inputs.size(); i++) { + 
fprintf(stderr, "%s: prompt %d: '%s'\n", __func__, i, prompts[i].c_str()); + fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, inputs[i].size()); + for (int j = 0; j < (int) inputs[i].size(); j++) { + fprintf(stderr, "%6d -> '%s'\n", inputs[i][j], llama_token_to_piece(ctx, inputs[i][j]).c_str()); + } + fprintf(stderr, "\n\n"); } - fprintf(stderr, "\n"); } - if (embd_inp.size() > (size_t)n_ctx) { - fprintf(stderr, "%s: error: prompt is longer than the context window (%zu tokens, n_ctx = %d)\n", - __func__, embd_inp.size(), n_ctx); - return 1; - } - - while (!embd_inp.empty()) { - int n_tokens = std::min(params.n_batch, (int) embd_inp.size()); - if (llama_decode(ctx, llama_batch_get_one(embd_inp.data(), n_tokens, n_past, 0))) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return 1; - } - n_past += n_tokens; - embd_inp.erase(embd_inp.begin(), embd_inp.begin() + n_tokens); - } + // initialize batch + const int n_prompts = prompts.size(); + struct llama_batch batch = llama_batch_init(n_batch, 0, n_prompts); + // allocate output const int n_embd = llama_n_embd(model); - auto * embeddings = llama_get_embeddings(ctx); + std::vector embeddings(n_prompts * n_embd, 0); + float * emb = embeddings.data(); - // l2-normalize embeddings - float norm = 0; - for (int i = 0; i < n_embd; i++) { - norm += embeddings[i] * embeddings[i]; - } - norm = sqrt(norm); - for (int i = 0; i < n_embd; i++) { - embeddings[i] /= norm; + // break into batches + int p = 0; // number of prompts processed already + int s = 0; // number of prompts in current batch + for (int k = 0; k < n_prompts; k++) { + // clamp to n_batch tokens + auto & inp = inputs[k]; + const uint64_t n_toks = inp.size(); + + // encode if at capacity + if (batch.n_tokens + n_toks > n_batch) { + float * out = emb + p * n_embd; + batch_decode(ctx, batch, out, s, n_embd); + llama_batch_clear(batch); + p += s; + s = 0; + } + + // add to batch + batch_add_seq(batch, inp, s); + s += 1; } - for (int i = 0; i < n_embd; i++) { - printf("%f ", embeddings[i]); - } - printf("\n"); + // final batch + float * out = emb + p * n_embd; + batch_decode(ctx, batch, out, s, n_embd); + // print first 3 embeddings + for (int j = 0; j < std::min(3, n_prompts); j++) { + fprintf(stderr, "embedding %d: ", j); + for (int i = 0; i < n_embd; i++) { + fprintf(stderr, "%f ", emb[j * n_embd + i]); + } + fprintf(stderr, "\n\n"); + } + fprintf(stderr, "\n"); + + // clean up llama_print_timings(ctx); llama_free(ctx); llama_free_model(model); - llama_backend_free(); return 0; diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index a9c13dd38..644e1589c 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -40,6 +40,7 @@ class Keys: TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" EXPERT_COUNT = "{arch}.expert_count" EXPERT_USED_COUNT = "{arch}.expert_used_count" + POOLING_LAYER = "{arch}.pooling_layer" class Attention: HEAD_COUNT = "{arch}.attention.head_count" diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 7af58a46c..d87bd8e88 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -360,6 +360,9 @@ class GGUFWriter: def add_causal_attention(self, value: bool) -> None: self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) + def add_pooling_layer(self, value: bool) -> None: + self.add_bool(Keys.LLM.POOLING_LAYER.format(arch=self.arch), value) + def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), 
count) diff --git a/llama.cpp b/llama.cpp index 6dce392df..eb6c46f36 100644 --- a/llama.cpp +++ b/llama.cpp @@ -254,6 +254,7 @@ enum llm_kv { LLM_KV_TENSOR_DATA_LAYOUT, LLM_KV_EXPERT_COUNT, LLM_KV_EXPERT_USED_COUNT, + LLM_KV_POOLING_LAYER, LLM_KV_ATTENTION_HEAD_COUNT, LLM_KV_ATTENTION_HEAD_COUNT_KV, @@ -311,6 +312,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" }, { LLM_KV_EXPERT_COUNT, "%s.expert_count" }, { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" }, + { LLM_KV_POOLING_LAYER, "%s.pooling_layer" }, { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" }, { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, @@ -1539,6 +1541,7 @@ struct llama_hparams { float f_max_alibi_bias; bool causal_attn = true; + bool pooling_layer = false; bool operator!=(const llama_hparams & other) const { @@ -1601,6 +1604,7 @@ struct llama_cparams { bool mul_mat_q; bool offload_kqv; + bool do_pooling; ggml_backend_sched_eval_callback cb_eval; void * cb_eval_user_data; @@ -1896,7 +1900,7 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] - struct ggml_tensor * inp_sum; // F32 [1, n_batch] + struct ggml_tensor * inp_sum; // F32 [n_batch, n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -3053,6 +3057,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); switch (hparams.n_layer) { case 3: @@ -4859,7 +4864,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; - const bool causal_attn; + const bool do_pooling; const llm_build_cb & cb; @@ -4903,7 +4908,7 @@ struct llm_build_context { kv_head (worst_case ? 
n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - causal_attn (hparams.causal_attn), + do_pooling (hparams.pooling_layer && cparams.do_pooling), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5752,17 +5757,18 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; // get input vectors with right size + const size_t stride1 = n_tokens * ggml_type_size(lctx.inp_tokens->type); struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - struct ggml_tensor * inp_sum = ggml_view_1d(ctx0, lctx.inp_sum, n_tokens, 0); + struct ggml_tensor * inp_sum = ggml_view_2d(ctx0, lctx.inp_sum, n_tokens, n_tokens, stride1, 0); // construct input embeddings (token, type, position) inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + // token types are hardcoded to zero ("Sentence A") struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); inpL = ggml_add(ctx0, inpL, type_row0); @@ -5832,9 +5838,11 @@ struct llm_build_context { // final output cur = inpL; - // pooling - cur = ggml_mul_mat(ctx0, inp_sum, ggml_cont(ctx0, ggml_transpose(ctx0, cur))); - cb(cur, "result_embed", -1); + // pooling layer + if (do_pooling) { + cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_sum); + } + cb(cur, "result_embd", -1); ggml_build_forward_expand(gf, cur); @@ -7367,7 +7375,8 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { for (int i = 0; i < n_kv; ++i) { float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || + (hparams.causal_attn && lctx.kv_self.cells[i].pos > pos)) { f = -INFINITY; } else { f = 0; @@ -7378,7 +7387,6 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - { assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); float * data = (float *) lctx.inp_sum->data; @@ -7399,6 +7407,20 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { data[i] = lctx.kv_self.cells[i].delta; } } + + if (hparams.pooling_layer && cparams.do_pooling) { + const int64_t n_tokens = batch.n_tokens; + + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + memset(lctx.inp_sum->data, 0, batch.n_tokens * batch.n_tokens * ggml_element_size(lctx.inp_sum)); + + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + data[seq_id*n_tokens + i] = 1.0f; + } + } } // decode a batch of tokens by evaluating the transformer @@ -7510,7 +7532,7 @@ static int llama_decode_internal( embeddings = gf->nodes[gf->n_nodes - 3]; GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } - } else if (strcmp(res->name, "result_embed") == 0) { + } else if (strcmp(res->name, "result_embd") == 0) { embeddings = res; res = nullptr; } else { @@ -7630,11 +7652,12 @@ static int llama_decode_internal( if (!lctx.embedding.empty()) { auto & embedding_out = lctx.embedding; - const int64_t embed_pos = res ? n_embd * (n_tokens-1) : 0; + const int64_t embd_pos = res ? n_embd * (n_tokens-1) : 0; + const int64_t embd_size = res ? 
n_embd : n_embd * n_tokens; - embedding_out.resize(n_embd); + embedding_out.resize(embd_size); ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); - ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embed_pos*sizeof(float), n_embd*sizeof(float)); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embd_pos*sizeof(float), embd_size*sizeof(float)); ggml_backend_synchronize(embeddings_backend); } @@ -10950,6 +10973,7 @@ struct llama_context_params llama_context_default_params() { /*.logits_all =*/ false, /*.embedding =*/ false, /*.offload_kqv =*/ true, + /*.do_pooling =*/ true, }; return result; @@ -11105,6 +11129,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.mul_mat_q = params.mul_mat_q; cparams.offload_kqv = params.offload_kqv; + cparams.do_pooling = params.do_pooling; cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; cparams.rope_freq_base = params.rope_freq_base == 0.0f ? hparams.rope_freq_base_train : params.rope_freq_base; @@ -11252,7 +11277,7 @@ struct llama_context * llama_new_context_with_model( // resized during inference, reserve maximum ctx->logits.reserve(hparams.n_vocab*cparams.n_batch); - if (params.embedding){ + if (params.embedding) { ctx->embedding.resize(hparams.n_embd); } @@ -11270,7 +11295,7 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); - ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, 1, cparams.n_batch); + ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); @@ -12128,6 +12153,10 @@ float * llama_get_embeddings(struct llama_context * ctx) { return ctx->embedding.data(); } +float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i) { + return ctx->embedding.data() + i*ctx->model.hparams.n_embd; +} + const char * llama_token_get_text(const struct llama_model * model, llama_token token) { return model->vocab.id_to_token[token].text.c_str(); } diff --git a/llama.h b/llama.h index 367e8f1a1..5ef78ec96 100644 --- a/llama.h +++ b/llama.h @@ -236,6 +236,7 @@ extern "C" { bool logits_all; // the llama_eval() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU + bool do_pooling; // whether to pool (sum) embedding results by sequence id (ignored if no pooling layer) }; // model quantization parameters @@ -628,6 +629,10 @@ extern "C" { // shape: [n_embd] (1-dimensional) LLAMA_API float * llama_get_embeddings(struct llama_context * ctx); + // Get the embeddings for the ith sequence + // llama_get_embeddings(ctx) + i*n_embd + LLAMA_API float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i); + // // Vocab // From cf45252a7cfcb998bade46a886e20477cecc538a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 15:14:22 +0200 Subject: [PATCH 585/811] tests : multi-thread the tokenizer tests (#5474) * tests : multi-thread the tokenizer tests ggml-ci * unicode : 
fix data race for unidentified codepoints ggml-ci * unicode : minor style fixes ggml-ci --- llama.cpp | 24 +++++----- tests/test-tokenizer-1-bpe.cpp | 77 ++++++++++++++++---------------- tests/test-tokenizer-1-llama.cpp | 53 ++++++++++++---------- unicode.h | 72 ++++++++++++++++------------- 4 files changed, 124 insertions(+), 102 deletions(-) diff --git a/llama.cpp b/llama.cpp index eb6c46f36..381a03068 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7782,7 +7782,7 @@ struct llm_bigram_spm { }; struct llm_tokenizer_spm { - llm_tokenizer_spm(const llama_vocab & vocab): vocab(vocab) {} + llm_tokenizer_spm(const llama_vocab & vocab) : vocab(vocab) {} void tokenize(const std::string & text, std::vector & output) { // split string into utf8 chars @@ -7857,6 +7857,7 @@ private: if (p == rev_merge.end()) { // output any symbols that did not form tokens as bytes. + output.reserve(output.size() + symbol.n); for (int j = 0; j < (int)symbol.n; ++j) { llama_vocab::id token_id = llama_byte_to_token(vocab, symbol.text[j]); output.push_back(token_id); @@ -8419,17 +8420,18 @@ struct fragment_buffer_variant { token(_token), raw_text(_dummy), offset(0), - length(0){} + length(0) {} + fragment_buffer_variant(const std::string & _raw_text, int64_t _offset, int64_t _length) : type(FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT), - token((llama_vocab::id)-1), + token((llama_vocab::id) - 1), raw_text(_raw_text), offset(_offset), length(_length){ - GGML_ASSERT( _offset >= 0 ); - GGML_ASSERT( _length >= 1 ); - GGML_ASSERT( offset + length <= raw_text.length() ); + GGML_ASSERT(_offset >= 0); + GGML_ASSERT(_length >= 1); + GGML_ASSERT(offset + length <= raw_text.length()); } const FRAGMENT_BUFFER_VARIANT_TYPE type; @@ -8553,14 +8555,14 @@ static std::vector llama_tokenize_internal(const llama_vocab & } std::forward_list fragment_buffer; - fragment_buffer.emplace_front( raw_text, 0, raw_text.length() ); + fragment_buffer.emplace_front(raw_text, 0, raw_text.length()); - if (special) tokenizer_st_partition( vocab, fragment_buffer ); + if (special) tokenizer_st_partition(vocab, fragment_buffer); switch (vocab.type) { case LLAMA_VOCAB_TYPE_SPM: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { // without adding this leading whitespace, we do not get the same results as the original tokenizer @@ -8588,7 +8590,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } break; case LLAMA_VOCAB_TYPE_BPE: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); @@ -8604,7 +8606,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } break; case LLAMA_VOCAB_TYPE_WPM: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp index 386530f23..3bb629561 100644 --- a/tests/test-tokenizer-1-bpe.cpp +++ b/tests/test-tokenizer-1-bpe.cpp @@ -4,13 +4,13 @@ #include "console.h" #include +#include #include #include -#include -#include -#include -#include #include +#include +#include +#include int main(int argc, char **argv) { if (argc < 2) { @@ -74,45 +74,46 @@ int main(int argc, char 
**argv) { } } catch (const std::invalid_argument &) { - fprintf(stderr, "%s : info: utf8 conversion %d '%s'\n", __func__, i, str.c_str()); + //fprintf(stderr, "%s : info: utf8 conversion %d '%s'\n", __func__, i, str.c_str()); } } - for (uint32_t cp = 0x0000; cp < 0xffff; ++cp) { - // NOTE: these exceptions seem to be necessary, because the GPT2 tokenizer doesn't want to interfere with some ASCII control characters - if ((cp < 0x03 || cp > 0x05) && cp != 0x0b && cp != 0x11 && (cp < 0x13 || cp > 0x17) && cp != 0x19 && (cp < 0x1c || cp > 0x1e) && (cp < 0xd800 || cp > 0xdfff)) { - std::string str = " " + codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 3; - } - } - } - // Restrict to assigned unicode planes - // for (uint32_t cp = 0x10000; cp < 0x0010ffff; ++cp) { - for (uint32_t cp = 0x10000; cp < 0x00040000; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; - } - } - for (uint32_t cp = 0x000e0000; cp < 0x0010ffff; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; + // unicode + { + const int nthread = std::thread::hardware_concurrency(); + + std::vector threads(nthread); + + for (int i = 0; i < nthread; ++i) { + threads[i] = std::thread([i, nthread, ctx]() { + for (uint32_t cp = i; cp < 0x0010ffff; cp += nthread) { + if (!( // NOLINT + (cp < 0x03 || cp > 0x05) && cp != 0x0b && cp != 0x11 && + (cp < 0x13 || cp > 0x17) && cp != 0x19 && + (cp < 0x1c || cp > 0x1e) && + (cp < 0xd800 || cp > 0xdfff) && + (cp < 0x00040000 || cp >= 0x000e0000) + )) { + continue; + } + + std::string str = codepoint_to_utf8(cp); + std::vector tokens = llama_tokenize(ctx, str, false); + std::string check = llama_detokenize_bpe(ctx, tokens); + if (cp != 9601 && str != check) { + fprintf(stderr, "error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", + cp, check.c_str(), check.length(), str.c_str(), str.length()); + std::exit(3); + } + } + }); + } + + for (auto & t : threads) { + t.join(); } } + llama_free_model(model); llama_free(ctx); diff --git a/tests/test-tokenizer-1-llama.cpp b/tests/test-tokenizer-1-llama.cpp index 4b58fe495..b0d814a41 100644 --- a/tests/test-tokenizer-1-llama.cpp +++ b/tests/test-tokenizer-1-llama.cpp @@ -4,13 +4,13 @@ #include "console.h" #include +#include #include #include -#include -#include -#include -#include #include +#include +#include +#include int main(int argc, char **argv) { if (argc < 2) { @@ -72,26 +72,33 @@ int main(int argc, char **argv) { } } - for (uint32_t cp = 0x0000; cp < 0xffff; ++cp) { - if (cp < 0xd800 || cp > 0xdfff) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = 
llama_detokenize_spm(ctx, tokens); - if (cp != 9601 && str != check) { - fprintf(stderr, "%s : error: codepoint %d detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 3; - } + // unicode + { + const int nthread = std::thread::hardware_concurrency(); + + std::vector threads(nthread); + + for (int i = 0; i < nthread; ++i) { + threads[i] = std::thread([i, nthread, ctx]() { + for (uint32_t cp = i; cp < 0x0010ffff; cp += nthread) { + if (cp >= 0xd800 && cp <= 0xdfff) { + continue; + } + + std::string str = codepoint_to_utf8(cp); + std::vector tokens = llama_tokenize(ctx, str, false); + std::string check = llama_detokenize_spm(ctx, tokens); + if (cp != 9601 && str != check) { + fprintf(stderr, "error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", + cp, check.c_str(), check.length(), str.c_str(), str.length()); + std::exit(3); + } + } + }); } - } - for (uint32_t cp = 0x10000; cp < 0x0010ffff; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_spm(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %d detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; + + for (auto & t : threads) { + t.join(); } } diff --git a/unicode.h b/unicode.h index 844eff3da..263260702 100644 --- a/unicode.h +++ b/unicode.h @@ -264,26 +264,29 @@ static uint32_t codepoint_from_utf8(const std::string & utf8, size_t & offset) { offset += 1; return result; } - else if (!(utf8[offset + 0] & 0x40)) { + if (!(utf8[offset + 0] & 0x40)) { throw std::invalid_argument("invalid character"); } - else if (!(utf8[offset + 0] & 0x20)) { - if (offset + 1 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x20)) { + if (offset + 1 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x1f) << 6) | (utf8[offset + 1] & 0x3f); offset += 2; return result; } - else if (!(utf8[offset + 0] & 0x10)) { - if (offset + 2 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x10)) { + if (offset + 2 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x0f) << 12) | ((utf8[offset + 1] & 0x3f) << 6) | (utf8[offset + 2] & 0x3f); offset += 3; return result; } - else if (!(utf8[offset + 0] & 0x08)) { - if (offset + 3 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80) || !((utf8[offset + 3] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x08)) { + if (offset + 3 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! 
((utf8[offset + 2] & 0xc0) == 0x80) || !((utf8[offset + 3] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x07) << 18) | ((utf8[offset + 1] & 0x3f) << 12) | ((utf8[offset + 2] & 0x3f) << 6) | (utf8[offset + 3] & 0x3f); offset += 4; return result; @@ -331,21 +334,22 @@ static uint32_t codepoint_from_utf16(const std::vector & utf16, size_t offset += 1; return result; } - else { - if (offset + 1 >= utf16.size() || !((utf16[1] & 0xdc00) == 0xdc00)) - throw std::invalid_argument("invalid character"); - auto result = 0x10000 + (((utf16[0] & 0x03ff) << 10) | (utf16[1] & 0x03ff)); - offset += 2; - return result; + + if (offset + 1 >= utf16.size() || !((utf16[1] & 0xdc00) == 0xdc00)) { + throw std::invalid_argument("invalid character"); } - throw std::invalid_argument("invalid string"); + + auto result = 0x10000 + (((utf16[0] & 0x03ff) << 10) | (utf16[1] & 0x03ff)); + offset += 2; + return result; } static std::vector codepoints_from_utf16(const std::vector & utf16) { std::vector result; size_t offset = 0; - while (offset < utf16.size()) + while (offset < utf16.size()) { result.push_back(codepoint_from_utf16(utf16, offset)); + } return result; } @@ -361,44 +365,52 @@ static std::vector codepoints_from_utf16(const std::vector & static std::unordered_map codepoint_type_map() { std::unordered_map codepoint_types; for (auto p : digit_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_DIGIT; + } } - for(auto p : letter_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : letter_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_LETTER; + } } - for(auto p : whitespace_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : whitespace_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_WHITESPACE; + } } - for(auto p : accent_mark_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : accent_mark_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_ACCENT_MARK; + } } - for(auto p : punctuation_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : punctuation_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_PUNCTUATION; + } } - for (auto p : symbol_ranges) { - for (auto i = p.first; i <= p.second; ++i) + for (auto p : symbol_ranges) { + for (auto i = p.first; i <= p.second; ++i) { codepoint_types[i] = CODEPOINT_TYPE_SYMBOL; + } } - for(auto p : control_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : control_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_CONTROL; + } } return codepoint_types; } static int codepoint_type(uint32_t cp) { static std::unordered_map codepoint_types = codepoint_type_map(); - return codepoint_types[cp]; + return codepoint_types.find(cp) == codepoint_types.end() ? 
CODEPOINT_TYPE_UNIDENTIFIED : codepoint_types.at(cp); } static int codepoint_type(const std::string & utf8) { - if (utf8.length() == 0) + if (utf8.length() == 0) { return CODEPOINT_TYPE_UNIDENTIFIED; + } size_t offset = 0; return codepoint_type(codepoint_from_utf8(utf8, offset)); } From 263978904c7472db1865409a7ff1129599f6a40b Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 13 Feb 2024 14:15:42 +0100 Subject: [PATCH 586/811] finetune : rename feed-forward tensors (w1/w2/w3) (#4839) * finetune: rename feed-forward tensors (w1/w2/w3) This commit renames the feed-forward tensors w1, w2 and w3 to ffn_gate, ffn_down and ffn_up respectively. The motivation for this change is to make it easier to understand the purpose of the tensors. This also seems to be inline with the names used in the llama_layer struct in llama.cpp. Signed-off-by: Daniel Bevenius * train-text-from-scratch: rename ff tensors This commit renames the feed-forward tensors w1, w2 and w3 to ffn_gate, ffn_down and ffn_up respectively. The motivation for this change is to make it easier to understand the purpose of the tensors. This also seems to be inline with the names used in the llama_layer struct in llama.cpp Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- examples/finetune/README.md | 6 +- examples/finetune/finetune.cpp | 242 +++++++++--------- .../train-text-from-scratch.cpp | 54 ++-- 3 files changed, 151 insertions(+), 151 deletions(-) diff --git a/examples/finetune/README.md b/examples/finetune/README.md index a884706c5..2fafd505e 100644 --- a/examples/finetune/README.md +++ b/examples/finetune/README.md @@ -80,9 +80,9 @@ The LORA rank can be configured for each model tensor type separately with these --rank-wk N LORA rank for wk tensor (default 4) --rank-wv N LORA rank for wv tensor (default 4) --rank-wo N LORA rank for wo tensor (default 4) - --rank-w1 N LORA rank for w1 tensor (default 4) - --rank-w2 N LORA rank for w2 tensor (default 4) - --rank-w3 N LORA rank for w3 tensor (default 4) + --rank-ffn_gate N LORA rank for ffn_gate tensor (default 4) + --rank-ffn_down N LORA rank for ffn_down tensor (default 4) + --rank-ffn_up N LORA rank for ffn_up tensor (default 4) ``` The LORA rank of 'norm' tensors should always be 1. 
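For reference, the new names map directly onto the roles these tensors play in the SwiGLU feed-forward block that finetune.cpp and train-text-from-scratch.cpp build (the t25..t29 graph nodes further down in this patch). A minimal sketch of that block using the new names, illustrative only; `ctx`, `layer` and `cur` stand for the ggml context, the layer weights and the current activations, as in the graph-building code below:

    // ffn_up   (formerly w3): n_embd -> n_ff
    // ffn_gate (formerly w1): n_embd -> n_ff, passed through SiLU
    // ffn_down (formerly w2): n_ff   -> n_embd
    struct ggml_tensor * up   = ggml_mul_mat(ctx, layer.ffn_up,   cur);
    struct ggml_tensor * gate = ggml_silu(ctx, ggml_mul_mat(ctx, layer.ffn_gate, cur));
    cur = ggml_mul_mat(ctx, layer.ffn_down, ggml_mul(ctx, gate, up));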
diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b11c56020..98bf5a07a 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -60,9 +60,9 @@ struct my_llama_layer { struct ggml_tensor * ffn_norm; // ff - struct ggml_tensor * w1; - struct ggml_tensor * w2; - struct ggml_tensor * w3; + struct ggml_tensor * ffn_gate; // w1 + struct ggml_tensor * ffn_down; // w2 + struct ggml_tensor * ffn_up; // w3 }; struct my_llama_model { @@ -85,9 +85,9 @@ struct my_llama_lora_hparams { uint32_t n_rank_wv = 4; uint32_t n_rank_wo = 4; uint32_t n_rank_ffn_norm = 1; - uint32_t n_rank_w1 = 4; - uint32_t n_rank_w2 = 4; - uint32_t n_rank_w3 = 4; + uint32_t n_rank_ffn_gate = 4; + uint32_t n_rank_ffn_down = 4; + uint32_t n_rank_ffn_up = 4; uint32_t n_rank_tok_embeddings = 4; uint32_t n_rank_norm = 1; uint32_t n_rank_output = 4; @@ -117,12 +117,12 @@ struct my_llama_lora_layer { struct ggml_tensor * ffn_norm_b; // ff - struct ggml_tensor * w1_a; - struct ggml_tensor * w1_b; - struct ggml_tensor * w2_a; - struct ggml_tensor * w2_b; - struct ggml_tensor * w3_a; - struct ggml_tensor * w3_b; + struct ggml_tensor * ffn_gate_a; + struct ggml_tensor * ffn_gate_b; + struct ggml_tensor * ffn_down_a; + struct ggml_tensor * ffn_down_b; + struct ggml_tensor * ffn_up_a; + struct ggml_tensor * ffn_up_b; }; struct my_llama_lora { @@ -208,9 +208,9 @@ static void print_lora_params(struct my_llama_lora_hparams * params) { printf("%s: n_rank_wv : %u\n", __func__, params->n_rank_wv); printf("%s: n_rank_wo : %u\n", __func__, params->n_rank_wo); printf("%s: n_rank_ffn_norm : %u\n", __func__, params->n_rank_ffn_norm); - printf("%s: n_rank_w1 : %u\n", __func__, params->n_rank_w1); - printf("%s: n_rank_w2 : %u\n", __func__, params->n_rank_w2); - printf("%s: n_rank_w3 : %u\n", __func__, params->n_rank_w3); + printf("%s: n_rank_ffn_gate : %u\n", __func__, params->n_rank_ffn_gate); + printf("%s: n_rank_ffn_down : %u\n", __func__, params->n_rank_ffn_down); + printf("%s: n_rank_ffn_up : %u\n", __func__, params->n_rank_ffn_up); printf("%s: n_rank_tok_embeddings : %u\n", __func__, params->n_rank_tok_embeddings); printf("%s: n_rank_norm : %u\n", __func__, params->n_rank_norm); printf("%s: n_rank_output : %u\n", __func__, params->n_rank_output); @@ -319,9 +319,9 @@ static void init_model(struct llama_model * input, struct my_llama_model * model layer.wv = llama_get_model_tensor(input, tni(LLM_TENSOR_ATTN_V, i)); layer.wo = llama_get_model_tensor(input, tni(LLM_TENSOR_ATTN_OUT, i)); layer.ffn_norm = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_NORM, i)); - layer.w1 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_GATE, i)); - layer.w2 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_DOWN, i)); - layer.w3 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_UP, i)); + layer.ffn_gate = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_GATE, i)); + layer.ffn_down = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_DOWN, i)); + layer.ffn_up = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_UP, i)); assert_shape_1d(layer.attention_norm, hparams.n_embd); assert_shape_2d(layer.wq, hparams.n_embd, hparams.n_embd); @@ -329,9 +329,9 @@ static void init_model(struct llama_model * input, struct my_llama_model * model assert_shape_2d(layer.wv, hparams.n_embd, hparams.n_embd_gqa()); assert_shape_2d(layer.wo, hparams.n_embd, hparams.n_embd); assert_shape_1d(layer.ffn_norm, hparams.n_embd); - assert_shape_2d(layer.w1, hparams.n_embd, hparams.n_ff); - assert_shape_2d(layer.w2, hparams.n_ff, 
hparams.n_embd); - assert_shape_2d(layer.w3, hparams.n_embd, hparams.n_ff); + assert_shape_2d(layer.ffn_gate, hparams.n_embd, hparams.n_ff); + assert_shape_2d(layer.ffn_down, hparams.n_ff, hparams.n_embd); + assert_shape_2d(layer.ffn_up, hparams.n_embd, hparams.n_ff); } } @@ -362,12 +362,12 @@ static void set_param_lora(struct my_llama_lora * lora) { ggml_set_param(ctx, layer.wo_b); ggml_set_param(ctx, layer.ffn_norm_a); ggml_set_param(ctx, layer.ffn_norm_b); - ggml_set_param(ctx, layer.w1_a); - ggml_set_param(ctx, layer.w1_b); - ggml_set_param(ctx, layer.w2_a); - ggml_set_param(ctx, layer.w2_b); - ggml_set_param(ctx, layer.w3_a); - ggml_set_param(ctx, layer.w3_b); + ggml_set_param(ctx, layer.ffn_gate_a); + ggml_set_param(ctx, layer.ffn_gate_b); + ggml_set_param(ctx, layer.ffn_down_a); + ggml_set_param(ctx, layer.ffn_down_b); + ggml_set_param(ctx, layer.ffn_up_a); + ggml_set_param(ctx, layer.ffn_up_b); } } @@ -435,12 +435,12 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora layer.ffn_norm_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_norm, n_embd); layer.ffn_norm_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_norm, 1); - layer.w1_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w1, n_embd); - layer.w1_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w1, n_ff); - layer.w2_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w2, n_ff); - layer.w2_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w2, n_embd); - layer.w3_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w3, n_embd); - layer.w3_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w3, n_ff); + layer.ffn_gate_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_gate, n_embd); + layer.ffn_gate_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_gate, n_ff); + layer.ffn_down_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_down, n_ff); + layer.ffn_down_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_down, n_embd); + layer.ffn_up_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_up, n_embd); + layer.ffn_up_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_up, n_ff); ggml_set_name(layer.attention_norm_a, tni(LLM_TENSOR_ATTN_NORM, ".weight.lora_a", i)); ggml_set_name(layer.attention_norm_b, tni(LLM_TENSOR_ATTN_NORM, ".weight.lora_b", i)); @@ -454,12 +454,12 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora ggml_set_name(layer.wo_b, tni(LLM_TENSOR_ATTN_OUT, ".weight.lora_b", i)); ggml_set_name(layer.ffn_norm_a, tni(LLM_TENSOR_FFN_NORM, ".weight.lora_a", i)); ggml_set_name(layer.ffn_norm_b, tni(LLM_TENSOR_FFN_NORM, ".weight.lora_b", i)); - ggml_set_name(layer.w1_a, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_a", i)); - ggml_set_name(layer.w1_b, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_b", i)); - ggml_set_name(layer.w2_a, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_a", i)); - ggml_set_name(layer.w2_b, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_b", i)); - ggml_set_name(layer.w3_a, tni(LLM_TENSOR_FFN_UP, ".weight.lora_a", i)); - ggml_set_name(layer.w3_b, tni(LLM_TENSOR_FFN_UP, ".weight.lora_b", i)); + ggml_set_name(layer.ffn_gate_a, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_gate_b, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_b", i)); + ggml_set_name(layer.ffn_down_a, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_down_b, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_b", i)); + 
ggml_set_name(layer.ffn_up_a, tni(LLM_TENSOR_FFN_UP, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_up_b, tni(LLM_TENSOR_FFN_UP, ".weight.lora_b", i)); } set_param_lora(lora); @@ -497,12 +497,12 @@ static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, fl randomize_tensor_normal(layer.ffn_norm_a, rnd); ggml_set_zero(layer.ffn_norm_b); - randomize_tensor_normal(layer.w1_a, rnd); - ggml_set_zero(layer.w1_b); - randomize_tensor_normal(layer.w2_a, rnd); - ggml_set_zero(layer.w2_b); - randomize_tensor_normal(layer.w3_a, rnd); - ggml_set_zero(layer.w3_b); + randomize_tensor_normal(layer.ffn_gate_a, rnd); + ggml_set_zero(layer.ffn_gate_b); + randomize_tensor_normal(layer.ffn_down_a, rnd); + ggml_set_zero(layer.ffn_down_b); + randomize_tensor_normal(layer.ffn_up_a, rnd); + ggml_set_zero(layer.ffn_up_b); } free_random_normal_distribution(rnd); @@ -610,13 +610,13 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( struct ggml_tensor * attention_norm = add_to_f32(ctx, layer.attention_norm, ggml_mul_mat(ctx, llayer.attention_norm_a, llayer.attention_norm_b)); struct ggml_tensor * ffn_norm = add_to_f32(ctx, layer.ffn_norm, ggml_mul_mat(ctx, llayer.ffn_norm_a, llayer.ffn_norm_b)); - struct ggml_tensor * wq = add_to_f32(ctx, layer.wq, ggml_mul_mat(ctx, llayer.wq_a, llayer.wq_b)); - struct ggml_tensor * wk = add_to_f32(ctx, layer.wk, ggml_mul_mat(ctx, llayer.wk_a, llayer.wk_b)); - struct ggml_tensor * wv = add_to_f32(ctx, layer.wv, ggml_mul_mat(ctx, llayer.wv_a, llayer.wv_b)); - struct ggml_tensor * wo = add_to_f32(ctx, layer.wo, ggml_mul_mat(ctx, llayer.wo_a, llayer.wo_b)); - struct ggml_tensor * w1 = add_to_f32(ctx, layer.w1, ggml_mul_mat(ctx, llayer.w1_a, llayer.w1_b)); - struct ggml_tensor * w2 = add_to_f32(ctx, layer.w2, ggml_mul_mat(ctx, llayer.w2_a, llayer.w2_b)); - struct ggml_tensor * w3 = add_to_f32(ctx, layer.w3, ggml_mul_mat(ctx, llayer.w3_a, llayer.w3_b)); + struct ggml_tensor * wq = add_to_f32(ctx, layer.wq, ggml_mul_mat(ctx, llayer.wq_a, llayer.wq_b)); + struct ggml_tensor * wk = add_to_f32(ctx, layer.wk, ggml_mul_mat(ctx, llayer.wk_a, llayer.wk_b)); + struct ggml_tensor * wv = add_to_f32(ctx, layer.wv, ggml_mul_mat(ctx, llayer.wv_a, llayer.wv_b)); + struct ggml_tensor * wo = add_to_f32(ctx, layer.wo, ggml_mul_mat(ctx, llayer.wo_a, llayer.wo_b)); + struct ggml_tensor * ffn_gate = add_to_f32(ctx, layer.ffn_gate, ggml_mul_mat(ctx, llayer.ffn_gate_a, llayer.ffn_gate_b)); + struct ggml_tensor * ffn_down = add_to_f32(ctx, layer.ffn_down, ggml_mul_mat(ctx, llayer.ffn_down_a, llayer.ffn_down_b)); + struct ggml_tensor * ffn_up = add_to_f32(ctx, layer.ffn_up, ggml_mul_mat(ctx, llayer.ffn_up_a, llayer.ffn_up_b)); struct ggml_tensor * t02 = ggml_rms_norm (ctx, cur, rms_norm_eps); set_name(t02, "t02"); assert_shape_2d(t02, n_embd, N*n_batch); struct ggml_tensor * t03 = ggml_repeat (ctx, attention_norm, t02); set_name(t03, "t03"); assert_shape_2d(t03, n_embd, N*n_batch); @@ -659,11 +659,11 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( struct ggml_tensor * t22 = ggml_rms_norm (ctx, t21, rms_norm_eps); set_name(t22, "t22"); assert_shape_2d(t22, n_embd, N*n_batch); struct ggml_tensor * t23 = ggml_repeat (ctx, ffn_norm, t22); set_name(t23, "t23"); assert_shape_2d(t23, n_embd, N*n_batch); struct ggml_tensor * t24 = ggml_mul (ctx, t23, t22); set_name(t24, "t24"); assert_shape_2d(t24, n_embd, N*n_batch); - struct ggml_tensor * t25 = ggml_mul_mat (ctx, w3, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); - struct ggml_tensor * t26 = 
ggml_mul_mat (ctx, w1, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); + struct ggml_tensor * t25 = ggml_mul_mat (ctx, ffn_up, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); + struct ggml_tensor * t26 = ggml_mul_mat (ctx, ffn_gate, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); struct ggml_tensor * t27 = ggml_silu (ctx, t26); set_name(t27, "t27"); assert_shape_2d(t27, n_ff, N*n_batch); struct ggml_tensor * t28 = ggml_mul (ctx, t27, t25); set_name(t28, "t28"); assert_shape_2d(t28, n_ff, N*n_batch); - struct ggml_tensor * t29 = ggml_mul_mat (ctx, w2, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); + struct ggml_tensor * t29 = ggml_mul_mat (ctx, ffn_down, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); struct ggml_tensor * t30 = ggml_add (ctx, t29, t21); set_name(t30, "t30"); assert_shape_2d(t30, n_embd, N*n_batch); cur = t30; if (enable_checkpointing) { @@ -723,9 +723,9 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, 1.0f)); ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, 1.0f)); ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_gate, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_down, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_up, 1.0f)); } // allocating checkpoints in one block to reduce memory fragmentation @@ -798,9 +798,9 @@ static void load_llama_lora_gguf(struct gguf_context * fctx, struct ggml_context GGUF_GET_KEY(fctx, lora->hparams.n_rank_wv, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_ATTN_V); GGUF_GET_KEY(fctx, lora->hparams.n_rank_wo, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_ATTN_OUT); GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_norm, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_NORM); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w1, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_GATE); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w2, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w3, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_UP); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_gate, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_GATE); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_down, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_up, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_UP); init_lora(model, lora); @@ -825,12 +825,12 @@ static void load_llama_lora_gguf(struct gguf_context * fctx, struct ggml_context copy_tensor_by_name(layer.wo_b, f_ggml_ctx, ggml_get_name(layer.wo_b)); copy_tensor_by_name(layer.ffn_norm_a, f_ggml_ctx, ggml_get_name(layer.ffn_norm_a)); copy_tensor_by_name(layer.ffn_norm_b, f_ggml_ctx, ggml_get_name(layer.ffn_norm_b)); - copy_tensor_by_name(layer.w1_a, f_ggml_ctx, ggml_get_name(layer.w1_a)); - copy_tensor_by_name(layer.w1_b, f_ggml_ctx, ggml_get_name(layer.w1_b)); - 
copy_tensor_by_name(layer.w2_a, f_ggml_ctx, ggml_get_name(layer.w2_a)); - copy_tensor_by_name(layer.w2_b, f_ggml_ctx, ggml_get_name(layer.w2_b)); - copy_tensor_by_name(layer.w3_a, f_ggml_ctx, ggml_get_name(layer.w3_a)); - copy_tensor_by_name(layer.w3_b, f_ggml_ctx, ggml_get_name(layer.w3_b)); + copy_tensor_by_name(layer.ffn_gate_a, f_ggml_ctx, ggml_get_name(layer.ffn_gate_a)); + copy_tensor_by_name(layer.ffn_gate_b, f_ggml_ctx, ggml_get_name(layer.ffn_gate_b)); + copy_tensor_by_name(layer.ffn_down_a, f_ggml_ctx, ggml_get_name(layer.ffn_down_a)); + copy_tensor_by_name(layer.ffn_down_b, f_ggml_ctx, ggml_get_name(layer.ffn_down_b)); + copy_tensor_by_name(layer.ffn_up_a, f_ggml_ctx, ggml_get_name(layer.ffn_up_a)); + copy_tensor_by_name(layer.ffn_up_b, f_ggml_ctx, ggml_get_name(layer.ffn_up_b)); } } @@ -868,9 +868,9 @@ static void save_llama_lora_gguf(struct gguf_context * fctx, struct my_llama_mod gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_ATTN_V, lora->hparams.n_rank_wv); gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_ATTN_OUT, lora->hparams.n_rank_wo); gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_NORM, lora->hparams.n_rank_ffn_norm); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_GATE, lora->hparams.n_rank_w1); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN, lora->hparams.n_rank_w2); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_UP, lora->hparams.n_rank_w3); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_GATE, lora->hparams.n_rank_ffn_gate); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN, lora->hparams.n_rank_ffn_down); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_UP, lora->hparams.n_rank_ffn_up); gguf_add_tensor(fctx, lora->tok_embeddings_a); gguf_add_tensor(fctx, lora->tok_embeddings_b); @@ -894,12 +894,12 @@ static void save_llama_lora_gguf(struct gguf_context * fctx, struct my_llama_mod gguf_add_tensor(fctx, layer.wo_b); gguf_add_tensor(fctx, layer.ffn_norm_a); gguf_add_tensor(fctx, layer.ffn_norm_b); - gguf_add_tensor(fctx, layer.w1_a); - gguf_add_tensor(fctx, layer.w1_b); - gguf_add_tensor(fctx, layer.w2_a); - gguf_add_tensor(fctx, layer.w2_b); - gguf_add_tensor(fctx, layer.w3_a); - gguf_add_tensor(fctx, layer.w3_b); + gguf_add_tensor(fctx, layer.ffn_gate_a); + gguf_add_tensor(fctx, layer.ffn_gate_b); + gguf_add_tensor(fctx, layer.ffn_down_a); + gguf_add_tensor(fctx, layer.ffn_down_b); + gguf_add_tensor(fctx, layer.ffn_up_a); + gguf_add_tensor(fctx, layer.ffn_up_b); } } @@ -1104,12 +1104,12 @@ static void save_as_llama_lora(const char * filename, struct my_llama_lora * lor write_tensor(&file, layer.wo_b, tni(LLM_TENSOR_ATTN_OUT, i, ".weight.loraB")); write_tensor(&file, layer.ffn_norm_a, tni(LLM_TENSOR_FFN_NORM, i, ".weight.loraA")); write_tensor(&file, layer.ffn_norm_b, tni(LLM_TENSOR_FFN_NORM, i, ".weight.loraB")); - write_tensor(&file, layer.w1_a, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraA")); - write_tensor(&file, layer.w1_b, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraB")); - write_tensor(&file, layer.w2_a, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraA")); - write_tensor(&file, layer.w2_b, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraB")); - write_tensor(&file, layer.w3_a, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraA")); - write_tensor(&file, layer.w3_b, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_gate_a, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_gate_b, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_down_a, 
tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_down_b, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_up_a, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_up_b, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraB")); } } @@ -1139,9 +1139,9 @@ struct train_params { uint32_t n_rank_wv; uint32_t n_rank_wo; uint32_t n_rank_ffn_norm; - uint32_t n_rank_w1; - uint32_t n_rank_w2; - uint32_t n_rank_w3; + uint32_t n_rank_ffn_gate; + uint32_t n_rank_ffn_down; + uint32_t n_rank_ffn_up; uint32_t n_rank_tok_embeddings; uint32_t n_rank_norm; uint32_t n_rank_output; @@ -1152,9 +1152,9 @@ struct train_params { bool custom_n_rank_wv; bool custom_n_rank_wo; bool custom_n_rank_ffn_norm; - bool custom_n_rank_w1; - bool custom_n_rank_w2; - bool custom_n_rank_w3; + bool custom_n_rank_ffn_gate; + bool custom_n_rank_ffn_down; + bool custom_n_rank_ffn_up; bool custom_n_rank_tok_embeddings; bool custom_n_rank_norm; bool custom_n_rank_output; @@ -1186,9 +1186,9 @@ static struct train_params get_default_train_params() { params.n_rank_wv = 4; params.n_rank_wo = 4; params.n_rank_ffn_norm = 1; - params.n_rank_w1 = 4; - params.n_rank_w2 = 4; - params.n_rank_w3 = 4; + params.n_rank_ffn_gate = 4; + params.n_rank_ffn_down = 4; + params.n_rank_ffn_up = 4; params.n_rank_tok_embeddings = 4; params.n_rank_norm = 1; params.n_rank_output = 4; @@ -1199,9 +1199,9 @@ static struct train_params get_default_train_params() { params.custom_n_rank_wv = false; params.custom_n_rank_wo = false; params.custom_n_rank_ffn_norm = false; - params.custom_n_rank_w1 = false; - params.custom_n_rank_w2 = false; - params.custom_n_rank_w3 = false; + params.custom_n_rank_ffn_gate = false; + params.custom_n_rank_ffn_down = false; + params.custom_n_rank_ffn_up = false; params.custom_n_rank_tok_embeddings = false; params.custom_n_rank_norm = false; params.custom_n_rank_output = false; @@ -1232,9 +1232,9 @@ static void train_print_usage(int argc, char ** argv, const struct train_params fprintf(stderr, " --rank-wk N LORA rank for wk tensor, overrides default rank.\n"); fprintf(stderr, " --rank-wv N LORA rank for wv tensor, overrides default rank.\n"); fprintf(stderr, " --rank-wo N LORA rank for wo tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w1 N LORA rank for w1 tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w2 N LORA rank for w2 tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w3 N LORA rank for w3 tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_gate N LORA rank for ffn_gate tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_down N LORA rank for ffn_down tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_up N LORA rank for ffn_up tensor, overrides default rank.\n"); print_common_train_usage(argc, argv, ¶ms->common); } @@ -1369,27 +1369,27 @@ static bool train_params_parse(int argc, char ** argv, struct train_params * par } params->n_rank_wo = std::stoi(argv[i]); params->custom_n_rank_wo = true; - } else if (arg == "--rank-w1") { + } else if (arg == "--rank-ffn_gate") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w1 = std::stoi(argv[i]); - params->custom_n_rank_w1 = true; - } else if (arg == "--rank-w2") { + params->n_rank_ffn_gate = std::stoi(argv[i]); + params->custom_n_rank_ffn_gate = true; + } else if (arg == "--rank-ffn_down") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w2 = std::stoi(argv[i]); - params->custom_n_rank_w2 
= true; - } else if (arg == "--rank-w3") { + params->n_rank_ffn_down = std::stoi(argv[i]); + params->custom_n_rank_ffn_down = true; + } else if (arg == "--rank-ffn_up") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w3 = std::stoi(argv[i]); - params->custom_n_rank_w3 = true; + params->n_rank_ffn_up = std::stoi(argv[i]); + params->custom_n_rank_ffn_up = true; } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); train_print_usage(argc, argv, &default_params); @@ -1452,12 +1452,12 @@ static int64_t get_parameter_count(struct my_llama_lora* lora) { nx += ggml_nelements(layer.wo_b); nx += ggml_nelements(layer.ffn_norm_a); nx += ggml_nelements(layer.ffn_norm_b); - nx += ggml_nelements(layer.w1_a); - nx += ggml_nelements(layer.w1_b); - nx += ggml_nelements(layer.w2_a); - nx += ggml_nelements(layer.w2_b); - nx += ggml_nelements(layer.w3_a); - nx += ggml_nelements(layer.w3_b); + nx += ggml_nelements(layer.ffn_gate_a); + nx += ggml_nelements(layer.ffn_gate_b); + nx += ggml_nelements(layer.ffn_down_a); + nx += ggml_nelements(layer.ffn_down_b); + nx += ggml_nelements(layer.ffn_up_a); + nx += ggml_nelements(layer.ffn_up_b); } return nx; } @@ -1511,9 +1511,9 @@ int main(int argc, char ** argv) { uint32_t n_rank_wv = params.custom_n_rank_wv ? params.n_rank_wv : params.lora_r; uint32_t n_rank_wo = params.custom_n_rank_wo ? params.n_rank_wo : params.lora_r; uint32_t n_rank_ffn_norm = params.custom_n_rank_ffn_norm ? params.n_rank_ffn_norm : 1; - uint32_t n_rank_w1 = params.custom_n_rank_w1 ? params.n_rank_w1 : params.lora_r; - uint32_t n_rank_w2 = params.custom_n_rank_w2 ? params.n_rank_w2 : params.lora_r; - uint32_t n_rank_w3 = params.custom_n_rank_w3 ? params.n_rank_w3 : params.lora_r; + uint32_t n_rank_ffn_gate = params.custom_n_rank_ffn_gate ? params.n_rank_ffn_gate : params.lora_r; + uint32_t n_rank_ffn_down = params.custom_n_rank_ffn_down ? params.n_rank_ffn_down : params.lora_r; + uint32_t n_rank_ffn_up = params.custom_n_rank_ffn_up ? params.n_rank_ffn_up : params.lora_r; uint32_t n_rank_tok_embeddings = params.custom_n_rank_tok_embeddings ? params.n_rank_tok_embeddings : params.lora_r; uint32_t n_rank_norm = params.custom_n_rank_norm ? params.n_rank_norm : 1; uint32_t n_rank_output = params.custom_n_rank_output ? 
params.n_rank_output : params.lora_r; @@ -1523,9 +1523,9 @@ int main(int argc, char ** argv) { lora.hparams.n_rank_wv = n_rank_wv; lora.hparams.n_rank_wo = n_rank_wo; lora.hparams.n_rank_ffn_norm = n_rank_ffn_norm; - lora.hparams.n_rank_w1 = n_rank_w1; - lora.hparams.n_rank_w2 = n_rank_w2; - lora.hparams.n_rank_w3 = n_rank_w3; + lora.hparams.n_rank_ffn_gate = n_rank_ffn_gate; + lora.hparams.n_rank_ffn_down = n_rank_ffn_down; + lora.hparams.n_rank_ffn_up = n_rank_ffn_up; lora.hparams.n_rank_tok_embeddings = n_rank_tok_embeddings; lora.hparams.n_rank_norm = n_rank_norm; lora.hparams.n_rank_output = n_rank_output; @@ -1566,9 +1566,9 @@ int main(int argc, char ** argv) { || (lora.hparams.n_rank_wv != n_rank_wv) || (lora.hparams.n_rank_wo != n_rank_wo) || (lora.hparams.n_rank_ffn_norm != n_rank_ffn_norm) - || (lora.hparams.n_rank_w1 != n_rank_w1) - || (lora.hparams.n_rank_w2 != n_rank_w2) - || (lora.hparams.n_rank_w3 != n_rank_w3) + || (lora.hparams.n_rank_ffn_gate != n_rank_ffn_gate) + || (lora.hparams.n_rank_ffn_down != n_rank_ffn_down) + || (lora.hparams.n_rank_ffn_up != n_rank_ffn_up) || (lora.hparams.n_rank_tok_embeddings != n_rank_tok_embeddings) || (lora.hparams.n_rank_norm != n_rank_norm) || (lora.hparams.n_rank_output != n_rank_output) diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index 2e2a8ce08..bfdf124d7 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -50,9 +50,9 @@ struct my_llama_layer { struct ggml_tensor * ffn_norm; // ff - struct ggml_tensor * w1; - struct ggml_tensor * w2; - struct ggml_tensor * w3; + struct ggml_tensor * ffn_gate; // w1 + struct ggml_tensor * ffn_down; // w2 + struct ggml_tensor * ffn_up; // w3 }; struct my_llama_model { @@ -140,9 +140,9 @@ static void set_param_model(struct my_llama_model * model) { ggml_set_param(ctx, layer.wv); ggml_set_param(ctx, layer.wo); ggml_set_param(ctx, layer.ffn_norm); - ggml_set_param(ctx, layer.w1); - ggml_set_param(ctx, layer.w2); - ggml_set_param(ctx, layer.w3); + ggml_set_param(ctx, layer.ffn_gate); + ggml_set_param(ctx, layer.ffn_down); + ggml_set_param(ctx, layer.ffn_up); } } @@ -198,9 +198,9 @@ static void init_model(struct my_llama_model * model) { layer.ffn_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); - layer.w1 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); - layer.w2 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); - layer.w3 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + layer.ffn_gate = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + layer.ffn_down = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); + layer.ffn_up = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); ggml_set_name(layer.attention_norm, tni(LLM_TENSOR_ATTN_NORM, i)); @@ -211,9 +211,9 @@ static void init_model(struct my_llama_model * model) { ggml_set_name(layer.ffn_norm, tni(LLM_TENSOR_FFN_NORM, i)); - ggml_set_name(layer.w1, tni(LLM_TENSOR_FFN_GATE, i)); - ggml_set_name(layer.w2, tni(LLM_TENSOR_FFN_DOWN, i)); - ggml_set_name(layer.w3, tni(LLM_TENSOR_FFN_UP, i)); + ggml_set_name(layer.ffn_gate, tni(LLM_TENSOR_FFN_GATE, i)); + ggml_set_name(layer.ffn_down, tni(LLM_TENSOR_FFN_DOWN, i)); + ggml_set_name(layer.ffn_up, tni(LLM_TENSOR_FFN_UP, i)); } set_param_model(model); @@ -244,9 +244,9 @@ static void randomize_model(struct my_llama_model * model, int seed, float mean, randomize_tensor_normal(layer.ffn_norm, rnd); - 
randomize_tensor_normal(layer.w1, rnd); - randomize_tensor_normal(layer.w2, rnd); - randomize_tensor_normal(layer.w3, rnd); + randomize_tensor_normal(layer.ffn_gate, rnd); + randomize_tensor_normal(layer.ffn_down, rnd); + randomize_tensor_normal(layer.ffn_up, rnd); } free_random_normal_distribution(rnd); @@ -356,11 +356,11 @@ static struct ggml_tensor * llama_build_train_graphs( struct ggml_tensor * t22 = ggml_rms_norm (ctx, t21, f_norm_rms_eps); set_name(t22, "t22"); assert_shape_2d(t22, n_embd, N*n_batch); struct ggml_tensor * t23 = ggml_repeat (ctx, layer.ffn_norm, t22); set_name(t23, "t23"); assert_shape_2d(t23, n_embd, N*n_batch); struct ggml_tensor * t24 = ggml_mul (ctx, t23, t22); set_name(t24, "t24"); assert_shape_2d(t24, n_embd, N*n_batch); - struct ggml_tensor * t25 = ggml_mul_mat (ctx, layer.w3, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); - struct ggml_tensor * t26 = ggml_mul_mat (ctx, layer.w1, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); + struct ggml_tensor * t25 = ggml_mul_mat (ctx, layer.ffn_up, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); + struct ggml_tensor * t26 = ggml_mul_mat (ctx, layer.ffn_gate, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); struct ggml_tensor * t27 = ggml_silu (ctx, t26); set_name(t27, "t27"); assert_shape_2d(t27, n_ff, N*n_batch); struct ggml_tensor * t28 = ggml_mul (ctx, t27, t25); set_name(t28, "t28"); assert_shape_2d(t28, n_ff, N*n_batch); - struct ggml_tensor * t29 = ggml_mul_mat (ctx, layer.w2, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); + struct ggml_tensor * t29 = ggml_mul_mat (ctx, layer.ffn_down, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); struct ggml_tensor * t30 = ggml_add (ctx, t29, t21); set_name(t30, "t30"); assert_shape_2d(t30, n_embd, N*n_batch); cur = t30; checkpoints.push_back(cur); @@ -521,9 +521,9 @@ static void load_llama_model_gguf(struct gguf_context * fctx, struct ggml_contex copy_tensor_by_name(layer.wv, f_ggml_ctx, tni(LLM_TENSOR_ATTN_V, i)); copy_tensor_by_name(layer.wo, f_ggml_ctx, tni(LLM_TENSOR_ATTN_OUT, i)); copy_tensor_by_name(layer.ffn_norm, f_ggml_ctx, tni(LLM_TENSOR_FFN_NORM, i)); - copy_tensor_by_name(layer.w1, f_ggml_ctx, tni(LLM_TENSOR_FFN_GATE, i)); - copy_tensor_by_name(layer.w2, f_ggml_ctx, tni(LLM_TENSOR_FFN_DOWN, i)); - copy_tensor_by_name(layer.w3, f_ggml_ctx, tni(LLM_TENSOR_FFN_UP, i)); + copy_tensor_by_name(layer.ffn_gate, f_ggml_ctx, tni(LLM_TENSOR_FFN_GATE, i)); + copy_tensor_by_name(layer.ffn_down, f_ggml_ctx, tni(LLM_TENSOR_FFN_DOWN, i)); + copy_tensor_by_name(layer.ffn_up, f_ggml_ctx, tni(LLM_TENSOR_FFN_UP, i)); } } @@ -664,9 +664,9 @@ static void save_llama_model_gguf(struct gguf_context * fctx, const char * fn_vo gguf_add_tensor(fctx, layer.wv); gguf_add_tensor(fctx, layer.wo); gguf_add_tensor(fctx, layer.ffn_norm); - gguf_add_tensor(fctx, layer.w1); - gguf_add_tensor(fctx, layer.w2); - gguf_add_tensor(fctx, layer.w3); + gguf_add_tensor(fctx, layer.ffn_gate); + gguf_add_tensor(fctx, layer.ffn_down); + gguf_add_tensor(fctx, layer.ffn_up); } } @@ -915,9 +915,9 @@ static int64_t get_parameter_count(struct my_llama_model* model) { nx += ggml_nelements(layer.wv); nx += ggml_nelements(layer.wo); nx += ggml_nelements(layer.ffn_norm); - nx += ggml_nelements(layer.w1); - nx += ggml_nelements(layer.w2); - nx += ggml_nelements(layer.w3); + nx += ggml_nelements(layer.ffn_gate); + nx += ggml_nelements(layer.ffn_down); + nx += ggml_nelements(layer.ffn_up); } return nx; } From 
037259be689353081e7bae3c1ab4ab18e7fbe8c9 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 13 Feb 2024 15:24:50 +0200 Subject: [PATCH 587/811] llama : make load error reporting more granular (#5477) Makes it easier to pinpoint where e.g. `unordered_map::at: key not found` comes from. --- llama.cpp | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/llama.cpp b/llama.cpp index 381a03068..61c695187 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4384,9 +4384,21 @@ static int llama_model_load(const std::string & fname, llama_model & model, llam model.hparams.vocab_only = params.vocab_only; - llm_load_arch (ml, model); - llm_load_hparams(ml, model); - llm_load_vocab (ml, model); + try { + llm_load_arch(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model architecture: " + std::string(e.what())); + } + try { + llm_load_hparams(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model hyperparameters: " + std::string(e.what())); + } + try { + llm_load_vocab(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model vocabulary: " + std::string(e.what())); + } llm_load_print_meta(ml, model); From c4e6dd59e45ef7b14f7763fb073b517395dc176c Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 13 Feb 2024 18:18:16 +0200 Subject: [PATCH 588/811] llama : allow raw byte in SPM vocabs; don't crash on nl 404 (#5478) * common : don't crash if newline token is not found * common : llama_byte_to_token: allow falling back to finding just the token byte in SPM vocabs --- llama.cpp | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 61c695187..8ebbf7628 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3314,7 +3314,12 @@ static void llm_load_vocab( // determine the newline token: LLaMA "<0x0A>" == 10 == '\n', Falcon 193 == '\n' if (vocab.type == LLAMA_VOCAB_TYPE_SPM) { - vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + try { + vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + } catch (const std::exception & e) { + LLAMA_LOG_WARN("%s: SPM vocabulary, but newline token not found: %s! 
Using special_pad_id instead.", __func__, e.what()); + vocab.linefeed_id = vocab.special_pad_id; + } } else if (vocab.type == LLAMA_VOCAB_TYPE_WPM) { vocab.linefeed_id = vocab.special_pad_id; } else { @@ -7746,7 +7751,13 @@ static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { switch (llama_vocab_get_type(vocab)) { case LLAMA_VOCAB_TYPE_SPM: { const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; - return vocab.token_to_id.at(buf); + auto token = vocab.token_to_id.find(buf); + if (token != vocab.token_to_id.end()) { + return (*token).second; + } + // Try to fall back to just the byte as a string + const char buf2[2] = { (char)ch, 0 }; + return vocab.token_to_id.at(buf2); } case LLAMA_VOCAB_TYPE_WPM: case LLAMA_VOCAB_TYPE_BPE: { From ea9c8e11436ad50719987fa23a289c74b7b40d40 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 13 Feb 2024 12:03:53 -0500 Subject: [PATCH 589/811] llama : add support for Nomic Embed (#5468) --- convert-hf-to-gguf.py | 117 ++++++++++++------- gguf-py/gguf/constants.py | 56 +++++---- gguf-py/gguf/tensor_mapping.py | 12 +- llama.cpp | 201 ++++++++++++++++++++++++--------- 4 files changed, 273 insertions(+), 113 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 5adfdc143..ae471481d 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -10,7 +10,7 @@ import re import sys from enum import IntEnum from pathlib import Path -from typing import TYPE_CHECKING, Any, ContextManager, Iterator, cast +from typing import TYPE_CHECKING, Any, ContextManager, Iterator, Sequence, cast import numpy as np import torch @@ -25,15 +25,6 @@ import gguf from convert import HfVocab -# check for any of the given keys in the dictionary and return the value of the first key found -def get_key_opts(d, keys): - for k in keys: - if k in d: - return d[k] - print(f"Could not find any of {keys}") - sys.exit() - - ###### MODEL DEFINITIONS ###### class SentencePieceTokenTypes(IntEnum): @@ -58,6 +49,15 @@ class Model: self.hparams = Model.load_hparams(self.dir_model) self.model_arch = self._get_model_architecture() self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess, use_temp_file=False) + self.block_count = self.find_hparam(["n_layers", "num_hidden_layers", "n_layer"]) + + def find_hparam(self, keys: Sequence[str], optional: bool = False) -> Any: + key = next((k for k in keys if k in self.hparams), None) + if key is not None: + return self.hparams[key] + if optional: + return None + raise KeyError(f"could not find any of: {keys}") def set_vocab(self): self._set_vocab_gpt2() @@ -79,28 +79,33 @@ class Model: def set_gguf_parameters(self): self.gguf_writer.add_name(self.dir_model.name) - self.gguf_writer.add_block_count(self.hparams.get( - "n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer")), - )) - if (n_ctx := self.hparams.get("max_position_embeddings")) is not None: + self.gguf_writer.add_block_count(self.block_count) + + if (n_ctx := self.find_hparam(["max_position_embeddings", "n_ctx"], optional=True)) is not None: self.gguf_writer.add_context_length(n_ctx) - if (n_embd := self.hparams.get("hidden_size")) is not None: - self.gguf_writer.add_embedding_length(n_embd) - if (n_ff := self.hparams.get("intermediate_size")) is not None: + + n_embd = self.find_hparam(["hidden_size", "n_embd"]) + self.gguf_writer.add_embedding_length(n_embd) + + if (n_ff := self.find_hparam(["intermediate_size", "n_inner"], optional=True)) is not 
None: self.gguf_writer.add_feed_forward_length(n_ff) - if (n_head := self.hparams.get("num_attention_heads")) is not None: - self.gguf_writer.add_head_count(n_head) + + n_head = self.find_hparam(["num_attention_heads", "n_head"]) + self.gguf_writer.add_head_count(n_head) + if (n_head_kv := self.hparams.get("num_key_value_heads")) is not None: self.gguf_writer.add_head_count_kv(n_head_kv) - if (n_rms_eps := self.hparams.get("rms_norm_eps")) is not None: - self.gguf_writer.add_layer_norm_rms_eps(n_rms_eps) + if (f_rms_eps := self.hparams.get("rms_norm_eps")) is not None: + self.gguf_writer.add_layer_norm_rms_eps(f_rms_eps) + if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon"], optional=True)) is not None: + self.gguf_writer.add_layer_norm_eps(f_norm_eps) if (n_experts := self.hparams.get("num_local_experts")) is not None: self.gguf_writer.add_expert_count(n_experts) if (n_experts_used := self.hparams.get("num_experts_per_tok")) is not None: self.gguf_writer.add_expert_used_count(n_experts_used) - self.gguf_writer.add_parallel_residual(self.hparams.get("use_parallel_residual", True)) + self.gguf_writer.add_file_type(self.ftype) def write_tensors(self): block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) @@ -211,6 +216,8 @@ class Model: return MiniCPMModel if model_architecture == "BertModel": return BertModel + if model_architecture == "NomicBertModel": + return NomicBertModel return Model def _is_model_safetensors(self) -> bool: @@ -268,6 +275,8 @@ class Model: return gguf.MODEL_ARCH.MINICPM if arch == "BertModel": return gguf.MODEL_ARCH.BERT + if arch == "NomicBertModel": + return gguf.MODEL_ARCH.NOMIC_BERT raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1297,21 +1306,21 @@ class GPT2Model(Model): class Phi2Model(Model): def set_gguf_parameters(self): - block_count = get_key_opts(self.hparams, ["num_hidden_layers", "n_layer"]) + block_count = self.find_hparam(["num_hidden_layers", "n_layer"]) - rot_pct = get_key_opts(self.hparams, ["partial_rotary_factor"]) - n_embd = get_key_opts(self.hparams, ["hidden_size", "n_embd"]) - n_head = get_key_opts(self.hparams, ["num_attention_heads", "n_head"]) + rot_pct = self.find_hparam(["partial_rotary_factor"]) + n_embd = self.find_hparam(["hidden_size", "n_embd"]) + n_head = self.find_hparam(["num_attention_heads", "n_head"]) self.gguf_writer.add_name("Phi2") - self.gguf_writer.add_context_length(get_key_opts(self.hparams, ["n_positions", "max_position_embeddings"])) + self.gguf_writer.add_context_length(self.find_hparam(["n_positions", "max_position_embeddings"])) self.gguf_writer.add_embedding_length(n_embd) self.gguf_writer.add_feed_forward_length(4 * n_embd) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_head_count(n_head) self.gguf_writer.add_head_count_kv(n_head) - self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"])) + self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_epsilon", "layer_norm_eps"])) self.gguf_writer.add_rope_dimension_count(int(rot_pct * n_embd) // n_head) self.gguf_writer.add_file_type(self.ftype) self.gguf_writer.add_add_bos_token(False) @@ -1636,20 +1645,12 @@ in chat mode so that the conversation can end normally.") class BertModel(Model): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.block_count = self.hparams["num_hidden_layers"] + self.vocab_size = None def set_gguf_parameters(self): - # 
TODO(cebtenzzre): merge with parent class - self.gguf_writer.add_name(self.dir_model.name) - self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) - self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) - self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) - self.gguf_writer.add_block_count(self.block_count) - self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) - self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) + super().set_gguf_parameters() self.gguf_writer.add_causal_attention(False) self.gguf_writer.add_pooling_layer(True) - self.gguf_writer.add_file_type(self.ftype) def set_vocab(self): path = self.dir_model @@ -1659,6 +1660,7 @@ class BertModel(Model): vocab = HfVocab(path, added_tokens_path) tokens, scores, toktypes = zip(*vocab.all_tokens()) assert len(tokens) == vocab.vocab_size + self.vocab_size = vocab.vocab_size # we need this to validate the size of the token_type embeddings # though currently we are passing all zeros to the token_type embeddings @@ -1672,7 +1674,7 @@ class BertModel(Model): if tok.startswith(b"##"): return tok[2:] return b"\xe2\x96\x81" + tok - tokens = [phantom(t, y) for t, y in zip(tokens, toktypes)] + tokens = tuple(phantom(t, y) for t, y in zip(tokens, toktypes)) # set up bos and eos tokens (cls and sep) self.gguf_writer.add_bos_token_id(vocab.tokenizer.cls_token_id) @@ -1724,6 +1726,43 @@ class BertModel(Model): self.gguf_writer.add_tensor(new_name, data) +class NomicBertModel(BertModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # the HF config claims n_ctx=8192, but it uses RoPE scaling + self.hparams["n_ctx"] = 2048 + + # SwigLU activation + assert self.hparams["activation_function"] == "swiglu" + # this doesn't do anything in the HF version + assert self.hparams["causal"] is False + # no bias tensors + assert self.hparams["qkv_proj_bias"] is False + assert self.hparams["mlp_fc1_bias"] is False + assert self.hparams["mlp_fc2_bias"] is False + # norm at end of layer + assert self.hparams["prenorm"] is False + # standard RoPE + assert self.hparams["rotary_emb_fraction"] == 1.0 + assert self.hparams["rotary_emb_interleaved"] is False + assert self.hparams["rotary_emb_scale_base"] is None + + def set_gguf_parameters(self): + super().set_gguf_parameters() + self.gguf_writer.add_rope_freq_base(self.hparams["rotary_emb_base"]) + + def get_tensors(self): + assert self.vocab_size is not None + for name, data in super().get_tensors(): + # Nomic Embed's token embeddings tensor is padded, but llama.cpp wants tensor sizes to match exactly. 
+ if name == 'embeddings.word_embeddings.weight' and data.shape[1] != self.vocab_size: + rounded_vocab_size = (self.vocab_size + 63) // 64 * 64 + assert data.shape == (rounded_vocab_size, self.hparams["n_embd"]) + data = data[:self.vocab_size, :] + yield name, data + + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 644e1589c..5fba01714 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -87,27 +87,28 @@ class Keys: class MODEL_ARCH(IntEnum): - LLAMA = auto() - FALCON = auto() - BAICHUAN = auto() - GPT2 = auto() - GPTJ = auto() - GPTNEOX = auto() - MPT = auto() - STARCODER = auto() - PERSIMMON = auto() - REFACT = auto() - BERT = auto() - BLOOM = auto() - STABLELM = auto() - QWEN = auto() - QWEN2 = auto() - PHI2 = auto() - PLAMO = auto() - CODESHELL = auto() - ORION = auto() + LLAMA = auto() + FALCON = auto() + BAICHUAN = auto() + GPT2 = auto() + GPTJ = auto() + GPTNEOX = auto() + MPT = auto() + STARCODER = auto() + PERSIMMON = auto() + REFACT = auto() + BERT = auto() + NOMIC_BERT = auto() + BLOOM = auto() + STABLELM = auto() + QWEN = auto() + QWEN2 = auto() + PHI2 = auto() + PLAMO = auto() + CODESHELL = auto() + ORION = auto() INTERNLM2 = auto() - MINICPM = auto() + MINICPM = auto() class MODEL_TENSOR(IntEnum): @@ -153,6 +154,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.PERSIMMON: "persimmon", MODEL_ARCH.REFACT: "refact", MODEL_ARCH.BERT: "bert", + MODEL_ARCH.NOMIC_BERT: "nomic-bert", MODEL_ARCH.BLOOM: "bloom", MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", @@ -282,6 +284,20 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_UP, MODEL_TENSOR.LAYER_OUT_NORM, ], + MODEL_ARCH.NOMIC_BERT: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_EMBD_NORM, + MODEL_TENSOR.TOKEN_TYPES, + MODEL_TENSOR.POS_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_OUT_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.LAYER_OUT_NORM, + ], MODEL_ARCH.MPT: [ MODEL_TENSOR.TOKEN_EMBD, MODEL_TENSOR.OUTPUT_NORM, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index c7ba1420e..861003776 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -15,7 +15,7 @@ class TensorNameMap: "word_embeddings", # bloom "model.embed_tokens", # llama-hf "tok_embeddings", # llama-pth - "embeddings.word_embeddings", # bert + "embeddings.word_embeddings", # bert nomic-bert "language_model.embedding.word_embeddings", # persimmon "wte", # gpt2 "transformer.embd.wte", # phi2 @@ -24,13 +24,14 @@ class TensorNameMap: # Token type embeddings MODEL_TENSOR.TOKEN_TYPES: ( - "embeddings.token_type_embeddings", # bert + "embeddings.token_type_embeddings", # bert nomic-bert ), # Normalization of token embeddings MODEL_TENSOR.TOKEN_EMBD_NORM: ( "word_embeddings_layernorm", # bloom "embeddings.LayerNorm", # bert + "emb_ln", # nomic-bert ), # Position embeddings @@ -103,6 +104,7 @@ class TensorNameMap: "model.layers.{bid}.self_attn.query_key_value", # persimmon "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 + "encoder.layers.{bid}.attn.Wqkv", # nomic-bert ), # Attention query @@ -152,11 +154,13 @@ class TensorNameMap: "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo "model.layers.{bid}.attention.wo", # internlm2 + "encoder.layers.{bid}.attn.out_proj", # nomic-bert ), # Attention output norm 
MODEL_TENSOR.ATTN_OUT_NORM: ( "encoder.layer.{bid}.attention.output.LayerNorm", # bert + "encoder.layers.{bid}.norm1", # nomic-bert ), # Rotary embeddings @@ -205,6 +209,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo "model.layers.{bid}.feed_forward.w3", # internlm2 + "encoder.layers.{bid}.mlp.fc11", # nomic-bert ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -224,6 +229,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.w2", # qwen "model.layers.layers.{bid}.mlp.gate_proj", # plamo "model.layers.{bid}.feed_forward.w1", # internlm2 + "encoder.layers.{bid}.mlp.fc12", # nomic-bert ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -249,6 +255,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo "model.layers.{bid}.feed_forward.w2", # internlm2 + "encoder.layers.{bid}.mlp.fc2", # nomic-bert ), MODEL_TENSOR.FFN_DOWN_EXP: ( @@ -272,6 +279,7 @@ class TensorNameMap: MODEL_TENSOR.LAYER_OUT_NORM: ( "encoder.layer.{bid}.output.LayerNorm", # bert + "encoder.layers.{bid}.norm2", # nomic-bert ) } diff --git a/llama.cpp b/llama.cpp index 8ebbf7628..14e8821cd 100644 --- a/llama.cpp +++ b/llama.cpp @@ -197,6 +197,7 @@ enum llm_arch { LLM_ARCH_PERSIMMON, LLM_ARCH_REFACT, LLM_ARCH_BERT, + LLM_ARCH_NOMIC_BERT, LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, @@ -211,27 +212,28 @@ enum llm_arch { }; static std::map LLM_ARCH_NAMES = { - { LLM_ARCH_LLAMA, "llama" }, - { LLM_ARCH_FALCON, "falcon" }, - { LLM_ARCH_GPT2, "gpt2" }, - { LLM_ARCH_GPTJ, "gptj" }, - { LLM_ARCH_GPTNEOX, "gptneox" }, - { LLM_ARCH_MPT, "mpt" }, - { LLM_ARCH_BAICHUAN, "baichuan" }, - { LLM_ARCH_STARCODER, "starcoder" }, - { LLM_ARCH_PERSIMMON, "persimmon" }, - { LLM_ARCH_REFACT, "refact" }, - { LLM_ARCH_BERT, "bert" }, - { LLM_ARCH_BLOOM, "bloom" }, - { LLM_ARCH_STABLELM, "stablelm" }, - { LLM_ARCH_QWEN, "qwen" }, - { LLM_ARCH_QWEN2, "qwen2" }, - { LLM_ARCH_PHI2, "phi2" }, - { LLM_ARCH_PLAMO, "plamo" }, - { LLM_ARCH_CODESHELL, "codeshell" }, - { LLM_ARCH_ORION, "orion" }, - { LLM_ARCH_INTERNLM2, "internlm2" }, - { LLM_ARCH_MINICPM, "minicpm" }, + { LLM_ARCH_LLAMA, "llama" }, + { LLM_ARCH_FALCON, "falcon" }, + { LLM_ARCH_GPT2, "gpt2" }, + { LLM_ARCH_GPTJ, "gptj" }, + { LLM_ARCH_GPTNEOX, "gptneox" }, + { LLM_ARCH_MPT, "mpt" }, + { LLM_ARCH_BAICHUAN, "baichuan" }, + { LLM_ARCH_STARCODER, "starcoder" }, + { LLM_ARCH_PERSIMMON, "persimmon" }, + { LLM_ARCH_REFACT, "refact" }, + { LLM_ARCH_BERT, "bert" }, + { LLM_ARCH_NOMIC_BERT, "nomic-bert" }, + { LLM_ARCH_BLOOM, "bloom" }, + { LLM_ARCH_STABLELM, "stablelm" }, + { LLM_ARCH_QWEN, "qwen" }, + { LLM_ARCH_QWEN2, "qwen2" }, + { LLM_ARCH_PHI2, "phi2" }, + { LLM_ARCH_PLAMO, "plamo" }, + { LLM_ARCH_CODESHELL, "codeshell" }, + { LLM_ARCH_ORION, "orion" }, + { LLM_ARCH_INTERNLM2, "internlm2" }, + { LLM_ARCH_MINICPM, "minicpm" }, }; enum llm_kv { @@ -375,6 +377,7 @@ enum llm_tensor { LLM_TENSOR_ATTN_OUT, LLM_TENSOR_ATTN_NORM, LLM_TENSOR_ATTN_NORM_2, + LLM_TENSOR_ATTN_OUT_NORM, LLM_TENSOR_ATTN_ROT_EMBD, LLM_TENSOR_FFN_GATE_INP, LLM_TENSOR_FFN_NORM, @@ -387,6 +390,7 @@ enum llm_tensor { LLM_TENSOR_FFN_UP_EXP, LLM_TENSOR_ATTN_Q_NORM, LLM_TENSOR_ATTN_K_NORM, + LLM_TENSOR_LAYER_OUT_NORM, }; static std::map> LLM_TENSOR_NAMES = { @@ -552,12 +556,27 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, { LLM_TENSOR_TOKEN_TYPES, "token_types" }, { LLM_TENSOR_POS_EMBD, "position_embd" }, - { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_OUT_NORM, 
"blk.%d.attn_output_norm" }, { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, - { LLM_TENSOR_FFN_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, + { + LLM_ARCH_NOMIC_BERT, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, + { LLM_TENSOR_TOKEN_TYPES, "token_types" }, + { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, @@ -1485,6 +1504,7 @@ enum e_model { MODEL_22M, MODEL_33M, MODEL_109M, + MODEL_137M, MODEL_335M, MODEL_0_5B, MODEL_1B, @@ -1620,6 +1640,8 @@ struct llama_layer { struct ggml_tensor * attn_q_norm_b; struct ggml_tensor * attn_k_norm; struct ggml_tensor * attn_k_norm_b; + struct ggml_tensor * attn_out_norm; + struct ggml_tensor * attn_out_norm_b; // attention struct ggml_tensor * wq; @@ -1638,6 +1660,8 @@ struct llama_layer { // normalization struct ggml_tensor * ffn_norm; struct ggml_tensor * ffn_norm_b; + struct ggml_tensor * layer_out_norm; + struct ggml_tensor * layer_out_norm_b; // ff struct ggml_tensor * ffn_gate; // w1 @@ -2855,6 +2879,11 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { + case MODEL_22M: return "22M"; + case MODEL_33M: return "33M"; + case MODEL_109M: return "109M"; + case MODEL_137M: return "137M"; + case MODEL_0_5B: return "0.5B"; case MODEL_1B: return "1B"; case MODEL_2B: return "2B"; case MODEL_3B: return "3B"; @@ -3073,6 +3102,17 @@ static void llm_load_hparams( model.type = e_model::MODEL_335M; break; // bge-large } } break; + case LLM_ARCH_NOMIC_BERT: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + + if (hparams.n_layer == 12 && hparams.n_embd == 768) { + model.type = e_model::MODEL_137M; + } + } break; case LLM_ARCH_BLOOM: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3875,10 +3915,14 @@ static bool llm_load_tensors( } } break; case LLM_ARCH_BERT: + case LLM_ARCH_NOMIC_BERT: { - model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); - model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); - model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); + if (model.arch == LLM_ARCH_BERT) { + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + } + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); model.tok_norm_b = ml.create_tensor(ctx_output, 
tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); @@ -3888,29 +3932,38 @@ static bool llm_load_tensors( auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + if (model.arch == LLM_ARCH_BERT) { + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); - layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); - layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + } else { + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + } - layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); - layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); - layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + layer.attn_out_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT_NORM, "weight", i), {n_embd}); + layer.attn_out_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT_NORM, "bias", i), {n_embd}); - layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); - layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); - layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + if (model.arch == LLM_ARCH_BERT) { + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); - layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); - layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + } else { + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + } + + layer.layer_out_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_LAYER_OUT_NORM, "weight", i), {n_embd}); + layer.layer_out_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_LAYER_OUT_NORM, "bias", i), {n_embd}); } } break; 
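For reference, the fused `attn_qkv` weight created above for nomic-bert has rows laid out as `[Q | K | V]` with widths `n_embd`, `n_embd_gqa`, `n_embd_gqa`; the graph code later in this patch slices the matmul result with `ggml_view_2d` at offsets of `0`, `n_embd` and `n_embd + n_embd_gqa` floats. A minimal standalone sketch of that row layout (illustrative sizes only, not the real model dimensions and not part of the patch):

```cpp
// Sketch of the fused-QKV row layout assumed by the nomic-bert path above.
#include <cstdio>
#include <vector>

int main() {
    const int n_embd     = 8;  // illustrative only
    const int n_embd_gqa = 8;  // typically equal to n_embd when there is no grouped-query attention
    const int n_tokens   = 2;
    const int row        = n_embd + 2 * n_embd_gqa;

    // pretend this is the result of the wqkv matmul: one row per token
    std::vector<float> qkv(row * n_tokens);
    for (int i = 0; i < row * n_tokens; ++i) qkv[i] = float(i);

    for (int t = 0; t < n_tokens; ++t) {
        const float * q = &qkv[t * row];                       // offset 0
        const float * k = &qkv[t * row + n_embd];              // offset n_embd
        const float * v = &qkv[t * row + n_embd + n_embd_gqa]; // offset n_embd + n_embd_gqa
        std::printf("token %d: q[0]=%.0f k[0]=%.0f v[0]=%.0f\n", t, q[0], k[0], v[0]);
    }
    return 0;
}
```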
case LLM_ARCH_BLOOM: @@ -5773,6 +5826,7 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); struct ggml_tensor * cur; @@ -5789,7 +5843,9 @@ struct llm_build_context { // token types are hardcoded to zero ("Sentence A") struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); inpL = ggml_add(ctx0, inpL, type_row0); - inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + if (model.arch == LLM_ARCH_BERT) { + inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + } cb(inpL, "inp_embd", -1); // embed layer norm @@ -5805,7 +5861,7 @@ struct llm_build_context { struct ggml_tensor * cur = inpL; // self-attention - { + if (model.arch == LLM_ARCH_BERT) { struct ggml_tensor * Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); cb(Qcur, "Qcur", il); @@ -5818,6 +5874,37 @@ struct llm_build_context { // seems like we just need to do this for Q? Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } else { + // compute Q and K and RoPE them + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); @@ -5828,25 +5915,34 @@ struct llm_build_context { cur = ggml_add(ctx0, cur, inpL); // attention layer norm - cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_norm, model.layers[il].attn_norm_b, LLM_NORM, cb, il); + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_out_norm, model.layers[il].attn_out_norm_b, LLM_NORM, cb, il); struct ggml_tensor * ffn_inp = cur; cb(ffn_inp, "ffn_inp", il); // feed-forward network - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, model.layers[il].ffn_up_b, - NULL, NULL, - model.layers[il].ffn_down, model.layers[il].ffn_down_b, - NULL, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + if (model.arch == LLM_ARCH_BERT) { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + 
model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + } else { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + } cb(cur, "ffn_out", il); // attentions bypass the intermediate layer cur = ggml_add(ctx0, cur, ffn_inp); // output layer norm - cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, LLM_NORM, cb, il); + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].layer_out_norm, model.layers[il].layer_out_norm_b, LLM_NORM, cb, il); // input for next layer inpL = cur; @@ -7289,6 +7385,7 @@ static struct ggml_cgraph * llama_build_graph( result = llm.build_refact(); } break; case LLM_ARCH_BERT: + case LLM_ARCH_NOMIC_BERT: { result = llm.build_bert(); } break; From 6c00a066928b0475b865a2e3e709e2166e02d548 Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Tue, 13 Feb 2024 18:56:38 +0100 Subject: [PATCH 590/811] gguf : add python reader example (#5216) * Update CMakeLists.txt * Create reader.py * Update reader.py * Update reader.py another whitespace :| * Update reader.py * lintlintlint --- examples/CMakeLists.txt | 1 + gguf-py/examples/reader.py | 45 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) create mode 100644 gguf-py/examples/reader.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 68ad89964..653abc73a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -38,6 +38,7 @@ else() add_subdirectory(speculative) add_subdirectory(lookahead) add_subdirectory(lookup) + add_subdirectory(gguf) add_subdirectory(train-text-from-scratch) add_subdirectory(imatrix) if (LLAMA_BUILD_SERVER) diff --git a/gguf-py/examples/reader.py b/gguf-py/examples/reader.py new file mode 100644 index 000000000..62e0769da --- /dev/null +++ b/gguf-py/examples/reader.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +import sys +from pathlib import Path +from gguf.gguf_reader import GGUFReader + + +sys.path.insert(0, str(Path(__file__).parent.parent)) + + +def read_gguf_file(gguf_file_path): + """ + Reads and prints key-value pairs and tensor information from a GGUF file in an improved format. + + Parameters: + - gguf_file_path: Path to the GGUF file. + """ + + reader = GGUFReader(gguf_file_path) + + # List all key-value pairs in a columnized format + print("Key-Value Pairs:") + max_key_length = max(len(key) for key in reader.fields.keys()) + for key, field in reader.fields.items(): + value = field.parts[field.data[0]] + print(f"{key:{max_key_length}} : {value}") + print("----") + + # List all tensors + print("Tensors:") + tensor_info_format = "{:<30} | Shape: {:<15} | Size: {:<12} | Quantization: {}" + print(tensor_info_format.format("Tensor Name", "Shape", "Size", "Quantization")) + print("-" * 80) + for tensor in reader.tensors: + shape_str = "x".join(map(str, tensor.shape)) + size_str = str(tensor.n_elements) + quantization_str = tensor.tensor_type.name + print(tensor_info_format.format(tensor.name, shape_str, size_str, quantization_str)) + + +if __name__ == '__main__': + if len(sys.argv) < 2: + print("Usage: reader.py ") + sys.exit(1) + gguf_file_path = sys.argv[1] + read_gguf_file(gguf_file_path) From f5ca054855dea83f424003162f26de376e5643f6 Mon Sep 17 00:00:00 2001 From: AT Date: Tue, 13 Feb 2024 15:44:25 -0600 Subject: [PATCH 591/811] Early return for zero size calls to get_tensor. 
(#5482) * Early return for zero size calls to get_tensor. Signed-off-by: Adam Treat * Update ggml-kompute.cpp Co-authored-by: Georgi Gerganov * Update ggml-kompute.cpp Co-authored-by: Georgi Gerganov * Add an early return to the get/set tensor when the size is null. Signed-off-by: Adam Treat * Early return after the assertions. Signed-off-by: Adam Treat * Since we do the early return in the generic backend now no reason to do so here as well. Signed-off-by: Adam Treat --------- Signed-off-by: Adam Treat Co-authored-by: Georgi Gerganov --- ggml-backend.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ggml-backend.c b/ggml-backend.c index 9ee81b766..87eea8440 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -219,6 +219,10 @@ GGML_CALL void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * GGML_ASSERT(buf != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + if (!size) { + return; + } + tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } @@ -229,6 +233,10 @@ GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + if (!size) { + return; + } + tensor->buffer->iface.get_tensor(buf, tensor, data, offset, size); } From aa2341298924ac89778252015efcb792f2df1e20 Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:38:35 +0100 Subject: [PATCH 592/811] llava : support v1.6 (#5267) * Create llava-survery-v2.py * Update convert-image-encoder-to-gguf.py * Update convert-image-encoder-to-gguf.py * Rename llava-survery-v2.py to llava-surgery-v2.py * Update convert-image-encoder-to-gguf.py will now search for projector * Update convert-image-encoder-to-gguf.py whoops * Update llava-surgery-v2.py * Clip: Bugfix for normalization (it did not loat the 3 std and mean values) Clip: bicubic resize function Clip: added save-to-bmp/pil for debugging and conversion from/to 32/8 images Clip: added normalization with FP16 precision simulation (image tensors match HF implementation, can be switched off, only used for llava-1.6) Clip: added newline tensor, mergetype kv, image-grid kv, new resize-pad function with resolution from gridpoints Clip: clip_image_preprocess now returns a float * vector instead of float, this way llava 1.5 and 1.6 is supported llava: added ggml cpu graph for embedding patching, added spatial_unpad preliminary support, added a lot of comments that need to be cleaned when all is final convert-image-encoder: fixed image-grid flattening * whitespace corrections * ws * Tensors are now properly permuted. Before the embeddings were inserted 1:1, now they are split into the 24x24 patches as in reference. 
* ws * added verbose_prompt support into cli added stopwords for llava-1.6 into cli * moved llava functions to llava.cpp, made clip.h C compatible API, replaced vector style functions with pointers, added a debug define to remove functions from compilation while not needed * ws * convert : skip unknown tensors (need for LLaVA) * llava : update readme * llava : fix compile warnings * llava : style * convert : add --skip-unknown CLI arg * server : remove clip structs * bugfix for non llava-1.6 It should now work with llava-1.5 as well * clip : minor code rearrange * llava : update readme a bit --------- Co-authored-by: John Co-authored-by: Georgi Gerganov --- convert.py | 37 +- examples/llava/README.md | 12 +- examples/llava/clip.cpp | 766 +++++++++++++++--- examples/llava/clip.h | 47 +- .../llava/convert-image-encoder-to-gguf.py | 66 +- examples/llava/llava-cli.cpp | 26 +- examples/llava/llava-surgery-v2.py | 167 ++++ examples/llava/llava.cpp | 296 ++++++- examples/llava/llava.h | 2 - examples/server/server.cpp | 15 +- 10 files changed, 1229 insertions(+), 205 deletions(-) create mode 100644 examples/llava/llava-surgery-v2.py diff --git a/convert.py b/convert.py index 323e8058d..63a0a5d78 100755 --- a/convert.py +++ b/convert.py @@ -1173,7 +1173,7 @@ def convert_to_output_type(model: LazyModel, output_type: GGMLFileType) -> LazyM for (name, tensor) in model.items()} -def convert_model_names(model: LazyModel, params: Params) -> LazyModel: +def convert_model_names(model: LazyModel, params: Params, skip_unknown: bool) -> LazyModel: tmap = gguf.TensorNameMap(ARCH, params.n_layer) should_skip: set[gguf.MODEL_TENSOR] = set(gguf.MODEL_TENSOR_SKIP.get(ARCH, [])) @@ -1199,7 +1199,11 @@ def convert_model_names(model: LazyModel, params: Params) -> LazyModel: for name, lazy_tensor in model.items(): tensor_type, name_new = tmap.get_type_and_name(name, try_suffixes = (".weight", ".bias")) or (None, None) if name_new is None: - raise Exception(f"Unexpected tensor name: {name}") + if skip_unknown: + print(f"Unexpected tensor name: {name} - skipping") + continue + else: + raise Exception(f"Unexpected tensor name: {name}. Use --skip-unknown to ignore it (e.g. 
LLaVA)") if tensor_type in should_skip: print(f"skipping tensor {name_new}") @@ -1377,19 +1381,20 @@ def main(args_in: list[str] | None = None) -> None: output_choices.append("q8_0") vocab_types = ["spm", "bpe", "hfft"] parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") - parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) - parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") - parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") - parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--vocab-type", choices=vocab_types, help="The vocabulary format used to define the tokenizer model (default: spm)", default="spm") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") - parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") - parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default=DEFAULT_CONCURRENCY) - parser.add_argument("--big-endian", action="store_true", help="model is executed on big endian machine") - parser.add_argument("--pad-vocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") + parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) + parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") + parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") + parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") + parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") + parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") + parser.add_argument("--vocab-type", choices=vocab_types, help="The vocabulary format used to define the tokenizer model (default: spm)", default="spm") + parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") + parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") + parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") + parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default=DEFAULT_CONCURRENCY) + parser.add_argument("--big-endian", action="store_true", help="model is executed on big endian machine") + parser.add_argument("--pad-vocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") + parser.add_argument("--skip-unknown", action="store_true", help="skip unknown tensor names instead of 
failing") args = parser.parse_args(args_in) if args.awq_path: @@ -1461,7 +1466,7 @@ def main(args_in: list[str] | None = None) -> None: print(f"Special vocab info: {special_vocab}") model = model_plus.model - model = convert_model_names(model, params) + model = convert_model_names(model, params, args.skip_unknown) ftype = pick_output_type(model, args.outtype) model = convert_to_output_type(model, ftype) outfile = args.outfile or default_outfile(model_plus.paths, ftype) diff --git a/examples/llava/README.md b/examples/llava/README.md index 19f1a50a2..e2ef0eff1 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -19,9 +19,9 @@ After building, run: `./llava-cli` to see the usage. For example: **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. -## Model conversion +## LLaVA 1.5 -- Clone `llava-v15-7b` and `clip-vit-large-patch14-336` locally: +- Clone a LLaVA and a CLIP model ([available options](https://github.com/haotian-liu/LLaVA/blob/main/docs/MODEL_ZOO.md)). For example: ```sh git clone https://huggingface.co/liuhaotian/llava-v1.5-7b @@ -55,8 +55,14 @@ python ./convert.py ../llava-v1.5-7b Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. +## LLaVA 1.6 + +- Use `llava-surgery-v2.py` + +- TODO: add detailed instructions + ## TODO -- [ ] Support non-CPU backend for the image encoding part. +- [x] Support non-CPU backend for the image encoding part. - [ ] Support different sampling methods. - [ ] Support more model variants. diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index ccd0d85ad..9c5091e61 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1,7 +1,7 @@ // NOTE: This is modified from clip.cpp only for LLaVA, // so there might be still unnecessary artifacts hanging around // I'll gradually clean and extend it - +// Note: Even when using identical normalized image inputs (see normalize_image_u8_to_f32()) we have a significant difference in resulting embeddings compared to pytorch #include "clip.h" #include "ggml.h" #include "ggml-alloc.h" @@ -30,6 +30,26 @@ #include #include #include +#include + +//#define CLIP_DEBUG_FUNCTIONS + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... +struct clip_image_f32 { + int nx; + int ny; + + std::vector buf; +}; static std::string format(const char * fmt, ...) { va_list ap; @@ -50,50 +70,56 @@ static std::string format(const char * fmt, ...) 
{ // key constants // -#define KEY_FTYPE "general.file_type" -#define KEY_NAME "general.name" -#define KEY_DESCRIPTION "general.description" -#define KEY_HAS_TEXT_ENC "clip.has_text_encoder" -#define KEY_HAS_VIS_ENC "clip.has_vision_encoder" +#define KEY_FTYPE "general.file_type" +#define KEY_NAME "general.name" +#define KEY_DESCRIPTION "general.description" +#define KEY_HAS_TEXT_ENC "clip.has_text_encoder" +#define KEY_HAS_VIS_ENC "clip.has_vision_encoder" #define KEY_HAS_LLAVA_PROJ "clip.has_llava_projector" -#define KEY_USE_GELU "clip.use_gelu" -#define KEY_N_EMBD "clip.%s.embedding_length" -#define KEY_N_FF "clip.%s.feed_forward_length" -#define KEY_N_BLOCK "clip.%s.block_count" -#define KEY_N_HEAD "clip.%s.attention.head_count" +#define KEY_USE_GELU "clip.use_gelu" +#define KEY_N_EMBD "clip.%s.embedding_length" +#define KEY_N_FF "clip.%s.feed_forward_length" +#define KEY_N_BLOCK "clip.%s.block_count" +#define KEY_N_HEAD "clip.%s.attention.head_count" #define KEY_LAYER_NORM_EPS "clip.%s.attention.layer_norm_epsilon" -#define KEY_PROJ_DIM "clip.%s.projection_dim" -#define KEY_TOKENS "tokenizer.ggml.tokens" -#define KEY_N_POSITIONS "clip.text.context_length" -#define KEY_IMAGE_SIZE "clip.vision.image_size" -#define KEY_PATCH_SIZE "clip.vision.patch_size" -#define KEY_IMAGE_MEAN "clip.vision.image_mean" -#define KEY_IMAGE_STD "clip.vision.image_std" -#define KEY_PROJ_TYPE "clip.projector_type" +#define KEY_PROJ_DIM "clip.%s.projection_dim" +#define KEY_TOKENS "tokenizer.ggml.tokens" +#define KEY_N_POSITIONS "clip.text.context_length" +#define KEY_IMAGE_SIZE "clip.vision.image_size" +#define KEY_PATCH_SIZE "clip.vision.patch_size" +#define KEY_IMAGE_MEAN "clip.vision.image_mean" +#define KEY_IMAGE_STD "clip.vision.image_std" +#define KEY_PROJ_TYPE "clip.projector_type" + +#define KEY_MM_PATCH_MERGE_TYPE "clip.vision.mm_patch_merge_type" +#define KEY_IMAGE_GRID_PINPOINTS "clip.vision.image_grid_pinpoints" +#define KEY_IMAGE_CROP_RESOLUTION "clip.vision.image_crop_resolution" + // // tensor name constants // -#define TN_TOKEN_EMBD "%s.token_embd.weight" -#define TN_POS_EMBD "%s.position_embd.weight" -#define TN_CLASS_EMBD "v.class_embd" -#define TN_PATCH_EMBD "v.patch_embd.weight" -#define TN_ATTN_K "%s.blk.%d.attn_k.%s" -#define TN_ATTN_Q "%s.blk.%d.attn_q.%s" -#define TN_ATTN_V "%s.blk.%d.attn_v.%s" -#define TN_ATTN_OUTPUT "%s.blk.%d.attn_out.%s" -#define TN_FFN_DOWN "%s.blk.%d.ffn_down.%s" -#define TN_FFN_UP "%s.blk.%d.ffn_up.%s" -#define TN_LN_1 "%s.blk.%d.ln1.%s" -#define TN_LN_2 "%s.blk.%d.ln2.%s" -#define TN_LN_PRE "%s.pre_ln.%s" -#define TN_LN_POST "%s.post_ln.%s" -#define TN_TEXT_PROJ "text_projection.weight" -#define TN_VIS_PROJ "visual_projection.weight" -#define TN_LLAVA_PROJ "mm.%d.%s" -#define TN_MVLM_PROJ_MLP "mm.model.mlp.%d.%s" +#define TN_TOKEN_EMBD "%s.token_embd.weight" +#define TN_POS_EMBD "%s.position_embd.weight" +#define TN_CLASS_EMBD "v.class_embd" +#define TN_PATCH_EMBD "v.patch_embd.weight" +#define TN_ATTN_K "%s.blk.%d.attn_k.%s" +#define TN_ATTN_Q "%s.blk.%d.attn_q.%s" +#define TN_ATTN_V "%s.blk.%d.attn_v.%s" +#define TN_ATTN_OUTPUT "%s.blk.%d.attn_out.%s" +#define TN_FFN_DOWN "%s.blk.%d.ffn_down.%s" +#define TN_FFN_UP "%s.blk.%d.ffn_up.%s" +#define TN_LN_1 "%s.blk.%d.ln1.%s" +#define TN_LN_2 "%s.blk.%d.ln2.%s" +#define TN_LN_PRE "%s.pre_ln.%s" +#define TN_LN_POST "%s.post_ln.%s" +#define TN_TEXT_PROJ "text_projection.weight" +#define TN_VIS_PROJ "visual_projection.weight" +#define TN_LLAVA_PROJ "mm.%d.%s" +#define TN_MVLM_PROJ_MLP "mm.model.mlp.%d.%s" #define 
TN_MVLM_PROJ_BLOCK "mm.model.mb_block.%d.block.%d.%s" +#define TN_IMAGE_NEWLINE "model.image_newline" enum projector_type { @@ -104,8 +130,8 @@ enum projector_type { }; static std::map PROJECTOR_TYPE_NAMES = { - { PROJECTOR_TYPE_MLP, "mlp" }, - { PROJECTOR_TYPE_LDP, "ldp" }, + { PROJECTOR_TYPE_MLP, "mlp" }, + { PROJECTOR_TYPE_LDP, "ldp" }, }; @@ -165,7 +191,6 @@ static std::string gguf_data_to_str(enum gguf_type type, const void * data, int } } - static void replace_all(std::string & s, const std::string & search, const std::string & replace) { std::string result; for (size_t pos = 0; ; pos += search.length()) { @@ -217,7 +242,7 @@ static std::string gguf_kv_to_str(const struct gguf_context * ctx_gguf, int i) { } } -static void print_tensor_info(const ggml_tensor* tensor, const char* prefix = "") { +static void print_tensor_info(const ggml_tensor * tensor, const char * prefix = "") { size_t tensor_size = ggml_nbytes(tensor); printf("%s: n_dims = %d, name = %s, tensor_size=%zu, shape:[%" PRId64 ", %" PRId64 ", %" PRId64 ", %" PRId64 "], type = %s\n", prefix, ggml_n_dims(tensor), tensor->name, tensor_size, @@ -233,31 +258,136 @@ static projector_type clip_projector_type_from_string(const std::string & name) return PROJECTOR_TYPE_UNKNOWN; } -// -// image data -// +#ifdef CLIP_DEBUG_FUNCTIONS +static void clip_image_write_image_to_ppm(const clip_image_u8& img, const std::string& filename) { + std::ofstream file(filename, std::ios::binary); + if (!file.is_open()) { + std::cerr << "Failed to open file for writing: " << filename << std::endl; + return; + } -// RGB uint8 image -struct clip_image_u8 { - int nx; - int ny; + // PPM header: P6 format, width, height, and max color value + file << "P6\n" << img.nx << " " << img.ny << "\n255\n"; - std::vector buf; -}; + // Write pixel data + for (size_t i = 0; i < img.buf.size(); i += 3) { + // PPM expects binary data in RGB format, which matches our image buffer + file.write(reinterpret_cast(&img.buf[i]), 3); + } -// RGB float32 image (NHWC) -// Memory layout: RGBRGBRGB... 
-struct clip_image_f32 { - int nx; - int ny; + file.close(); +} + +static void clip_image_save_to_bmp(const clip_image_u8& img, const std::string& filename) { + std::ofstream file(filename, std::ios::binary); + if (!file.is_open()) { + std::cerr << "Failed to open file for writing: " << filename << std::endl; + return; + } + + int fileSize = 54 + 3 * img.nx * img.ny; // File header + info header + pixel data + int bytesPerPixel = 3; + int widthInBytes = img.nx * bytesPerPixel; + int paddingAmount = (4 - (widthInBytes % 4)) % 4; + int stride = widthInBytes + paddingAmount; + + // Bitmap file header + unsigned char fileHeader[14] = { + 'B','M', // Signature + 0,0,0,0, // Image file size in bytes + 0,0,0,0, // Reserved + 54,0,0,0 // Start of pixel array + }; + + // Total file size + fileSize = 54 + (stride * img.ny); + fileHeader[2] = (unsigned char)(fileSize); + fileHeader[3] = (unsigned char)(fileSize >> 8); + fileHeader[4] = (unsigned char)(fileSize >> 16); + fileHeader[5] = (unsigned char)(fileSize >> 24); + + // Bitmap information header (BITMAPINFOHEADER) + unsigned char infoHeader[40] = { + 40,0,0,0, // Size of this header (40 bytes) + 0,0,0,0, // Image width + 0,0,0,0, // Image height + 1,0, // Number of color planes + 24,0, // Bits per pixel + 0,0,0,0, // No compression + 0,0,0,0, // Image size (can be 0 for no compression) + 0,0,0,0, // X pixels per meter (not specified) + 0,0,0,0, // Y pixels per meter (not specified) + 0,0,0,0, // Total colors (color table not used) + 0,0,0,0 // Important colors (all are important) + }; + + // Width and height in the information header + infoHeader[4] = (unsigned char)(img.nx); + infoHeader[5] = (unsigned char)(img.nx >> 8); + infoHeader[6] = (unsigned char)(img.nx >> 16); + infoHeader[7] = (unsigned char)(img.nx >> 24); + infoHeader[8] = (unsigned char)(img.ny); + infoHeader[9] = (unsigned char)(img.ny >> 8); + infoHeader[10] = (unsigned char)(img.ny >> 16); + infoHeader[11] = (unsigned char)(img.ny >> 24); + + // Write file headers + file.write(reinterpret_cast(fileHeader), sizeof(fileHeader)); + file.write(reinterpret_cast(infoHeader), sizeof(infoHeader)); + + // Pixel data + std::vector padding(3, 0); // Max padding size to be added to each row + for (int y = img.ny - 1; y >= 0; --y) { // BMP files are stored bottom-to-top + for (int x = 0; x < img.nx; ++x) { + // Each pixel + size_t pixelIndex = (y * img.nx + x) * 3; + unsigned char pixel[3] = { + img.buf[pixelIndex + 2], // BMP stores pixels in BGR format + img.buf[pixelIndex + 1], + img.buf[pixelIndex] + }; + file.write(reinterpret_cast(pixel), 3); + } + // Write padding for the row + file.write(reinterpret_cast(padding.data()), paddingAmount); + } + + file.close(); +} + +// debug function to convert f32 to u8 +static void clip_image_convert_f32_to_u8(const clip_image_f32& src, clip_image_u8& dst) { + dst.nx = src.nx; + dst.ny = src.ny; + dst.buf.resize(3 * src.nx * src.ny); + for (size_t i = 0; i < src.buf.size(); ++i) { + dst.buf[i] = static_cast(std::min(std::max(int(src.buf[i] * 255.0f), 0), 255)); + } +} +#endif - std::vector buf; -}; // // clip layers // +struct clip_hparams { + int32_t image_size; + int32_t patch_size; + int32_t hidden_size; + int32_t n_intermediate; + int32_t projection_dim; + int32_t n_head; + int32_t n_layer; + + float eps; + + char mm_patch_merge_type[32] = "flat"; // spatial_unpad or flat (default) + + int32_t image_grid_pinpoints[32]; + int32_t image_crop_resolution; +}; + struct clip_layer { // attention struct ggml_tensor * k_w; @@ -287,7 +417,7 @@ struct 
clip_layer { }; struct clip_vision_model { - struct clip_vision_hparams hparams; + struct clip_hparams hparams; // embeddings struct ggml_tensor * class_embedding; @@ -310,6 +440,8 @@ struct clip_vision_model { struct ggml_tensor * mm_2_w = NULL; struct ggml_tensor * mm_2_b = NULL; + struct ggml_tensor * image_newline = NULL; + // Yi type models with mlp+normalization projection struct ggml_tensor * mm_1_w = NULL; // Yi type models have 0, 1, 3, 4 struct ggml_tensor * mm_1_b = NULL; @@ -364,9 +496,10 @@ struct clip_ctx { std::vector buf_compute_meta; // memory buffers to evaluate the model - ggml_backend_buffer_t params_buffer = NULL; + ggml_backend_buffer_t params_buffer = NULL; ggml_backend_buffer_t compute_buffer = NULL; - ggml_backend_t backend = NULL; + + ggml_backend_t backend = NULL; ggml_gallocr_t compute_alloc = NULL; }; @@ -379,18 +512,19 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 const auto & model = ctx->vision_model; const auto & hparams = model.hparams; - const int image_size = hparams.image_size; - const int patch_size = hparams.patch_size; - const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); - const int num_positions = num_patches + 1; - const int hidden_size = hparams.hidden_size; - const int n_head = hparams.n_head; - const int d_head = hidden_size / n_head; - const int n_layer = hparams.n_layer; - //const int n_intermediate = hparams.n_intermediate; - //const int projection_dim = hparams.projection_dim; - const float eps = hparams.eps; - int batch_size = imgs->size; + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + const int num_patches_per_side = image_size / patch_size; GGML_UNUSED(num_patches_per_side); + const int num_positions = num_patches + 1; + const int hidden_size = hparams.hidden_size; + const int n_head = hparams.n_head; + const int d_head = hidden_size / n_head; + const int n_layer = hparams.n_layer; + const float eps = hparams.eps; + + const int batch_size = imgs->size; + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } @@ -540,7 +674,6 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings = ggml_add(ctx0, embeddings, model.mm_0_b); embeddings = ggml_gelu(ctx0, embeddings); - embeddings = ggml_mul_mat(ctx0, model.mm_2_w, embeddings); embeddings = ggml_add(ctx0, embeddings, model.mm_2_b); @@ -791,10 +924,10 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { if (idx != -1) { const std::string proj_type = gguf_get_val_str(ctx, idx); new_clip->proj_type = clip_projector_type_from_string(proj_type); - } - else { + } else { new_clip->proj_type = PROJECTOR_TYPE_MLP; } + if (new_clip->proj_type == PROJECTOR_TYPE_MLP) { if (gguf_find_tensor(ctx, format(TN_LLAVA_PROJ, 3, "weight").c_str()) != -1) { new_clip->proj_type = PROJECTOR_TYPE_MLP_NORM; @@ -920,11 +1053,41 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { hparams.projection_dim = get_u32(ctx, format(KEY_PROJ_DIM, "vision")); hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); + try { + int idx = get_key_idx(ctx, KEY_IMAGE_GRID_PINPOINTS); + int n = gguf_get_arr_n(ctx, idx); + const int32_t * pinpoints = (const int32_t *)gguf_get_arr_data(ctx, idx); + for (int i = 0; i < 32 && i < n && pinpoints[i] != 0; ++i) { + hparams.image_grid_pinpoints[i] = pinpoints[i]; + } + if (n < 32) + 
hparams.image_grid_pinpoints[n] = 0; + } catch (std::runtime_error & e) { + hparams.image_grid_pinpoints[0]=0; + } + + try { + int idx = get_key_idx(ctx, KEY_MM_PATCH_MERGE_TYPE); + strcpy(hparams.mm_patch_merge_type, gguf_get_val_str(ctx, idx)); + } catch (std::runtime_error & e) { + strcpy(hparams.mm_patch_merge_type, "flat"); + } + + try { + hparams.image_crop_resolution = get_u32(ctx, KEY_IMAGE_CROP_RESOLUTION); // llava-1.6 + } catch(const std::exception& e) { + hparams.image_crop_resolution = hparams.image_size; + } + int idx_mean = get_key_idx(ctx, KEY_IMAGE_MEAN); int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); + + const float * mean_data = (const float *)gguf_get_arr_data(ctx, idx_mean); + const float * std_data = (const float *)gguf_get_arr_data(ctx, idx_std); + for (int i = 0; i < 3; ++i) { - new_clip->image_mean[i] = *((const float *)gguf_get_arr_data(ctx, idx_mean)); - new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); + new_clip->image_mean[i] = mean_data[i]; + new_clip->image_std[i] = std_data[i]; } if (verbosity >= 2) { @@ -936,13 +1099,27 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_projection_dim %d\n", hparams.projection_dim); printf("v_n_head %d\n", hparams.n_head); printf("v_n_layer %d\n", hparams.n_layer); + printf("v_eps %f\n", hparams.eps); + printf("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); + printf("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); + printf("v_image_grid_pinpoints: "); + for (int i = 0; i < 32 & hparams.image_grid_pinpoints[i]!=0; ++i) { + printf("%d ", hparams.image_grid_pinpoints[i]); + } + printf("\n"); + printf("v_mm_patch_merge_type: %s\n", hparams.mm_patch_merge_type); + } - vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); - vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); - vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); - vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); - vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + try { + vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); + vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); + vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); + vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); + vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + } catch(const std::exception& e) { + fprintf(stderr, "%s: failed to load vision model tensors\n", __func__); + } // LLaVA projection if (new_clip->proj_type == PROJECTOR_TYPE_MLP || new_clip->proj_type == PROJECTOR_TYPE_MLP_NORM) { @@ -968,40 +1145,43 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { vision_model.mm_4_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 4, "weight")); vision_model.mm_4_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 4, "bias")); } catch (std::runtime_error & e) { } - } - else if (new_clip->proj_type == PROJECTOR_TYPE_LDP) { + try { + vision_model.image_newline = get_tensor(new_clip->ctx_data, TN_IMAGE_NEWLINE); + // fprintf(stderr, "%s: image_newline tensor (llava-1.6) found\n", __func__); + } catch (std::runtime_error & e) { } + } else if 
(new_clip->proj_type == PROJECTOR_TYPE_LDP) { // MobileVLM projection - vision_model.mm_model_mlp_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "weight")); - vision_model.mm_model_mlp_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "bias")); - vision_model.mm_model_mlp_3_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "weight")); - vision_model.mm_model_mlp_3_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "bias")); - vision_model.mm_model_block_1_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "0.weight")); - vision_model.mm_model_block_1_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.weight")); - vision_model.mm_model_block_1_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.bias")); + vision_model.mm_model_mlp_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "weight")); + vision_model.mm_model_mlp_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "bias")); + vision_model.mm_model_mlp_3_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "weight")); + vision_model.mm_model_mlp_3_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "bias")); + vision_model.mm_model_block_1_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "0.weight")); + vision_model.mm_model_block_1_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.weight")); + vision_model.mm_model_block_1_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.bias")); vision_model.mm_model_block_1_block_1_fc1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc1.weight")); vision_model.mm_model_block_1_block_1_fc1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc1.bias")); vision_model.mm_model_block_1_block_1_fc2_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc2.weight")); vision_model.mm_model_block_1_block_1_fc2_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc2.bias")); - vision_model.mm_model_block_1_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "0.weight")); - vision_model.mm_model_block_1_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.weight")); - vision_model.mm_model_block_1_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.bias")); - vision_model.mm_model_block_2_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "0.weight")); - vision_model.mm_model_block_2_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.weight")); - vision_model.mm_model_block_2_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.bias")); + vision_model.mm_model_block_1_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "0.weight")); + vision_model.mm_model_block_1_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.weight")); + vision_model.mm_model_block_1_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.bias")); + vision_model.mm_model_block_2_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "0.weight")); + vision_model.mm_model_block_2_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.weight")); + vision_model.mm_model_block_2_block_0_1_b = 
get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.bias")); vision_model.mm_model_block_2_block_1_fc1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc1.weight")); vision_model.mm_model_block_2_block_1_fc1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc1.bias")); vision_model.mm_model_block_2_block_1_fc2_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc2.weight")); vision_model.mm_model_block_2_block_1_fc2_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc2.bias")); - vision_model.mm_model_block_2_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "0.weight")); - vision_model.mm_model_block_2_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.weight")); - vision_model.mm_model_block_2_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.bias")); - } - else { + vision_model.mm_model_block_2_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "0.weight")); + vision_model.mm_model_block_2_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.weight")); + vision_model.mm_model_block_2_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.bias")); + } else { std::string proj_type = PROJECTOR_TYPE_NAMES[new_clip->proj_type]; throw std::runtime_error(format("%s: don't support projector with: %s currently\n", __func__, proj_type.c_str())); } vision_model.layers.resize(hparams.n_layer); + for (int il = 0; il < hparams.n_layer; ++il) { auto & layer = vision_model.layers[il]; layer.k_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "weight")); @@ -1084,24 +1264,255 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length return true; } -// normalize: x = (x - mean) / std -// TODO: implement bicubic interpolation instead of linear. 
-bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { +// Linear interpolation between two points +inline float lerp(float s, float e, float t) { + return s + (e - s) * t; +} +// Bilinear resize function +static void bilinear_resize(const clip_image_u8& src, clip_image_u8& dst, int target_width, int target_height) { + dst.nx = target_width; + dst.ny = target_height; + dst.buf.resize(3 * target_width * target_height); + + float x_ratio = static_cast(src.nx - 1) / target_width; + float y_ratio = static_cast(src.ny - 1) / target_height; + + for (int y = 0; y < target_height; y++) { + for (int x = 0; x < target_width; x++) { + float px = x_ratio * x; + float py = y_ratio * y; + int x_floor = static_cast(px); + int y_floor = static_cast(py); + float x_lerp = px - x_floor; + float y_lerp = py - y_floor; + + for (int c = 0; c < 3; c++) { + float top = lerp( + static_cast(src.buf[3 * (y_floor * src.nx + x_floor) + c]), + static_cast(src.buf[3 * (y_floor * src.nx + (x_floor + 1)) + c]), + x_lerp + ); + float bottom = lerp( + static_cast(src.buf[3 * ((y_floor + 1) * src.nx + x_floor) + c]), + static_cast(src.buf[3 * ((y_floor + 1) * src.nx + (x_floor + 1)) + c]), + x_lerp + ); + dst.buf[3 * (y * target_width + x) + c] = static_cast(lerp(top, bottom, y_lerp)); + } + } + } +} + +// Normalize image to float32 - careful with pytorch .to(model.device, dtype=torch.float16) - this sometimes reduces precision (32>16>32), sometimes not +static void normalize_image_u8_to_f32(const clip_image_u8* src, clip_image_f32* dst, const float mean[3], const float std[3]) { + dst->nx = src->nx; + dst->ny = src->ny; + dst->buf.resize(src->buf.size()); + + for (size_t i = 0; i < src->buf.size(); ++i) { + int c = i % 3; // rgb + dst->buf[i] = (static_cast(src->buf[i]) / 255.0f - mean[c]) / std[c]; + } +} + +inline float clip(float x, float lower, float upper) { + return std::max(lower, std::min(x, upper)); +} + +static bool bicubic_resize(const clip_image_u8 &img, clip_image_u8 &dst, int target_width, int target_height) { + const int nx = img.nx; + const int ny = img.ny; + + dst.nx = target_width; + dst.ny = target_height; + dst.buf.resize(3 * target_width * target_height); + + float Cc; + float C[5]; + float d0, d2, d3, a0, a1, a2, a3; + int i, j, k, jj; + int x, y; + float dx, dy; + float tx, ty; + + tx = (float)nx / (float)target_width; + ty = (float)ny / (float)target_height; + + // Bicubic interpolation; adapted from ViT.cpp, inspired from : + // -> https://github.com/yglukhov/bicubic-interpolation-image-processing/blob/master/libimage.c#L36 + // -> https://en.wikipedia.org/wiki/Bicubic_interpolation + + for (i = 0; i < target_height; i++) { + for (j = 0; j < target_width; j++) { + x = (int)(tx * j); + y = (int)(ty * i); + + dx = tx * j - x; + dy = ty * i - y; + + for (k = 0; k < 3; k++) { + for (jj = 0; jj <= 3; jj++) { + d0 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x - 1, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + d2 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x + 1, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + d3 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x + 2, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + a0 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + + a1 = -1.0 / 3 * d0 + d2 - 1.0 / 6 * d3; + a2 = 1.0 / 2 * d0 + 1.0 / 2 * 
d2; + a3 = -1.0 / 6 * d0 - 1.0 / 2 * d2 + 1.0 / 6 * d3; + + C[jj] = a0 + a1 * dx + a2 * dx * dx + a3 * dx * dx * dx; + + d0 = C[0] - C[1]; + d2 = C[2] - C[1]; + d3 = C[3] - C[1]; + a0 = C[1]; + a1 = -1.0 / 3 * d0 + d2 - 1.0 / 6 * d3; + a2 = 1.0 / 2 * d0 + 1.0 / 2 * d2; + a3 = -1.0 / 6 * d0 - 1.0 / 2 * d2 + 1.0 / 6 * d3; + Cc = a0 + a1 * dy + a2 * dy * dy + a3 * dy * dy * dy; + + const uint8_t Cc2 = std::min(std::max(std::round(Cc), 0.0f), 255.0f); + dst.buf[(i * target_width + j) * 3 + k] = float(Cc2); + } + } + } + } + + return true; +} + +// llava-1.6 type of resize_and_pad (black) +static void resize_and_pad_image(const clip_image_u8& image, clip_image_u8 &image_output, const std::pair& target_resolution) { + int target_width = target_resolution.first; + int target_height = target_resolution.second; + + float scale_w = static_cast(target_width) / image.nx; + float scale_h = static_cast(target_height) / image.ny; + + int new_width, new_height; + + if (scale_w < scale_h) { + new_width = target_width; + new_height = std::min(static_cast(std::ceil(image.ny * scale_w)), target_height); + } else { + new_height = target_height; + new_width = std::min(static_cast(std::ceil(image.nx * scale_h)), target_width); + } + + clip_image_u8 resized_image; + // bilinear_resize(image, resized_image, new_width, new_height); + bicubic_resize(image, resized_image, new_width, new_height); + + clip_image_u8 padded_image; + padded_image.nx = target_width; + padded_image.ny = target_height; + padded_image.buf.resize(3 * target_width * target_height, 0); // Initialize with black + + // Calculate padding offsets + int pad_x = (target_width - new_width) / 2; + int pad_y = (target_height - new_height) / 2; + + // Copy the resized image into the center of the padded buffer + for (int y = 0; y < new_height; ++y) { + for (int x = 0; x < new_width; ++x) { + for (int c = 0; c < 3; ++c) { + padded_image.buf[3 * ((y + pad_y) * target_width + (x + pad_x)) + c] = resized_image.buf[3 * (y * new_width + x) + c]; + } + } + } + image_output = std::move(padded_image); +} + +/** + * Selects the best resolution from a list of possible resolutions based on the original size. + * + * @param original_size The original size of the image in the format (width, height). + * @param possible_resolutions A list of possible resolutions in the format [(width1, height1), (width2, height2), ...]. + * @return The best fit resolution in the format (width, height). 
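 * Worked example with hypothetical numbers (not taken from any model config):
 * for an 800x600 input and candidate resolutions (672,672) and (1344,336):
 *   (672,672):  scale = min(672/800, 672/600) = 0.84 -> downscaled 672x504,
 *               effective = 338688, wasted = 451584 - 338688 = 112896
 *   (1344,336): scale = min(1344/800, 336/600) = 0.56 -> downscaled 448x336,
 *               effective = 150528, wasted = 451584 - 150528 = 301056
 * (672,672) is selected because it preserves the larger effective resolution.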
+ */ +static std::pair select_best_resolution(const std::pair & original_size, const std::vector> & possible_resolutions) { + int original_width = original_size.first; + int original_height = original_size.second; + std::pair best_fit; + int max_effective_resolution = 0; + int min_wasted_resolution = std::numeric_limits::max(); + + for (const auto& resolution : possible_resolutions) { + int width = resolution.first; + int height = resolution.second; + float scale = std::min(static_cast(width) / original_width, static_cast(height) / original_height); + int downscaled_width = static_cast(original_width * scale); + int downscaled_height = static_cast(original_height * scale); + int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); + int wasted_resolution = (width * height) - effective_resolution; + // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { + max_effective_resolution = effective_resolution; + min_wasted_resolution = wasted_resolution; + best_fit = resolution; + } + } + + return best_fit; +} + +static std::vector divide_to_patches_u8(const clip_image_u8 & image, int patch_size) { + std::vector patches; + int width = image.nx; + int height = image.ny; + for (int i = 0; i < height; i += patch_size) { + for (int j = 0; j < width; j += patch_size) { + clip_image_u8 *patch = clip_image_u8_init(); + patch->nx = std::min(patch_size, width - j); + patch->ny = std::min(patch_size, height - i); + patch->buf.resize(3 * patch->nx * patch->ny); + for (int y = 0; y < patch->ny; ++y) { + for (int x = 0; x < patch->nx; ++x) { + for (int c = 0; c < 3; ++c) { + patch->buf[3 * (y * patch->nx + x) + c] = image.buf[3 * ((i + y) * width + (j + x)) + c]; + } + } + } + patches.push_back(patch); + } + } + return patches; +} + +// returns the normalized float tensor for llava-1.5, for spatial_unpad with anyres processing for llava-1.6 it returns the normalized image patch tensors as a vector +// res_imgs memory is being allocated here, previous allocations will be freed if found +bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32_batch & res_imgs) { + bool pad_to_square = true; if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; } + auto & params = ctx->vision_model.hparams; + // The model config actually contains all we need to decide on how to preprocess, here we automatically switch to the new llava-1.6 preprocessing + if (strcmp(params.mm_patch_merge_type, "spatial_unpad") == 0) { + pad_to_square = false; + } + // free the previous res_imgs if any set + if (res_imgs.size > 0 && res_imgs.size < 100) { + for (size_t i = 0; i < res_imgs.size; i++) { + clip_image_f32_free(&(res_imgs.data[i])); + } + delete[] res_imgs.data; + } + res_imgs.data = nullptr; + res_imgs.size = 0; // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) // see https://github.com/haotian-liu/LLaVA/blob/e854a2bf85118c504f6f16bf5c3c7c92f8fa8c6b/llava/conversation.py#L113-L156 clip_image_u8 * temp = clip_image_u8_init(); // we will keep the input image data here temporarily - if (pad2square && img->nx != img->ny) { + if (pad_to_square && 
img->nx != img->ny) { int longer_side = std::max(img->nx, img->ny); temp->nx = longer_side; temp->ny = longer_side; temp->buf.resize(3 * longer_side * longer_side); - const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA + const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA (this is the mean rgb color * 255) // fill with background color for (size_t i = 0; i < temp->buf.size(); i++) { @@ -1119,18 +1530,63 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli } } } else { - temp->nx = img->nx; - temp->ny = img->ny; - temp->buf.resize(img->buf.size()); - memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); + if (params.image_grid_pinpoints[0] != 0) { + // "spatial_unpad" with "anyres" processing for llava-1.6 + std::vector> possible_resolutions; + for (int i = 0; i < 32 && params.image_grid_pinpoints[i] != 0; i+=2) { + possible_resolutions.push_back({params.image_grid_pinpoints[i], params.image_grid_pinpoints[i+1]}); + } + std::pair best_resolution = select_best_resolution({img->nx, img->ny}, possible_resolutions); + // clip_image_save_to_bmp(*img, "input.bmp"); + resize_and_pad_image(*img, *temp, best_resolution); // we do not pad with mean-bg color anymore in llava-1.6 + // clip_image_save_to_bmp(*temp, "resized.bmp"); + // visually verify normalized image: + // normalize_image_u8_to_f32(*temp, *res, ctx->image_mean, ctx->image_std); + // { + // clip_image_u8 * temp2 = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*res, *temp2); + // clip_image_save_to_bmp(*temp2, "resized_normalized_f32.bmp"); + // clip_image_u8_free(temp2); + // } + + std::vector patches = divide_to_patches_u8(*temp, params.image_size); // prepare spatial sorted main patches of image_size each (336 in llava-1.6) + + clip_image_u8 *image_original_resize = clip_image_u8_init(); + // bilinear_resize(*img, *image_original_resize, params.image_size, params.image_size); // in python this is "shortest_edge", but all CLIP are square + bicubic_resize(*img, *image_original_resize, params.image_size, params.image_size); // in python this is "shortest_edge", but all CLIP are square + patches.insert(patches.begin(), image_original_resize); + // clip_image_f32_batch_init(patches.size()); + res_imgs.size = patches.size(); + res_imgs.data = new clip_image_f32[res_imgs.size]; + int num=0; + for (auto& patch : patches) { + normalize_image_u8_to_f32(patch, &res_imgs.data[num], ctx->image_mean, ctx->image_std); + num++; + } + + for (size_t i = 0; i < patches.size(); i++) { + // printf("patch %d: %d %d\n", i, patches[i]->nx, patches[i]->ny); + clip_image_u8_free(patches[i]); + } + + clip_image_u8_free(temp); + + return true; + } else { + temp->nx = img->nx; + temp->ny = img->ny; + temp->buf.resize(img->buf.size()); + memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); + } } const int nx = temp->nx; const int ny = temp->ny; + // clip_image_save_to_bmp(*temp, "resized_vanilla.bmp"); const int nx2 = ctx->vision_model.hparams.image_size; const int ny2 = ctx->vision_model.hparams.image_size; - + clip_image_f32 * res = clip_image_f32_init(); res->nx = nx2; res->ny = ny2; res->buf.resize(3 * nx2 * ny2); @@ -1184,9 +1640,25 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli } clip_image_u8_free(temp); + // { + // clip_image_u8 * temp2 = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*res, *temp2); + // clip_image_save_to_bmp(*temp2, "resized_normalized_f32_vanilla.bmp"); + // clip_image_u8_free(temp2); + 
// } + // res_imgs.push_back(res); + + res_imgs.size = 1; + res_imgs.data = new clip_image_f32[res_imgs.size]; + res_imgs.data[0] = std::move(*res); + return true; } +ggml_tensor * clip_get_newline_tensor(const struct clip_ctx * ctx) { + return ctx->vision_model.image_newline; +} + void clip_free(clip_ctx * ctx) { ggml_free(ctx->ctx_data); gguf_free(ctx->ctx_gguf); @@ -1194,6 +1666,42 @@ void clip_free(clip_ctx * ctx) { delete ctx; } +size_t clip_embd_nbytes(const struct clip_ctx * ctx) { + return clip_n_patches(ctx) * clip_n_mmproj_embd(ctx) * sizeof(float); +} + +int32_t clip_image_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.image_size; +} + +int32_t clip_patch_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.patch_size; +} + +int32_t clip_hidden_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.hidden_size; +} + +const char * clip_patch_merge_type(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.mm_patch_merge_type; +} + +const int32_t * clip_image_grid(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.image_grid_pinpoints; +} + +int clip_n_patches(const struct clip_ctx * ctx) { + const auto & params = ctx->vision_model.hparams; + + int n_patches = (params.image_size / params.patch_size) * (params.image_size / params.patch_size); + + if (ctx->proj_type == PROJECTOR_TYPE_LDP) { + n_patches /= 4; + } + + return n_patches; +} + bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); @@ -1213,7 +1721,7 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima } int batch_size = imgs->size; - if(ctx->has_llava_projector) { + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); // TODO: support multiple images } @@ -1224,9 +1732,10 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima // set inputs const auto & model = ctx->vision_model; const auto & hparams = model.hparams; - const int image_size = hparams.image_size; - const int patch_size = hparams.patch_size; - const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); const int num_positions = num_patches + 1; { @@ -1301,11 +1810,11 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima // copy the embeddings to the location passed by the user ggml_backend_tensor_get(embeddings, vec, 0, ggml_nbytes(embeddings)); + return true; } bool clip_model_quantize(const char * fname_inp, const char * fname_out, const int itype) { - ggml_type type = GGML_TYPE_Q4_1; assert(itype < GGML_TYPE_COUNT); @@ -1494,26 +2003,13 @@ int clip_n_mmproj_embd(const struct clip_ctx * ctx) { if (ctx->proj_type == PROJECTOR_TYPE_LDP) { return ctx->vision_model.mm_model_block_1_block_2_1_b->ne[0]; } - else if (ctx->proj_type == PROJECTOR_TYPE_MLP) { + if (ctx->proj_type == PROJECTOR_TYPE_MLP) { return ctx->vision_model.mm_2_b->ne[0]; - } else if (ctx->proj_type == PROJECTOR_TYPE_MLP_NORM) { + } + if (ctx->proj_type == PROJECTOR_TYPE_MLP_NORM) { return ctx->vision_model.mm_3_b->ne[0]; } - else { - std::string proj_type = PROJECTOR_TYPE_NAMES[ctx->proj_type]; - throw std::runtime_error(format("%s: don't support projector with: %s currently\n", 
__func__, proj_type.c_str())); - } -} -int clip_n_patches(const struct clip_ctx * ctx) { - auto & params = ctx->vision_model.hparams; - int n_patches = (params.image_size / params.patch_size) * (params.image_size / params.patch_size); - if (ctx->proj_type == PROJECTOR_TYPE_LDP) { - n_patches /= 4; - } - return n_patches; -} - -size_t clip_embd_nbytes(const struct clip_ctx * ctx) { - return clip_n_patches(ctx) * clip_n_mmproj_embd(ctx) * sizeof(float); + std::string proj_type = PROJECTOR_TYPE_NAMES[ctx->proj_type]; + throw std::runtime_error(format("%s: don't support projector with: %s currently\n", __func__, proj_type.c_str())); } diff --git a/examples/llava/clip.h b/examples/llava/clip.h index 458a256a1..cd9a4022f 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -24,25 +24,7 @@ struct clip_ctx; extern "C" { #endif -struct clip_vision_hparams { - int32_t image_size; - int32_t patch_size; - int32_t hidden_size; - int32_t n_intermediate; - int32_t projection_dim; - int32_t n_head; - int32_t n_layer; - float eps; -}; - -CLIP_API struct clip_ctx * clip_model_load(const char * fname, int verbosity); - -CLIP_API void clip_free(struct clip_ctx * ctx); - -CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); - -CLIP_API int clip_n_patches (const struct clip_ctx * ctx); -CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); +struct clip_ctx; struct clip_image_u8_batch { struct clip_image_u8 * data; @@ -54,10 +36,29 @@ struct clip_image_f32_batch { size_t size; }; +CLIP_API struct clip_ctx * clip_model_load (const char * fname, int verbosity); +CLIP_API struct clip_ctx * clip_model_load_cpu(const char * fname, int verbosity); + +CLIP_API void clip_free(struct clip_ctx * ctx); + +CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); + +CLIP_API int32_t clip_image_size (const struct clip_ctx * ctx); +CLIP_API int32_t clip_patch_size (const struct clip_ctx * ctx); +CLIP_API int32_t clip_hidden_size(const struct clip_ctx * ctx); + +// TODO: should be enum, not string +CLIP_API const char * clip_patch_merge_type(const struct clip_ctx * ctx); + +CLIP_API const int32_t * clip_image_grid(const struct clip_ctx * ctx); + +CLIP_API int clip_n_patches (const struct clip_ctx * ctx); +CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); + CLIP_API struct clip_image_u8 * clip_image_u8_init (); CLIP_API struct clip_image_f32 * clip_image_f32_init(); -CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); +CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); @@ -65,7 +66,11 @@ CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 /** interpret bytes as an image file with length bytes_length, and use the result to populate img */ CLIP_API bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img); -CLIP_API bool clip_image_preprocess (struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, bool pad2square); +/** preprocess img and store the result in res_imgs, pad_to_square may be overriden to false depending on model configuration */ +CLIP_API bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32_batch & res_imgs ); + +CLIP_API struct ggml_tensor * clip_get_newline_tensor(const struct clip_ctx * ctx); + CLIP_API bool clip_image_encode (struct clip_ctx * 
ctx, int n_threads, struct clip_image_f32 * img, float * vec); CLIP_API bool clip_image_batch_encode(struct clip_ctx * ctx, int n_threads, const struct clip_image_f32_batch * imgs, float * vec); diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index e204b56be..c69f89ac2 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -78,18 +78,19 @@ ap.add_argument("--text-only", action="store_true", required=False, help="Save a text-only model. It can't be used to encode images") ap.add_argument("--vision-only", action="store_true", required=False, help="Save a vision-only model. It can't be used to encode texts") -ap.add_argument("--clip_model_is_vision", action="store_true", required=False, +ap.add_argument("--clip-model-is-vision", action="store_true", required=False, help="The clip model is a pure vision model (ShareGPT4V vision extract for example)") +ap.add_argument("--clip-model-is-openclip", action="store_true", required=False, + help="The clip model is from openclip (for ViT-SO400M type))") ap.add_argument("--llava-projector", help="Path to llava.projector file. If specified, save an image encoder for LLaVA models.") ap.add_argument("--projector-type", help="Type of projector. Possible values: mlp, ldp", choices=["mlp", "ldp"], default="mlp") -ap.add_argument("--image-mean", nargs=3, type=float, required=False, help="Override image mean values") -ap.add_argument("--image-std", nargs=3, type=float, required=False, help="Override image std values") ap.add_argument("-o", "--output-dir", help="Directory to save GGUF files. Default is the original model directory", default=None) # Example --image_mean 0.48145466 0.4578275 0.40821073 --image_std 0.26862954 0.26130258 0.27577711 +# Example --image_mean 0.5 0.5 0.5 --image_std 0.5 0.5 0.5 default_image_mean = [0.48145466, 0.4578275, 0.40821073] default_image_std = [0.26862954, 0.26130258, 0.27577711] -ap.add_argument('--image_mean', type=float, nargs='+', help='Mean of the images for normalization (overrides processor) ', default=None) -ap.add_argument('--image_std', type=float, nargs='+', help='Standard deviation of the images for normalization (overrides processor)', default=None) +ap.add_argument('--image-mean', type=float, nargs='+', help='Mean of the images for normalization (overrides processor) ', default=None) +ap.add_argument('--image-std', type=float, nargs='+', help='Standard deviation of the images for normalization (overrides processor)', default=None) # with proper args = ap.parse_args() @@ -105,7 +106,7 @@ if args.use_f32: # output in the same directory as the model if output_dir is None dir_model = args.model_dir -if args.clip_model_is_vision: +if args.clip_model_is_vision or not os.path.exists(dir_model + "/vocab.json") or args.clip_model_is_openclip: vocab = None tokens = None else: @@ -133,7 +134,7 @@ ftype = 1 if args.use_f32: ftype = 0 -if args.clip_model_is_vision: +if args.clip_model_is_vision or args.clip_model_is_openclip: model = CLIPVisionModel.from_pretrained(dir_model) processor = None else: @@ -202,6 +203,57 @@ if has_vision_encoder: fout.add_float32(k(KEY_ATTENTION_LAYERNORM_EPS, VISION), v_hparams["layer_norm_eps"]) block_count = v_hparams["num_hidden_layers"] - 1 if has_llava_projector else v_hparams["num_hidden_layers"] fout.add_uint32(k(KEY_BLOCK_COUNT, VISION), block_count) + # /** + # "image_grid_pinpoints": [ + # [ + # 336, + # 672 + # ], + # [ + # 672, + # 336 + # ], + # [ + # 672, + # 
672 + # ], + # [ + # 1008, + # 336 + # ], + # [ + # 336, + # 1008 + # ] + # ], + # Flattened: + # [ + # 336, 672, + # 672, 336, + # 672, 672, + # 1008, 336, + # 336, 1008 + # ] + # * + # */ + if "image_grid_pinpoints" in v_hparams: + # flatten it + image_grid_pinpoints = [] + for pinpoint in v_hparams["image_grid_pinpoints"]: + for p in pinpoint: + image_grid_pinpoints.append(p) + fout.add_array("clip.vision.image_grid_pinpoints", image_grid_pinpoints) + if "image_crop_resolution" in v_hparams: + fout.add_uint32("clip.vision.image_crop_resolution", v_hparams["image_crop_resolution"]) + if "image_aspect_ratio" in v_hparams: + fout.add_string("clip.vision.image_aspect_ratio", v_hparams["image_aspect_ratio"]) + if "image_split_resolution" in v_hparams: + fout.add_uint32("clip.vision.image_split_resolution", v_hparams["image_split_resolution"]) + if "mm_patch_merge_type" in v_hparams: + fout.add_string("clip.vision.mm_patch_merge_type", v_hparams["mm_patch_merge_type"]) + if "mm_projector_type" in v_hparams: + fout.add_string("clip.vision.mm_projector_type", v_hparams["mm_projector_type"]) + if processor is not None: image_mean = processor.image_processor.image_mean if args.image_mean is None or args.image_mean == default_image_mean else args.image_mean diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 031e9806d..bef7f7c95 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -155,11 +155,29 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ system_prompt = prompt.substr(0, image_pos); user_prompt = prompt.substr(image_pos + std::string("").length()); printf("system_prompt: %s\n", system_prompt.c_str()); + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, system_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } printf("user_prompt: %s\n", user_prompt.c_str()); + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } } else { // llava-1.5 native mode system_prompt = "A chat between a curious human and an artificial intelligence assistant. 
The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:"; user_prompt = prompt + "\nASSISTANT:"; + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } } eval_string(ctx_llava->ctx_llama, system_prompt.c_str(), params->n_batch, &n_past, add_bos); @@ -171,13 +189,17 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ fprintf(stderr, "\n"); struct llama_sampling_context * ctx_sampling = llama_sampling_init(params->sparams); - + std::string response = ""; for (int i = 0; i < max_tgt_len; i++) { const char * tmp = sample(ctx_sampling, ctx_llava->ctx_llama, &n_past); + response += tmp; if (strcmp(tmp, "") == 0) break; if (strstr(tmp, "###")) break; // Yi-VL behavior - printf("%s", tmp); + if (strstr(response.c_str(), "<|im_end|>")) break; // Yi-34B llava-1.6 - for some reason those decode not as the correct token (tokenizer works) + if (strstr(response.c_str(), "<|im_start|>")) break; // Yi-34B llava-1.6 + if (strstr(response.c_str(), "USER:")) break; // mistral llava-1.6 + fflush(stdout); } diff --git a/examples/llava/llava-surgery-v2.py b/examples/llava/llava-surgery-v2.py new file mode 100644 index 000000000..5bc5bc513 --- /dev/null +++ b/examples/llava/llava-surgery-v2.py @@ -0,0 +1,167 @@ +import argparse +import glob +import os +import torch +from safetensors.torch import load as safe_load, save as safe_save, safe_open, save_file + +# Function to determine if file is a SafeTensor file +def is_safetensor_file(file_path): + return file_path.endswith('.safetensors') + + +# Unified loading function +def load_model(file_path): + if is_safetensor_file(file_path): + tensors = {} + with safe_open(file_path, framework="pt", device="cpu") as f: + for key in f.keys(): + tensors[key] = f.get_tensor(key).clone() + # output shape + print(f"{key} : {tensors[key].shape}") + return tensors, 'safetensor' + else: + return torch.load(file_path, map_location=torch.device('cpu')), 'pytorch' + + +# Unified saving function +def save_model(model, file_path, file_type): + if file_type == 'safetensor': + # safe_save(model, file_path) + save_file(model, file_path) + else: + torch.save(model, file_path) + + +# Adapted function to clean vision tower from checkpoint +def clean_vision_tower_from_checkpoint(checkpoint_path): + checkpoint, file_type = load_model(checkpoint_path) + # file_type = 'pytorch' + model_path = os.path.dirname(checkpoint_path) + print(f"Searching for vision tower tensors in {checkpoint_path}") + clip_tensors = [k for k, v in checkpoint.items() if (k.startswith("model.vision_tower") or k.startswith("vit."))] + + if len(clip_tensors) > 0: + print(f"Found {len(clip_tensors)} tensors to extract from {checkpoint_path}") + # Adapted for file type + clip_path = os.path.join(model_path, "llava.clip") + + if os.path.exists(clip_path): + print(f"Loading existing llava.clip from {clip_path}") + existing_clip, _ = load_model(clip_path) + else: + print(f"Creating new llava.clip at {clip_path}") + existing_clip = {} + # Update existing_clip with new tensors, avoid duplicates + for name in clip_tensors: + simple_name = name[name.index('vision_model.'):] if 'vision_model.' 
in name else name + print(f"Adding {simple_name} to llava.clip") + if simple_name not in existing_clip: + existing_clip[simple_name] = checkpoint[name] + + # Save the updated clip tensors back to llava.clip + save_model(existing_clip, clip_path, 'pytorch') + + # Remove the tensors from the original checkpoint + for name in clip_tensors: + del checkpoint[name] + + # Save the updated checkpoint + checkpoint_path = checkpoint_path + save_model(checkpoint, checkpoint_path, file_type) + return True + return False + +def find_relevant_checkpoints(checkpoint_paths, newline_criteria, projector): + newline_checkpoint_path = None + projector_checkpoint_path = None + + for path in checkpoint_paths: + checkpoint, _ = load_model(path) + if newline_criteria(checkpoint) and newline_checkpoint_path is None: + newline_checkpoint_path = path + if projector(checkpoint): + projector_checkpoint_path = path + + return newline_checkpoint_path, projector_checkpoint_path + +def newline_criteria(checkpoint): + return any(k.startswith("model.image_newline") for k in checkpoint.keys()) + +def proj_criteria(checkpoint): + return any(k.startswith("model.mm_projector") or k.startswith("vision_proj.") for k in checkpoint.keys()) + + +# Command-line interface setup +ap = argparse.ArgumentParser() +ap.add_argument("-m", "--model", required=True, help="Path to LLaVA v1.5+ model") +ap.add_argument("-C", "--clean-vision-tower", action="store_true", help="Remove any vision tower from the model files") +args = ap.parse_args() + +if args.clean_vision_tower: + # Generalized to handle both PyTorch and SafeTensors models + model_files = sorted(glob.glob(f"{args.model}/*"), key=os.path.getmtime, reverse=True) + # checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and path.startswith('pytorch')) or (path.endswith('.safetensors') and path.startswith('model'))] + checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and 'pytorch' in path.split('/')[-1].split('\\')[-1]) or (path.endswith('.safetensors') and 'model' in path.split('/')[-1].split('\\')[-1])] + for projector_checkpoint_path in checkpoint_paths: + print(f"Cleaning {projector_checkpoint_path}") + if not clean_vision_tower_from_checkpoint(projector_checkpoint_path): + print(f"No vision tower found in {projector_checkpoint_path}") + # we break once none is found, so far all models append them at the end + # break + print("Done! 
All vision tower tensors are removed from the model files and stored in llava.clip file.") + +# Now we look for the projector in the last checkpoint +model_files = sorted(glob.glob(f"{args.model}/*"), key=os.path.getmtime, reverse=True) +checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and 'pytorch' in path.split('/')[-1].split('\\')[-1]) or (path.endswith('.safetensors') and 'model' in path.split('/')[-1].split('\\')[-1])] +# last_checkpoint_path = checkpoint_paths[0] +# first_checkpoint_path = checkpoint_paths[-1] +newline_checkpoint_path, projector_checkpoint_path = find_relevant_checkpoints(checkpoint_paths, newline_criteria, proj_criteria) + +print(f"Taking projector from {projector_checkpoint_path}") +first_mm_tensors = [] +first_checkpoint = None +if newline_checkpoint_path is not None: + print(f"Taking newline from {newline_checkpoint_path}") + first_checkpoint, file_type = load_model(newline_checkpoint_path) + first_mm_tensors = [k for k, v in first_checkpoint.items() if k.startswith("model.image_newline")] + +# Load the checkpoint +mm_tensors = [] +last_checkpoint = None +if projector_checkpoint_path is not None: + last_checkpoint, file_type = load_model(projector_checkpoint_path) + mm_tensors = [k for k, v in last_checkpoint.items() if k.startswith("model.mm_projector") or k.startswith("vision_proj.")] + +if len(mm_tensors) == 0: + if last_checkpoint is not None: + for k, v in last_checkpoint.items(): + print(k) + print(f"Found {len(mm_tensors)} tensors to extract out of {len(last_checkpoint)} tensors.") + print("No tensors found. Is this a LLaVA model?") + exit() + +print(f"Found {len(mm_tensors)} tensors to extract.") +print(f"Found additional {len(first_mm_tensors)} tensors to extract.") +# projector = {name: checkpoint.[name].float() for name in mm_tensors} +projector = {} +for name in mm_tensors: + projector[name] = last_checkpoint[name].float() +for name in first_mm_tensors: + projector[name] = first_checkpoint[name].float() + +if len(projector) > 0: + save_model(projector, f"{args.model}/llava.projector", 'pytorch') + +for name in mm_tensors: + del last_checkpoint[name] +for name in first_mm_tensors: + del first_checkpoint[name] + +if len(mm_tensors) > 0: + save_model(last_checkpoint, projector_checkpoint_path, file_type) +if len(first_mm_tensors) > 0: + save_model(first_checkpoint, newline_checkpoint_path, file_type) + +print("Done!") +print(f"Now you can convert {args.model} to a a regular LLaMA GGUF file.") +print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.") diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index d42e7582e..22953417f 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -2,32 +2,296 @@ #include "common.h" #include "llama.h" #include "llava.h" +#include "base64.hpp" #include #include #include +#include + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... +struct clip_image_f32 { + int nx; + int ny; + + std::vector buf; +}; + +struct clip_image_grid_shape { + int first; + int second; +}; + +/** + * Selects the best resolution from a list of possible resolutions based on the original size. + * + * @param original_size The original size of the image in the format (width, height). + * @param possible_resolutions A list of possible resolutions in the format [(width1, height1), (width2, height2), ...]. 
+ * @return The best fit resolution in the format (width, height). + */ +static std::pair select_best_resolution(const std::pair& original_size, const std::vector>& possible_resolutions) { + int original_width = original_size.first; + int original_height = original_size.second; + + std::pair best_fit; + int max_effective_resolution = 0; + int min_wasted_resolution = std::numeric_limits::max(); + + for (const auto& resolution : possible_resolutions) { + int width = resolution.first; + int height = resolution.second; + float scale = std::min(static_cast(width) / original_width, static_cast(height) / original_height); + int downscaled_width = static_cast(original_width * scale); + int downscaled_height = static_cast(original_height * scale); + int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); + int wasted_resolution = (width * height) - effective_resolution; + // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { + max_effective_resolution = effective_resolution; + min_wasted_resolution = wasted_resolution; + best_fit = resolution; + } + } + + return best_fit; +} + +/** + * @brief Get the anyres image grid shape object + * + * @param image_size + * @param grid_pinpoints + * @param image_patch_size + * @return + */ +static struct clip_image_grid_shape get_anyres_image_grid_shape(const std::pair & image_size, const std::vector> & grid_pinpoints, int image_patch_size) { + /** + Conversion from gguf flat array to vector: + std::vector> possible_resolutions; + for (int i = 0; i < 32 && params.image_grid_pinpoints[i] != 0; i+=2) { + possible_resolutions.push_back({params.image_grid_pinpoints[i], params.image_grid_pinpoints[i+1]}); + } + */ + auto best_resolution = select_best_resolution(image_size, grid_pinpoints); + return {best_resolution.first / image_patch_size, best_resolution.second / image_patch_size}; +} + +// Take the image segments in a grid configuration and return the embeddings and the number of embeddings into preallocated memory (image_embd_out) +static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector & image_embd_v, struct clip_image_grid_shape grid_shape, float * image_embd_out, int * n_img_pos_out) { + struct { + struct ggml_tensor * newline; + struct ggml_context * ctx; + } model; + + const int32_t image_size = clip_image_size(ctx_clip); + const int32_t patch_size = clip_patch_size(ctx_clip); + + int32_t num_patches_per_side = image_size / patch_size; // 336 / 14 = 24 - used for embedding-patching boxes (24*24 = 576 patches) + + int num_patches_width = grid_shape.first; // grid 1-4 + int num_patches_height = grid_shape.second; // grid 1-4 + + const size_t num_images = num_patches_width + num_patches_height + 1; + + // TODO: size calculation is not calculated - it's only tens of MB + size_t ctx_size = 0; + + { + ctx_size += clip_embd_nbytes(ctx_clip) * num_images * 8; // image_features + ctx_size += 1024*1024 * ggml_type_size(GGML_TYPE_F32); + } + + struct ggml_init_params params { + /*.mem_size =*/ ctx_size, + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ false, // NOTE: this should be false when using the legacy API + }; + + // Python reference code for full unpad: + /* + base_image_feature = image_feature[0] + 
image_feature = image_feature[1:] + image_feature = image_feature.permute(4, 0, 2, 1, 3).contiguous() + image_feature = image_feature.flatten(1, 2).flatten(2, 3) + image_feature = unpad_image(image_feature, image_sizes[image_idx]) + image_feature = torch.cat(( + image_feature, + self.model.image_newline[:, None, None].expand(*image_feature.shape[:-1], 1) + ), dim=-1) + image_feature = image_feature.flatten(1, 2).transpose(0, 1) + image_feature = torch.cat((base_image_feature, image_feature), dim=0) + */ + // We now have two options: unpad or no unpad. Unpad removes tokens for faster llm eval. + // In terms of result quality it appears to make no difference, so we'll start with the easier approach given 5D tensors are not supported in ggml yet. + // Without unpad we have to split the sub-image embeddings into patches of 24 features each and permute them. + // Once all images are processed to prepended the base_image_features without any changes. + + // Pytorch reference simplified, modified for ggml compatibility - confirmed identical output in python (for a 2x2 grid image (676x676 scaling)) + /* + image_feature = image_feature.view(2, 2, 24, 24, 4096) + image_feature = image_feature.permute(0, 2, 1, 3, 4).contiguous() + image_feature = image_feature.view(2, 24, 2, 24, 4096) + image_feature = image_feature.flatten(0, 3) + + // Reshape to 4D tensor by merging the last two dimensions + image_feature = image_feature.view(2, 2, 24, 24*4096) + image_feature = image_feature.permute(0, 2, 1, 3).contiguous() + image_feature = image_feature.view(-1, 4096) + */ + + model.ctx = ggml_init(params); + + ggml_tensor * newline_tmp = clip_get_newline_tensor(ctx_clip); + model.newline = ggml_new_tensor_1d(model.ctx, GGML_TYPE_F32, newline_tmp->ne[0]); + if (newline_tmp->backend != GGML_BACKEND_CPU) { + if (newline_tmp->buffer == NULL) { + printf("newline_tmp tensor buffer is NULL\n"); + } + ggml_backend_tensor_get(newline_tmp, model.newline->data, 0, ggml_nbytes(newline_tmp)); + } else { + model.newline->data = newline_tmp->data; + if (model.newline->data == NULL) { + printf("newline_tmp tensor data is NULL\n"); + } + } + + struct ggml_tensor * image_features = ggml_new_tensor_3d(model.ctx, GGML_TYPE_F32, clip_n_mmproj_embd(ctx_clip), clip_n_patches(ctx_clip), num_images - 1); // example: 4096 x 576 x 4 + // ggml_tensor_printf(image_features,"image_features",__LINE__,false,false); + // fill it with the image embeddings, ignoring the base + for (size_t i = 1; i < num_images; i++) { + size_t offset = (i-1) * clip_embd_nbytes(ctx_clip); + memcpy((uint8_t *)(image_features->data) + offset, image_embd_v[i], clip_embd_nbytes(ctx_clip)); + } + + struct ggml_cgraph * gf = ggml_new_graph(model.ctx); + size_t size_ele = ggml_type_size(GGML_TYPE_F32); + + struct ggml_tensor *image_features_patchview = ggml_view_4d(model.ctx, image_features, + num_patches_per_side * clip_n_mmproj_embd(ctx_clip), + num_patches_per_side, + num_patches_width, + num_patches_height, + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip), + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip) * num_patches_per_side, + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip) * num_patches_per_side * num_patches_width, 0); + // ggml_tensor_printf(image_features_patchview,"image_features_patchview",__LINE__,false,false); + struct ggml_tensor *permuted_cont = ggml_cont(model.ctx, ggml_permute(model.ctx, image_features_patchview, 0, 2, 1, 3)); + /** + At the end of each row we have to add the row_end embeddings, which are 
the same as the newline embeddings + image_feature = torch.cat(( + image_feature, + self.model.image_newline[:, None, None].expand(*image_feature.shape[:-1], 1).to(image_feature.device) + ), dim=-1) + * + */ + + // ggml_tensor_printf(permuted_cont,"permuted_cont",__LINE__,false,false); + struct ggml_tensor *flatten = ggml_view_2d(model.ctx, permuted_cont, clip_n_mmproj_embd(ctx_clip), num_patches_height * num_patches_width * num_patches_per_side * num_patches_per_side, size_ele * clip_n_mmproj_embd(ctx_clip), 0); + // ggml_tensor_printf(flatten,"flatten",__LINE__,false,false); + ggml_build_forward_expand(gf, flatten); + ggml_graph_compute_with_ctx(model.ctx, gf, 1); + struct ggml_tensor* result = gf->nodes[gf->n_nodes - 1]; + + memcpy(image_embd_out, image_embd_v[0], clip_embd_nbytes(ctx_clip)); // main image as global context + // append without newline tokens (default behavior in llava_arch when not using unpad ): + memcpy(image_embd_out + clip_n_patches(ctx_clip) * clip_n_mmproj_embd(ctx_clip), (float*)result->data, clip_embd_nbytes(ctx_clip) * (num_images-1)); // grid patches + *n_img_pos_out = static_cast(result->ne[1]+clip_n_patches(ctx_clip)); + + // Debug: Test single segments + // Current findings: sending base image, sending a segment embedding all works similar to python + // However, permuted embeddings do not work yet (stride issue?) + // memcpy(image_embd_out, image_embd_v[0], clip_embd_nbytes(ctx_clip)); // main image as context + // memcpy(image_embd_out, (float*)prepared_cont->data, clip_embd_nbytes(ctx_clip)); // main image as context + // *n_img_pos_out=576; + + ggml_free(model.ctx); + return true; +} -#include "base64.hpp" static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float * image_embd, int * n_img_pos) { - clip_image_f32 * img_res = clip_image_f32_init(); - if (!clip_image_preprocess(ctx_clip, img, img_res, /*pad2square =*/ true)) { + // std::vector img_res_v; // format VectN x H x W x RGB (N x 336 x 336 x 3), so interleaved RGB - different to the python implementation which is N x 3 x 336 x 336 + clip_image_f32_batch img_res_v; + img_res_v.size = 0; + img_res_v.data = nullptr; + if (!clip_image_preprocess(ctx_clip, img, img_res_v)) { fprintf(stderr, "%s: unable to preprocess image\n", __func__); - clip_image_f32_free(img_res); + delete[] img_res_v.data; return false; } - *n_img_pos = clip_n_patches(ctx_clip); - const int64_t t_img_enc_start_us = ggml_time_us(); - bool encoded = clip_image_encode(ctx_clip, n_threads, img_res, image_embd); - clip_image_f32_free(img_res); - if (!encoded) { - fprintf(stderr, "Unable to encode image\n"); - return false; + const char * mm_patch_merge_type = clip_patch_merge_type(ctx_clip); + + if (strcmp(mm_patch_merge_type, "spatial_unpad") != 0) { + // flat / default llava-1.5 type embedding + *n_img_pos = clip_n_patches(ctx_clip); + bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[0], image_embd); // image_embd shape is 576 x 4096 + delete[] img_res_v.data; + if (!encoded) { + fprintf(stderr, "Unable to encode image\n"); + + return false; + } + } else { + // spatial_unpad llava-1.6 type embedding + // TODO: CLIP needs batching support - in HF the llm projection is separate after encoding, which might be a solution to quickly get batching working + std::vector image_embd_v; + image_embd_v.resize(img_res_v.size); + for (size_t i = 0; i < img_res_v.size; i++) { + image_embd_v[i] = (float *)malloc(clip_embd_nbytes(ctx_clip)); // 576 patches * 4096 embeddings * 4 bytes 
= 9437184 + const bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[i], image_embd_v[i]); // image data is in 3x336x336 format and will be converted to 336x336x3 inside + if (!encoded) { + fprintf(stderr, "Unable to encode image - spatial_unpad - subimage %d of %d\n", (int) i+1, (int) img_res_v.size); + return false; + } + } + const int64_t t_img_enc_batch_us = ggml_time_us(); + printf("%s: %d segments encoded in %8.2f ms\n", __func__, (int)img_res_v.size, (t_img_enc_batch_us - t_img_enc_start_us) / 1000.0); + + const int32_t * image_grid = clip_image_grid(ctx_clip); + + std::vector> grid_pinpoints; + for (int i = 0; i < 32 && image_grid[i] != 0; i += 2) { + grid_pinpoints.push_back({image_grid[i], image_grid[i+1]}); + } + + // free all img_res_v - not needed anymore + delete[] img_res_v.data; + img_res_v.size = 0; + img_res_v.data = nullptr; + + const int32_t image_size = clip_image_size(ctx_clip); + + struct clip_image_grid_shape grid_shape = get_anyres_image_grid_shape({img->nx,img->ny}, grid_pinpoints, image_size); + + int n_img_pos_out; + clip_llava_handle_patches(ctx_clip, image_embd_v, grid_shape, image_embd, &n_img_pos_out); + *n_img_pos = n_img_pos_out; + + for (size_t i = 0; i < image_embd_v.size(); i++) { + free(image_embd_v[i]); + } + image_embd_v.clear(); + + // debug image/segment/normalization content: + // clip_image_u8 * tmp = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*image_feature, *tmp); + // clip_image_save_to_bmp(*tmp, "image_feature.bmp"); } + printf("%s: image embedding created: %d tokens\n", __func__, *n_img_pos); + const int64_t t_img_enc_end_us = ggml_time_us(); float t_img_enc_ms = (t_img_enc_end_us - t_img_enc_start_us) / 1000.0; @@ -48,7 +312,7 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * } static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { - float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)); + float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); free(image_embd); @@ -85,7 +349,7 @@ bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_ return true; } -LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { +struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { clip_image_u8 * img = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) { clip_image_u8_free(img); @@ -142,7 +406,7 @@ static bool load_file_to_bytes(const char* path, unsigned char** bytesOut, long return true; } -LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path) { +struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path) { unsigned char* image_bytes; long image_bytes_length; auto loaded = load_file_to_bytes(image_path, &image_bytes, &image_bytes_length); @@ -151,13 +415,13 @@ LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct return NULL; } - auto embed = 
llava_image_embed_make_with_bytes(ctx_clip, n_threads, image_bytes, image_bytes_length); + llava_image_embed *embed = llava_image_embed_make_with_bytes(ctx_clip, n_threads, image_bytes, image_bytes_length); free(image_bytes); return embed; } -LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed) { +void llava_image_embed_free(struct llava_image_embed * embed) { free(embed->embed); free(embed); } diff --git a/examples/llava/llava.h b/examples/llava/llava.h index e08ce7883..9e9466a5d 100644 --- a/examples/llava/llava.h +++ b/examples/llava/llava.h @@ -3,7 +3,6 @@ #include "ggml.h" - #ifdef LLAMA_SHARED # if defined(_WIN32) && !defined(__MINGW32__) # ifdef LLAMA_BUILD @@ -42,7 +41,6 @@ LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed); /** write the image represented by embed into the llama context with batch size n_batch, starting at context pos n_past. on completion, n_past points to the next position in the context after the image embed. */ LLAVA_API bool llava_eval_image_embed(struct llama_context * ctx_llama, const struct llava_image_embed * embed, int n_batch, int * n_past); - #ifdef __cplusplus } #endif diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1699eb76b..6e3434030 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -968,13 +968,20 @@ struct llama_server_context { continue; } - clip_image_f32 * img_res = clip_image_f32_init(); - if (!clip_image_preprocess(clp_ctx, img.img_data, img_res, /*pad2square =*/ true)) + clip_image_f32_batch img_res_v; + img_res_v.size = 0; + img_res_v.data = nullptr; + if (!clip_image_preprocess(clp_ctx, img.img_data, img_res_v)) { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); + clip_image_f32_free(img_res_v.data); return false; } + + // note: assumes only one image was returned by clip_image_preprocess + clip_image_f32 * img_res = img_res_v.data; + img.image_tokens = clip_n_patches(clp_ctx); img.image_embedding = (float *)malloc(clip_embd_nbytes(clp_ctx)); if (!img.image_embedding) @@ -989,7 +996,9 @@ struct llama_server_context LOG_TEE("Unable to encode image\n"); return false; } - clip_image_f32_free(img_res); + + clip_image_f32_free(img_res_v.data); + img.request_encode_image = false; } From 8084d554406b767d36b3250b3b787462d5dd626f Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Wed, 14 Feb 2024 11:49:01 +0300 Subject: [PATCH 593/811] cmake : ARM intrinsics detection for MSVC (#5401) --- CMakeLists.txt | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a544f2da6..f8c7f9978 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -855,11 +855,21 @@ if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STR CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) message(STATUS "ARM detected") if (MSVC) + add_compile_definitions(__aarch64__) # MSVC defines _M_ARM64 instead add_compile_definitions(__ARM_NEON) add_compile_definitions(__ARM_FEATURE_FMA) - add_compile_definitions(__ARM_FEATURE_DOTPROD) - # add_compile_definitions(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) # MSVC doesn't support vdupq_n_f16, vld1q_f16, vst1q_f16 - add_compile_definitions(__aarch64__) # MSVC defines _M_ARM64 instead + + set(CMAKE_REQUIRED_FLAGS_PREV ${CMAKE_REQUIRED_FLAGS}) + string(JOIN " " CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} "/arch:armv8.2") + check_cxx_source_compiles("#include \nint main() { int8x16_t _a, _b; int32x4_t _s = vdotq_s32(_s, _a, _b); return 0; }" 
GGML_COMPILER_SUPPORT_DOTPROD) + if (GGML_COMPILER_SUPPORT_DOTPROD) + add_compile_definitions(__ARM_FEATURE_DOTPROD) + endif () + check_cxx_source_compiles("#include \nint main() { float16_t _a; float16x8_t _s = vdupq_n_f16(_a); return 0; }" GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + if (GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + add_compile_definitions(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) + endif () + set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_PREV}) else() check_cxx_compiler_flag(-mfp16-format=ieee COMPILER_SUPPORTS_FP16_FORMAT_I3E) if (NOT "${COMPILER_SUPPORTS_FP16_FORMAT_I3E}" STREQUAL "") From ccbb277f4642fc0d84c72dbc0d51ed2df418d6ce Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:49:42 +0100 Subject: [PATCH 594/811] llava : update README.md (#5489) * Update README.md * Update README.md * Update examples/llava/README.md --------- Co-authored-by: Georgi Gerganov --- examples/llava/README.md | 46 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index e2ef0eff1..1d5374f2a 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -1,10 +1,12 @@ # LLaVA -Currently this implementation supports [llava-v1.5](https://huggingface.co/liuhaotian/llava-v1.5-7b) variants. +Currently this implementation supports [llava-v1.5](https://huggingface.co/liuhaotian/llava-v1.5-7b) variants, +as well as llava-1.6 [llava-v1.6](https://huggingface.co/collections/liuhaotian/llava-16-65b9e40155f60fd046a5ccf2) variants. The pre-converted [7b](https://huggingface.co/mys/ggml_llava-v1.5-7b) and [13b](https://huggingface.co/mys/ggml_llava-v1.5-13b) models are available. +For llava-1.6 a variety of prepared gguf models are available as well [7b-34b](https://huggingface.co/cmp-nct/llava-1.6-gguf) After API is confirmed, more models will be supported / uploaded. @@ -18,6 +20,7 @@ After building, run: `./llava-cli` to see the usage. For example: ``` **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. +**note**: For GPU offloading ensure to use the `-ngl` flag just like usual ## LLaVA 1.5 @@ -55,11 +58,46 @@ python ./convert.py ../llava-v1.5-7b Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. 
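The llava-1.6 ("anyres") preprocessing used by the conversion steps that follow picks a target resolution for each input image from the model's `image_grid_pinpoints`. As a reference, here is a minimal Python sketch of that selection step, mirroring the `select_best_resolution` helper added to `examples/llava/clip.cpp` and `examples/llava/llava.cpp`; the function and the sample values below are illustrative, not part of the repository:

```python
# Sketch: choose the grid pinpoint that preserves the most image detail while
# wasting the least padding area (mirrors select_best_resolution in clip.cpp/llava.cpp).
def select_best_resolution(original_size, possible_resolutions):
    orig_w, orig_h = original_size
    best_fit = None
    max_effective = 0
    min_wasted = float("inf")
    for width, height in possible_resolutions:
        scale = min(width / orig_w, height / orig_h)
        down_w, down_h = int(orig_w * scale), int(orig_h * scale)
        effective = min(down_w * down_h, orig_w * orig_h)
        wasted = width * height - effective
        if effective > max_effective or (effective == max_effective and wasted < min_wasted):
            max_effective, min_wasted, best_fit = effective, wasted, (width, height)
    return best_fit

# llava-1.6 grid pinpoints, i.e. the flattened gguf array [336, 672, 672, 336, 672, 672, 1008, 336, 336, 1008]
pinpoints = [(336, 672), (672, 336), (672, 672), (1008, 336), (336, 1008)]
print(select_best_resolution((1024, 768), pinpoints))  # -> (672, 672) for this input size
```

The chosen resolution is then split into 336x336 patches, and the original image (resized to 336x336) is prepended as a global-context patch, which is why llava-1.6 produces far more image tokens than llava-1.5.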
-## LLaVA 1.6 +## LLaVA 1.6 gguf conversion + +1) Backup your pth/safetensor model files as llava-surgery modifies them +2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: +- you will find a llava.projector and a llava.clip file in your model directory +3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config.json) +4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip_model_is_vision` +- This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP +5) Everything else as usual: convert.py the hf model, quantize as needed +**note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) +**note** llava-1.6 greatly benefits from batched prompt processing (defaults work) + +## llava-cli templating and llava-1.6 prompting + +llava-1.5 models all use the same vicuna prompt, here you can just add your image question like `-p "Provide a full description."` +For llava-1.5 models which are not vicuna (mistral and Yi) you need to adapt system prompt as well as user prompt, for this purpose llava-cli has a basic templating system: + +**For Mistral and using llava-cli binary:** +Add this: `-p "\nUSER:\nProvide a full description.\nASSISTANT:\n"` +The mistral template for llava-1.6 seems to be no system print and a USER/ASSISTANT role + +**For the 34B this should work:** +Add this: `-e -p <|im_start|>system\nAnswer the questions.<|im_end|><|im_start|>user\n\nProvide a full description.<|im_end|><|im_start|>assistant\n` + + +## How to know if you are running in llava-1.5 or llava-1.6 mode + +When running llava-cli you will see a visual information right before the prompt is being processed: + +**Llava-1.5:** +`encode_image_with_clip: image embedding created: 576 tokens` + +**Llava-1.6 (anything above 576):** +`encode_image_with_clip: image embedding created: 2880 tokens` + + +Alternatively just pay notice to how many "tokens" have been used for your prompt, it will also show 1000+ tokens for llava-1.6 + -- Use `llava-surgery-v2.py` -- TODO: add detailed instructions ## TODO From 594fca3fefe27b8e95cfb1656eb0e160ad15a793 Mon Sep 17 00:00:00 2001 From: Rune <43761327+Rune-AI@users.noreply.github.com> Date: Wed, 14 Feb 2024 16:15:49 +0100 Subject: [PATCH 595/811] readme : fix typo (#5490) executabhle -> executable --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0b4efdd33..0c4ee5a27 100644 --- a/README.md +++ b/README.md @@ -958,7 +958,7 @@ We have three Docker images available for this project: 1. `ghcr.io/ggerganov/llama.cpp:full`: This image includes both the main executable file and the tools to convert LLaMA models into ggml and convert into 4-bit quantization. (platforms: `linux/amd64`, `linux/arm64`) 2. `ghcr.io/ggerganov/llama.cpp:light`: This image only includes the main executable file. (platforms: `linux/amd64`, `linux/arm64`) -3. `ghcr.io/ggerganov/llama.cpp:server`: This image only includes the server executabhle file. (platforms: `linux/amd64`, `linux/arm64`) +3. `ghcr.io/ggerganov/llama.cpp:server`: This image only includes the server executable file. 
(platforms: `linux/amd64`, `linux/arm64`) Additionally, there the following images, similar to the above: From 704359e29985a06a389337a2617b7f3fa8eff908 Mon Sep 17 00:00:00 2001 From: Neuman Vong Date: Thu, 15 Feb 2024 17:11:15 +1100 Subject: [PATCH 596/811] vulkan: Find optimal memory type but with fallback (#5381) * @0cc4m feedback * More feedback @0cc4m --- ggml-vulkan.cpp | 65 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 7834e635c..1fad24fd1 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -707,9 +707,21 @@ static void ggml_vk_queue_cleanup(ggml_backend_vk_context * ctx, vk_queue& q) { q.cmd_buffer_idx = 0; } -static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { +static uint32_t find_properties(const vk::PhysicalDeviceMemoryProperties* mem_props, vk::MemoryRequirements* mem_req, vk::MemoryPropertyFlags flags) { + for (uint32_t i = 0; i < mem_props->memoryTypeCount; ++i) { + vk::MemoryType memory_type = mem_props->memoryTypes[i]; + if ((mem_req->memoryTypeBits & ((uint64_t)1 << i)) && + (flags & memory_type.propertyFlags) == flags && + mem_props->memoryHeaps[memory_type.heapIndex].size >= mem_req->size) { + return static_cast(i); + } + } + return UINT32_MAX; +} + +static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags, vk::MemoryPropertyFlags fallback_flags = vk::MemoryPropertyFlags(0)) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; + std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ", " << to_string(fallback_flags) << ")" << std::endl; #endif vk_buffer buf = std::make_shared(); @@ -736,15 +748,15 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz uint32_t memory_type_index = UINT32_MAX; - for (uint32_t i = 0; i < mem_props.memoryTypeCount; ++i) { - vk::MemoryType memory_type = mem_props.memoryTypes[i]; - if ((mem_req.memoryTypeBits & ((uint64_t)1 << i)) && (req_flags & memory_type.propertyFlags) == req_flags && mem_props.memoryHeaps[memory_type.heapIndex].size >= mem_req.size) { - memory_type_index = i; - break; - } + memory_type_index = find_properties(&mem_props, &mem_req, req_flags); + buf->memory_property_flags = req_flags; + + if (memory_type_index == UINT32_MAX && fallback_flags) { + memory_type_index = find_properties(&mem_props, &mem_req, fallback_flags); + buf->memory_property_flags = fallback_flags; } - if (memory_type_index >= mem_props.memoryTypeCount) { + if (memory_type_index == UINT32_MAX) { ctx->device.lock()->device.destroyBuffer(buf->buffer); buf->size = 0; throw vk::OutOfDeviceMemoryError("No suitable memory type found"); @@ -758,10 +770,9 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz buf->size = 0; throw e; } - buf->memory_property_flags = req_flags; buf->ptr = nullptr; - if (req_flags & vk::MemoryPropertyFlagBits::eHostVisible) { + if (buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { buf->ptr = ctx->device.lock()->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); } @@ -778,9 +789,9 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz return buf; } -static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { +static 
vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags, vk::MemoryPropertyFlags fallback_flags = vk::MemoryPropertyFlags(0)) { try { - return ggml_vk_create_buffer(ctx, size, req_flags); + return ggml_vk_create_buffer(ctx, size, req_flags, fallback_flags); } catch (const vk::SystemError& e) { std::cerr << "ggml_vulkan: Memory allocation of size " << size << " failed." << std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -791,16 +802,16 @@ static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size static vk_buffer ggml_vk_create_buffer_device(ggml_backend_vk_context * ctx, size_t size) { vk_buffer buf; try { - buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); - } catch (const vk::SystemError& e) { if (ctx->device.lock()->uma) { // Fall back to host memory type - buf = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } else { - std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." << std::endl; - std::cerr << "ggml_vulkan: " << e.what() << std::endl; - throw e; + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); } + } catch (const vk::SystemError& e) { + std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." << std::endl; + std::cerr << "ggml_vulkan: " << e.what() << std::endl; + throw e; } return buf; @@ -1422,7 +1433,9 @@ static void * ggml_vk_host_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif - vk_buffer buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + vk_buffer buf = ggml_vk_create_buffer(ctx, size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); if(!(buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory\n", @@ -1568,7 +1581,9 @@ static void deferred_memcpy(void * dst, const void * src, size_t size, std::vect static void ggml_vk_ensure_sync_staging_buffer(ggml_backend_vk_context * ctx, size_t size) { if (ctx->sync_staging == nullptr || ctx->sync_staging->size < size) { ggml_vk_destroy_buffer(ctx->sync_staging); - ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } } @@ -4082,7 +4097,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { std::cerr << "ggml_vk_preallocate_buffers(qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << 
ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << ")" << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) - ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); ggml_vk_test_transfer(ctx, 8192 * 1000, false); ggml_vk_test_transfer(ctx, 8192 * 1000, true); @@ -4174,7 +4191,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { if (ctx->staging != nullptr) { ggml_vk_destroy_buffer(ctx->staging); } - ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } } From 7930a8a6e89a04c77c51e3ae5dc1cd8e845b6b8f Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Thu, 15 Feb 2024 08:59:18 +0100 Subject: [PATCH 597/811] llaba : hotfix for llava-1.6 image number (#5495) Co-authored-by: John --- examples/llava/llava.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 22953417f..4ed310a0e 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -100,7 +100,7 @@ static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector int num_patches_width = grid_shape.first; // grid 1-4 int num_patches_height = grid_shape.second; // grid 1-4 - const size_t num_images = num_patches_width + num_patches_height + 1; + const size_t num_images = num_patches_width * num_patches_height + 1; // TODO: size calculation is not calculated - it's only tens of MB size_t ctx_size = 0; From 0d4177126b0556e202efb85bf3f768be81076400 Mon Sep 17 00:00:00 2001 From: Elbios <141279586+Elbios@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:01:57 +0100 Subject: [PATCH 598/811] llava : fix memory management bug (#5491) * Fix memory management in llava and server code Fixes this error: llama_new_context_with_model: graph splits (measure): 3 Available slots: -> Slot 0 - max context: 6000 {"timestamp":1707926446,"level":"INFO","function":"main","line":2623,"message":"model loaded"} all slots are idle and system prompt is empty, clear the KV cache slot 0 - loaded image slot 0 is processing [task id: 0] slot 0 : kv cache rm - [0, end) slot 0 - encoding image [id: 1] munmap_chunk(): invalid pointer Aborted * Make it cleaner by checking size in batch free wrapper --- examples/llava/clip.cpp | 24 +++++++++++++++++------- examples/llava/clip.h | 2 ++ examples/server/server.cpp | 11 +++++++++-- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 9c5091e61..2cad27e82 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1230,8 +1230,20 @@ struct clip_image_f32 * clip_image_f32_init() { return new clip_image_f32(); } -void 
clip_image_u8_free (struct clip_image_u8 * img) { delete img; } +void clip_image_u8_free(struct clip_image_u8 * img) { delete img; } void clip_image_f32_free(struct clip_image_f32 * img) { delete img; } +void clip_image_u8_batch_free(struct clip_image_u8_batch & batch) { + if (batch.size > 0) { + delete[] batch.data; + batch.size = 0; + } +} +void clip_image_f32_batch_free(struct clip_image_f32_batch & batch) { + if (batch.size > 0) { + delete[] batch.data; + batch.size = 0; + } +} static void build_clip_img_from_data(const stbi_uc * data, int nx, int ny, clip_image_u8 * img) { img->nx = nx; @@ -1494,11 +1506,8 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli pad_to_square = false; } // free the previous res_imgs if any set - if (res_imgs.size > 0 && res_imgs.size < 100) { - for (size_t i = 0; i < res_imgs.size; i++) { - clip_image_f32_free(&(res_imgs.data[i])); - } - delete[] res_imgs.data; + if (res_imgs.size > 0) { + clip_image_f32_batch_free(res_imgs); } res_imgs.data = nullptr; res_imgs.size = 0; @@ -1650,7 +1659,8 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli res_imgs.size = 1; res_imgs.data = new clip_image_f32[res_imgs.size]; - res_imgs.data[0] = std::move(*res); + res_imgs.data[0] = *res; + clip_image_f32_free(res); return true; } diff --git a/examples/llava/clip.h b/examples/llava/clip.h index cd9a4022f..e5bd54924 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -60,6 +60,8 @@ CLIP_API struct clip_image_f32 * clip_image_f32_init(); CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); +CLIP_API void clip_image_u8_batch_free (struct clip_image_u8_batch & batch); +CLIP_API void clip_image_f32_batch_free(struct clip_image_f32_batch & batch); CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6e3434030..2decd7762 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -975,7 +975,12 @@ struct llama_server_context { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); - clip_image_f32_free(img_res_v.data); + clip_image_f32_batch_free(img_res_v); + return false; + } + if (img_res_v.size == 0) + { + LOG_TEE("Error processing the given image"); return false; } @@ -987,6 +992,7 @@ struct llama_server_context if (!img.image_embedding) { LOG_TEE("Unable to allocate memory for image embeddings\n"); + clip_image_f32_batch_free(img_res_v); clip_free(clp_ctx); return false; } @@ -994,10 +1000,11 @@ struct llama_server_context if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) { LOG_TEE("Unable to encode image\n"); + clip_image_f32_batch_free(img_res_v); return false; } - clip_image_f32_free(img_res_v.data); + clip_image_f32_batch_free(img_res_v); img.request_encode_image = false; } From 73122473ffd73030146276dbb85da7c8021a3ee4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20de=20Vries?= Date: Thu, 15 Feb 2024 14:14:37 +0100 Subject: [PATCH 599/811] fix(gguf-py): special tokens are no longer skipped when add__token is set to false (#5487) * fix(gguf-py): special tokens are no longer skipped when add__token is set to false * fix(gguf-py): added missing cls and mask token ids to the gguf metadata --- gguf-py/gguf/constants.py | 4 ++++ gguf-py/gguf/gguf_writer.py | 6 ++++++ gguf-py/gguf/vocab.py | 6 +----- 3 files changed, 11 insertions(+), 5 deletions(-) 
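For context, a minimal sketch of how the writer methods introduced by this patch map to GGUF metadata keys; the token ids, output file name and architecture string below are made up for illustration and are not taken from the patch:

```python
# Sketch: recording the new cls/mask special-token metadata with gguf-py.
from gguf import GGUFWriter

writer = GGUFWriter("tokenizer-only.gguf", "bert")  # hypothetical output file and arch

writer.add_cls_token_id(101)       # written under "tokenizer.ggml.cls_token_id"
writer.add_mask_token_id(103)      # written under "tokenizer.ggml.mask_token_id"
writer.add_add_bos_token(True)     # "tokenizer.ggml.add_bos_token"
writer.add_add_eos_token(False)    # "tokenizer.ggml.add_eos_token"

writer.write_header_to_file()
writer.write_kv_data_to_file()
writer.close()
```

In practice these calls are made by `SpecialVocab.add_to_gguf()`; with this patch `SpecialVocab` also handles the `cls` and `mask` token types and no longer gives up early when the tokenizer has no `added_tokens` entries.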
diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 5fba01714..9986ce9de 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -73,6 +73,8 @@ class Keys: UNK_ID = "tokenizer.ggml.unknown_token_id" SEP_ID = "tokenizer.ggml.seperator_token_id" PAD_ID = "tokenizer.ggml.padding_token_id" + CLS_ID = "tokenizer.ggml.cls_token_id" + MASK_ID = "tokenizer.ggml.mask_token_id" ADD_BOS = "tokenizer.ggml.add_bos_token" ADD_EOS = "tokenizer.ggml.add_eos_token" ADD_PREFIX = "tokenizer.ggml.add_space_prefix" @@ -685,5 +687,7 @@ KEY_TOKENIZER_EOS_ID = Keys.Tokenizer.EOS_ID KEY_TOKENIZER_UNK_ID = Keys.Tokenizer.UNK_ID KEY_TOKENIZER_SEP_ID = Keys.Tokenizer.SEP_ID KEY_TOKENIZER_PAD_ID = Keys.Tokenizer.PAD_ID +KEY_TOKENIZER_CLS_ID = Keys.Tokenizer.CLS_ID +KEY_TOKENIZER_MASK_ID = Keys.Tokenizer.MASK_ID KEY_TOKENIZER_HF_JSON = Keys.Tokenizer.HF_JSON KEY_TOKENIZER_RWKV = Keys.Tokenizer.RWKV diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index d87bd8e88..26724bf94 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -414,6 +414,12 @@ class GGUFWriter: def add_pad_token_id(self, id: int) -> None: self.add_uint32(Keys.Tokenizer.PAD_ID, id) + def add_cls_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.CLS_ID, id) + + def add_mask_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.MASK_ID, id) + def add_add_bos_token(self, value: bool) -> None: self.add_bool(Keys.Tokenizer.ADD_BOS, value) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index cd1942975..a23136b18 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -29,7 +29,7 @@ class SpecialVocab: if special_token_types is not None: self.special_token_types = special_token_types else: - self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad') + self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad', 'cls', 'mask') self._load(Path(path)) def __repr__(self) -> str: @@ -152,10 +152,6 @@ class SpecialVocab: add_entry = tokenizer_config.get(f'add_{typ}_token') if isinstance(add_entry, bool): self.add_special_token[typ] = add_entry - if not added_tokens: - # We will need this to get the content for the token, so if it's empty - # may as well just give up. 
- continue entry = tokenizer_config.get(f'{typ}_token') if isinstance(entry, str): tc_content = entry From 9350a1cf21b1492c69b20175b73a419b897d6a3a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 15 Feb 2024 15:41:15 +0200 Subject: [PATCH 600/811] scripts : add hf.sh helper script (#5501) * scripts : add hf.sh helper scripts * hf : add error logs * hf : add support for --repo and --file --- scripts/hf.sh | 107 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100755 scripts/hf.sh diff --git a/scripts/hf.sh b/scripts/hf.sh new file mode 100755 index 000000000..1e9e5a6ea --- /dev/null +++ b/scripts/hf.sh @@ -0,0 +1,107 @@ +#!/bin/bash +# +# Shortcut for downloading HF models +# +# Usage: +# ./main -m $(./examples/hf.sh https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./main -m $(./examples/hf.sh --url https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/blob/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./main -m $(./examples/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf) +# + +# all logs go to stderr +function log { + echo "$@" 1>&2 +} + +function usage { + log "Usage: $0 [[--url] ] [--repo ] [--file ] [-h|--help]" + exit 1 +} + +# check for curl or wget +function has_cmd { + if ! [ -x "$(command -v $1)" ]; then + return 1 + fi +} + +if has_cmd wget; then + cmd="wget -q --show-progress -c -O %s %s" +elif has_cmd curl; then + cmd="curl -C - -f -o %s -L %s" +else + log "[E] curl or wget not found" + exit 1 +fi + +url="" +repo="" +file="" + +# parse args +while [[ $# -gt 0 ]]; do + case "$1" in + --url) + url="$2" + shift 2 + ;; + --repo) + repo="$2" + shift 2 + ;; + --file) + file="$2" + shift 2 + ;; + -h|--help) + usage + ;; + *) + url="$1" + shift + ;; + esac +done + +if [ -n "$repo" ] && [ -n "$file" ]; then + url="https://huggingface.co/$repo/resolve/main/$file" +fi + +if [ -z "$url" ]; then + log "[E] missing --url" + usage +fi + +# check if the URL is a HuggingFace model, and if so, try to download it +is_url=false + +if [[ ${#url} -gt 22 ]]; then + if [[ ${url:0:22} == "https://huggingface.co" ]]; then + is_url=true + fi +fi + +if [ "$is_url" = false ]; then + log "[E] invalid URL, must start with https://huggingface.co" + exit 0 +fi + +# replace "blob/main" with "resolve/main" +url=${url/blob\/main/resolve\/main} + +basename=$(basename $url) + +log "[+] attempting to download $basename" + +if [ -n "$cmd" ]; then + cmd=$(printf "$cmd" "$basename" "$url") + log "[+] $cmd" + if $cmd; then + echo $basename + exit 0 + fi +fi + +log "[-] failed to download" + +exit 1 From 9060a1e9dfca6038906e819be5fa42217f49028c Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 15 Feb 2024 16:49:01 +0100 Subject: [PATCH 601/811] cuda : print message when initialization fails (#5512) * cuda : print message when initialization fails * use CUDA_NAME both times --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 96976f248..b35fcb7fd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7943,6 +7943,7 @@ GGML_CALL void ggml_init_cublas() { if (cudaGetDeviceCount(&g_device_count) != cudaSuccess) { initialized = true; g_cublas_loaded = false; + fprintf(stderr, "%s: no " GGML_CUDA_NAME " devices found, " GGML_CUDA_NAME " will be disabled\n", __func__); return; } From c06e45d72983d9ace7b1535f7e7ea258d212169e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 15 Feb 2024 18:49:08 +0200 Subject: [PATCH 602/811] clip : fix wrong 
loop condition --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 2cad27e82..98d512f67 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1103,7 +1103,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); printf("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); printf("v_image_grid_pinpoints: "); - for (int i = 0; i < 32 & hparams.image_grid_pinpoints[i]!=0; ++i) { + for (int i = 0; i < 32 && (hparams.image_grid_pinpoints[i] != 0); ++i) { printf("%d ", hparams.image_grid_pinpoints[i]); } printf("\n"); From 4524290e87b8e107cc2b56e1251751546f4b9051 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Thu, 15 Feb 2024 11:21:49 -0600 Subject: [PATCH 603/811] Use correct type of pooling for embedding models (#5500) Use correct type of pooling for embedding models --- convert-hf-to-gguf.py | 24 ++++++++++- gguf-py/gguf/constants.py | 8 +++- gguf-py/gguf/gguf_writer.py | 5 ++- llama.cpp | 82 +++++++++++++++++++++++++------------ llama.h | 6 +++ 5 files changed, 94 insertions(+), 31 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index ae471481d..9771fccf9 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1650,7 +1650,29 @@ class BertModel(Model): def set_gguf_parameters(self): super().set_gguf_parameters() self.gguf_writer.add_causal_attention(False) - self.gguf_writer.add_pooling_layer(True) + + # get pooling path + with open(self.dir_model / "modules.json", encoding="utf-8") as f: + modules = json.load(f) + pooling_path = None + for mod in modules: + if mod["type"] == "sentence_transformers.models.Pooling": + pooling_path = mod["path"] + break + + # get pooling type + pooling_type = gguf.PoolingType.NONE + if pooling_path is not None: + with open(self.dir_model / pooling_path / "config.json", encoding="utf-8") as f: + pooling = json.load(f) + if pooling["pooling_mode_mean_tokens"]: + pooling_type = gguf.PoolingType.MEAN + elif pooling["pooling_mode_cls_token"]: + pooling_type = gguf.PoolingType.CLS + else: + raise NotImplementedError("Only MEAN and CLS pooling types supported") + + self.gguf_writer.add_pooling_type(pooling_type.value) def set_vocab(self): path = self.dir_model diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 9986ce9de..114a9a974 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -40,7 +40,7 @@ class Keys: TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" EXPERT_COUNT = "{arch}.expert_count" EXPERT_USED_COUNT = "{arch}.expert_used_count" - POOLING_LAYER = "{arch}.pooling_layer" + POOLING_TYPE = "{arch}.pooling_type" class Attention: HEAD_COUNT = "{arch}.attention.head_count" @@ -561,6 +561,12 @@ class RopeScalingType(Enum): YARN = 'yarn' +class PoolingType(IntEnum): + NONE = 0 + MEAN = 1 + CLS = 2 + + class GGMLQuantizationType(IntEnum): F32 = 0 F16 = 1 diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 26724bf94..e4681475c 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -19,6 +19,7 @@ from .constants import ( GGUFValueType, Keys, RopeScalingType, + PoolingType, TokenType, ) @@ -360,8 +361,8 @@ class GGUFWriter: def add_causal_attention(self, value: bool) -> None: self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) - def 
add_pooling_layer(self, value: bool) -> None: - self.add_bool(Keys.LLM.POOLING_LAYER.format(arch=self.arch), value) + def add_pooling_type(self, value: PoolingType) -> None: + self.add_uint32(Keys.LLM.POOLING_TYPE.format(arch=self.arch), value) def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) diff --git a/llama.cpp b/llama.cpp index 14e8821cd..aceb9c25a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -256,7 +256,7 @@ enum llm_kv { LLM_KV_TENSOR_DATA_LAYOUT, LLM_KV_EXPERT_COUNT, LLM_KV_EXPERT_USED_COUNT, - LLM_KV_POOLING_LAYER, + LLM_KV_POOLING_TYPE, LLM_KV_ATTENTION_HEAD_COUNT, LLM_KV_ATTENTION_HEAD_COUNT_KV, @@ -314,7 +314,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" }, { LLM_KV_EXPERT_COUNT, "%s.expert_count" }, { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" }, - { LLM_KV_POOLING_LAYER, "%s.pooling_layer" }, + { LLM_KV_POOLING_TYPE , "%s.pooling_type" }, { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" }, { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, @@ -1561,7 +1561,7 @@ struct llama_hparams { float f_max_alibi_bias; bool causal_attn = true; - bool pooling_layer = false; + uint32_t pooling_type = LLAMA_POOLING_NONE; bool operator!=(const llama_hparams & other) const { @@ -1924,7 +1924,8 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] - struct ggml_tensor * inp_sum; // F32 [n_batch, n_batch] + struct ggml_tensor * inp_mean; // F32 [n_batch, n_batch] + struct ggml_tensor * inp_cls; // I32 [n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -3086,7 +3087,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); switch (hparams.n_layer) { case 3: @@ -3107,7 +3108,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); if (hparams.n_layer == 12 && hparams.n_embd == 768) { model.type = e_model::MODEL_137M; @@ -4934,7 +4935,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; - const bool do_pooling; + const uint32_t pooling_type; const llm_build_cb & cb; @@ -4978,7 +4979,7 @@ struct llm_build_context { kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - do_pooling (hparams.pooling_layer && cparams.do_pooling), + pooling_type (cparams.do_pooling ? 
hparams.pooling_type : (uint32_t)LLAMA_POOLING_NONE), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5835,7 +5836,8 @@ struct llm_build_context { // get input vectors with right size const size_t stride1 = n_tokens * ggml_type_size(lctx.inp_tokens->type); struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - struct ggml_tensor * inp_sum = ggml_view_2d(ctx0, lctx.inp_sum, n_tokens, n_tokens, stride1, 0); + struct ggml_tensor * inp_mean = ggml_view_2d(ctx0, lctx.inp_mean, n_tokens, n_tokens, stride1, 0); + struct ggml_tensor * inp_cls = ggml_view_1d(ctx0, lctx.inp_cls, n_tokens, 0); // construct input embeddings (token, type, position) inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); @@ -5952,8 +5954,12 @@ struct llm_build_context { cur = inpL; // pooling layer - if (do_pooling) { - cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_sum); + if (pooling_type == LLAMA_POOLING_MEAN) { + cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); + } else if (pooling_type == LLAMA_POOLING_CLS) { + cur = ggml_get_rows(ctx0, cur, inp_cls); + } else { + GGML_ASSERT(pooling_type == LLAMA_POOLING_NONE && "Invalid pooling type"); } cb(cur, "result_embd", -1); @@ -7501,15 +7507,6 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - { - assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; - - for (int i = 0; i < batch.n_tokens; ++i) { - data[i] = 1.0f/float(batch.n_tokens); - } - } - if (kv_self.has_shift) { const int64_t n_ctx = cparams.n_ctx; @@ -7522,17 +7519,46 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (hparams.pooling_layer && cparams.do_pooling) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_MEAN) { const int64_t n_tokens = batch.n_tokens; - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); + float * data = (float *) lctx.inp_mean->data; - memset(lctx.inp_sum->data, 0, batch.n_tokens * batch.n_tokens * ggml_element_size(lctx.inp_sum)); + memset(lctx.inp_mean->data, 0, n_tokens * n_tokens * ggml_element_size(lctx.inp_mean)); + + std::vector sum(n_tokens, 0); + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + sum[seq_id] += 1; + } + + std::vector div(n_tokens, 0.0f); + for (int i = 0; i < n_tokens; ++i) { + const uint64_t s = sum[i]; + if (s > 0) { + div[i] = 1.0f/float(s); + } + } for (int i = 0; i < n_tokens; ++i) { const llama_seq_id seq_id = batch.seq_id[i][0]; - data[seq_id*n_tokens + i] = 1.0f; + data[seq_id*n_tokens + i] = div[seq_id]; + } + } + + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_CLS) { + const int64_t n_tokens = batch.n_tokens; + + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); + uint32_t * data = (uint32_t *) lctx.inp_cls->data; + + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + const llama_pos pos = batch.pos[i]; + if (pos == 0) { + data[seq_id] = i; + } } } } @@ -11417,14 +11443,16 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, 
cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); - ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); + ctx->inp_mean = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); + ctx->inp_cls = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); ggml_set_name(ctx->inp_pos, "inp_pos"); ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask"); ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); - ggml_set_name(ctx->inp_sum, "inp_sum"); + ggml_set_name(ctx->inp_mean, "inp_mean"); + ggml_set_name(ctx->inp_cls, "inp_cls"); ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true)); diff --git a/llama.h b/llama.h index 5ef78ec96..4a26bd619 100644 --- a/llama.h +++ b/llama.h @@ -112,6 +112,12 @@ extern "C" { LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, }; + enum llama_pooling_type { + LLAMA_POOLING_NONE = 0, + LLAMA_POOLING_MEAN = 1, + LLAMA_POOLING_CLS = 2, + }; + enum llama_split_mode { LLAMA_SPLIT_NONE = 0, // single GPU LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs From 594845aab1c6775877f6d9545a51dc0f8d0b3d77 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 09:57:55 +0200 Subject: [PATCH 604/811] ci : fix BERT model download and convert --- ci/run.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ci/run.sh b/ci/run.sh index a4264d775..979b4a793 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -580,6 +580,10 @@ function gg_run_embd_bge_small { gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/pytorch_model.bin gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/sentence_bert_config.json gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/vocab.txt + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/modules.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/config.json + + gg_wget models-mnt/bge-small/1_Pooling https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/1_Pooling/config.json path_models="../models-mnt/bge-small" From 60ed04cf82dc91ade725dd7ad53f0ee81f76eccf Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 16 Feb 2024 10:24:39 +0100 Subject: [PATCH 605/811] llava : fix clip-model-is-vision flag in README.md (#5509) * llava: fix clip-model-is-vision flag in README.md This commit fixes the flag `--clip_model_is_vision` in README.md which is does not match the actual flag: ```console $ python convert-image-encoder-to-gguf.py --help ... 
--clip-model-is-vision The clip model is a pure vision model (ShareGPT4V vision extract for example) ``` Signed-off-by: Daniel Bevenius * llava: update link to vit config in README.md Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 1d5374f2a..57eb42932 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -63,8 +63,8 @@ Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` director 1) Backup your pth/safetensor model files as llava-surgery modifies them 2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: - you will find a llava.projector and a llava.clip file in your model directory -3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config.json) -4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip_model_is_vision` +3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config_vit.json) and rename it to config.json. +4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip-model-is-vision` - This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP 5) Everything else as usual: convert.py the hf model, quantize as needed **note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) From f486f6e1e5e9d01603d9325ab3e05f1edb362a95 Mon Sep 17 00:00:00 2001 From: bmwl Date: Fri, 16 Feb 2024 01:31:07 -0800 Subject: [PATCH 606/811] ggml : add numa options (#5377) * Added numa options to allow finer grained control as well as plumbing for a new mirror mode that will require numa.h * Reverted Makefile * Fixed include * Removed sched.h from ggml.h, moved ggml_get_numa_affinity into ggml.c, removed trailing whitespace and fixed up a few inconsistent variables * removed trailing whitespace * Added numa options to allow finer grained control as well as plumbing for a new mirror mode that will require numa.h * Reverting Makefile * Fixed a number of issues with the move from BOOL to ggml_numa_strategies. 
Added a note about mirror mode note being implemented yet * Removing MIRROR_MODE code for this PR * Removing last bit of MIRROR_MODE code for this PR * Removing unneeded branch in server.cpp example and moving get_numa_affinity and making it static * Fixed lingering init_llama_backend() bool calls in tests and examples * Remote enum llama_numa_strategies * Revert bad merge with dynatemp flags * add missing enum ggml_numa_strategies declaration and revert sync problem with master * add missing enum ggml_numa_strategies declaration * fixed ggml_init_numa variable * Update ggml.h Co-authored-by: Jared Van Bortel * Update READMEs with info about numa flags, change INTERLEAVE strategy name to DISTRIBUTE everywhere, implement the improved distribution strategy from @rankaiyx, fix a spelling mistake and un-merge some bad merges * split numa init out from llama_backend_init and created llama_numa_init. Updated all code paths and samples * Fix up some boolean vs enum comparisons * Added #ifdefs for non-Linux OS that don't have cpu_set_t datatype * Update ggml.h Align enum values Co-authored-by: Georgi Gerganov * Update ggml.c Remove whitespace Co-authored-by: Georgi Gerganov * Update ggml.c align paremeters Co-authored-by: Georgi Gerganov * Update examples/server/server.cpp remove whitespace and align brace Co-authored-by: Georgi Gerganov * Update common/common.cpp Remove whitespace and align brace Co-authored-by: Georgi Gerganov * unified ggml_numa_strategy enum and fixed text alignment in server.cpp example * Update ggml.c simplified return for platforms without NUMA support Co-authored-by: Jared Van Bortel * removed redundant else from cli argument processing of --numa * whitespace --------- Co-authored-by: root Co-authored-by: Jared Van Bortel Co-authored-by: Georgi Gerganov Co-authored-by: Jared Van Bortel --- common/common.cpp | 20 +++-- common/common.h | 2 +- examples/batched-bench/batched-bench.cpp | 3 +- examples/batched.swift/Sources/main.swift | 2 +- examples/batched/batched.cpp | 3 +- examples/beam-search/beam-search.cpp | 3 +- examples/embedding/embedding.cpp | 3 +- examples/imatrix/imatrix.cpp | 3 +- examples/infill/infill.cpp | 3 +- examples/llama-bench/llama-bench.cpp | 3 +- .../app/src/main/cpp/llama-android.cpp | 4 +- .../llama.cpp.swift/LibLlama.swift | 2 +- examples/llava/llava-cli.cpp | 3 +- examples/lookahead/lookahead.cpp | 3 +- examples/lookup/lookup.cpp | 3 +- examples/main/README.md | 6 +- examples/main/main.cpp | 3 +- examples/parallel/parallel.cpp | 3 +- examples/passkey/passkey.cpp | 3 +- examples/perplexity/perplexity.cpp | 3 +- examples/quantize/quantize.cpp | 2 +- examples/server/README.md | 7 ++ examples/server/server.cpp | 22 +++-- examples/simple/simple.cpp | 3 +- examples/speculative/speculative.cpp | 3 +- examples/tokenize/tokenize.cpp | 2 +- ggml.c | 80 ++++++++++++++++--- ggml.h | 12 ++- llama.cpp | 14 ++-- llama.h | 5 +- tests/test-autorelease.cpp | 2 +- tests/test-model-load-cancel.cpp | 2 +- tests/test-tokenizer-0-falcon.cpp | 2 +- tests/test-tokenizer-0-llama.cpp | 2 +- tests/test-tokenizer-1-bpe.cpp | 2 +- tests/test-tokenizer-1-llama.cpp | 2 +- 36 files changed, 178 insertions(+), 62 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index f64da2cb6..c5e83cc2a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -671,7 +671,15 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } else if (arg == "--no-mmap") { params.use_mmap = false; } else if (arg == "--numa") { - params.numa = true; + if (++i >= argc) { + 
invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "distribute" || value == "") { params.numa = GGML_NUMA_STRATEGY_DISTRIBUTE; } + else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } + else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } + else { invalid_param = true; break; } } else if (arg == "--verbose-prompt") { params.verbose_prompt = true; } else if (arg == "--no-display-prompt") { @@ -935,7 +943,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -tb N, --threads-batch N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads)\n"); printf(" -td N, --threads-draft N"); - printf(" number of threads to use during generation (default: same as --threads)"); + printf(" number of threads to use during generation (default: same as --threads)\n"); printf(" -tbd N, --threads-batch-draft N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads-draft)\n"); printf(" -p PROMPT, --prompt PROMPT\n"); @@ -1005,7 +1013,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --winogrande-tasks N number of tasks to use when computing the Winogrande score (default: %zu)\n", params.winogrande_tasks); printf(" --multiple-choice compute multiple choice score over random tasks from datafile supplied with -f\n"); printf(" --multiple-choice-tasks N number of tasks to use when computing the multiple choice score (default: %zu)\n", params.winogrande_tasks); - printf(" --kl-divergence computes KL-divergence to logits provided via --kl-divergence-base"); + printf(" --kl-divergence computes KL-divergence to logits provided via --kl-divergence-base\n"); printf(" --keep N number of tokens to keep from the initial prompt (default: %d, -1 = all)\n", params.n_keep); printf(" --draft N number of tokens to draft for speculative decoding (default: %d)\n", params.n_draft); printf(" --chunks N max number of chunks to process (default: %d, -1 = all)\n", params.n_chunks); @@ -1022,7 +1030,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa TYPE attempt optimizations that help on some NUMA systems\n"); + printf(" - distribute: spread execution evenly over all nodes\n"); + printf(" - isolate: only spawn threads on CPUs on the node that execution started on\n"); + printf(" - numactl: use the CPU map provided by numactl\n"); printf(" if run without this previously, it is recommended to drop the system page cache before using this\n"); printf(" see https://github.com/ggerganov/llama.cpp/issues/1437\n"); if (llama_supports_gpu_offload()) { @@ -1689,7 +1700,6 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "no_mmap: %s # default: false\n", !params.use_mmap ? "true" : "false"); fprintf(stream, "no_mul_mat_q: %s # default: false\n", !params.mul_mat_q ? "true" : "false"); fprintf(stream, "no_penalize_nl: %s # default: false\n", !sparams.penalize_nl ? "true" : "false"); - fprintf(stream, "numa: %s # default: false\n", params.numa ? 
"true" : "false"); fprintf(stream, "ppl_output_type: %d # default: 0\n", params.ppl_output_type); fprintf(stream, "ppl_stride: %d # default: 0\n", params.ppl_stride); fprintf(stream, "presence_penalty: %f # default: 0.0\n", sparams.penalty_present); diff --git a/common/common.h b/common/common.h index 9bdd45cf9..74c136995 100644 --- a/common/common.h +++ b/common/common.h @@ -76,6 +76,7 @@ struct gpt_params { float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; + ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; // // sampling parameters struct llama_sampling_params sparams; @@ -134,7 +135,6 @@ struct gpt_params { bool logits_all = false; // return logits for all tokens in the batch bool use_mmap = true; // use mmap for faster loads bool use_mlock = false; // use mlock to keep model in memory - bool numa = false; // attempt optimizations that help on some NUMA systems bool verbose_prompt = false; // print prompt tokens before generation bool display_prompt = true; // print prompt before generation bool infill = false; // use infill mode diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index b52d68457..55dfd9784 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -82,7 +82,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index 4d0005349..d75c503d5 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -17,7 +17,7 @@ let n_parallel: Int = arguments.count > 3 && Int(arguments[3]) != nil ? 
Int(argu let n_len: Int = 32 // init LLM -llama_backend_init(false) +llama_backend_init() defer { llama_backend_free() } diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index b1775e0b0..eab636692 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -50,7 +50,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/beam-search/beam-search.cpp b/examples/beam-search/beam-search.cpp index 679b382e1..866c6d7a6 100644 --- a/examples/beam-search/beam-search.cpp +++ b/examples/beam-search/beam-search.cpp @@ -119,7 +119,8 @@ int main(int argc, char ** argv) // Init LLM : //--------------------------------- - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index b4688cf51..acff715e9 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -74,7 +74,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index bc9f6fa68..f21bc48f3 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -568,7 +568,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model_params mparams = llama_model_params_from_gpt_params(params); diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 72fb133b4..92c67b7cf 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -202,7 +202,8 @@ int main(int argc, char ** argv) { std::mt19937 rng(params.seed); LOG("%s: llama backend init\n", __func__); - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index ddb0ba064..11410f8ae 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -1151,8 +1151,7 @@ int main(int argc, char ** argv) { if (!params.verbose) { llama_log_set(llama_null_log_callback, NULL); } - bool numa = false; - llama_backend_init(numa); + llama_backend_init(); // initialize printer std::unique_ptr p; diff --git a/examples/llama.android/app/src/main/cpp/llama-android.cpp b/examples/llama.android/app/src/main/cpp/llama-android.cpp index d5e705dce..2beb1e0d5 100644 --- a/examples/llama.android/app/src/main/cpp/llama-android.cpp +++ b/examples/llama.android/app/src/main/cpp/llama-android.cpp @@ -274,8 +274,8 @@ Java_com_example_llama_Llm_new_1batch(JNIEnv *, jobject, jint n_tokens, jint emb extern "C" JNIEXPORT void JNICALL -Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject, jboolean numa) { - llama_backend_init(numa); +Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject) { + llama_backend_init(); } extern "C" diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index fc79fd346..58fcf40c6 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ 
b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -51,7 +51,7 @@ actor LlamaContext { } static func create_context(path: String) throws -> LlamaContext { - llama_backend_init(false) + llama_backend_init() var model_params = llama_model_default_params() #if targetEnvironment(simulator) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index bef7f7c95..e29da6cb2 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -218,7 +218,8 @@ static struct llava_context * llava_init(gpt_params * params) { auto ctx_clip = clip_model_load(clip_path, /*verbosity=*/ 1); - llama_backend_init(params->numa); + llama_backend_init(); + llama_numa_init(params->numa); llama_model_params model_params = llama_model_params_from_gpt_params(*params); diff --git a/examples/lookahead/lookahead.cpp b/examples/lookahead/lookahead.cpp index e55a15a1b..e2551e7a4 100644 --- a/examples/lookahead/lookahead.cpp +++ b/examples/lookahead/lookahead.cpp @@ -54,7 +54,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index 18235b8a1..b53fae110 100644 --- a/examples/lookup/lookup.cpp +++ b/examples/lookup/lookup.cpp @@ -31,7 +31,8 @@ int main(int argc, char ** argv){ #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/main/README.md b/examples/main/README.md index c7997f665..7f84e4262 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -283,7 +283,11 @@ These options help improve the performance and memory usage of the LLaMA models. ### NUMA support -- `--numa`: Attempt optimizations that help on some systems with non-uniform memory access. This currently consists of pinning an equal proportion of the threads to the cores on each NUMA node, and disabling prefetch and readahead for mmap. The latter causes mapped pages to be faulted in on first access instead of all at once, and in combination with pinning threads to NUMA nodes, more of the pages end up on the NUMA node where they are used. Note that if the model is already in the system page cache, for example because of a previous run without this option, this will have little effect unless you drop the page cache first. This can be done by rebooting the system or on Linux by writing '3' to '/proc/sys/vm/drop_caches' as root. +- `--numa distribute`: Pin an equal proportion of the threads to the cores on each NUMA node. This will spread the load amongst all cores on the system, utilitizing all memory channels at the expense of potentially requiring memory to travel over the slow links between nodes. +- `--numa isolate`: Pin all threads to the NUMA node that the program starts on. This limits the number of cores and amount of memory that can be used, but guarantees all memory access remains local to the NUMA node. +- `--numa numactl`: Pin threads to the CPUMAP that is passed to the program by starting it with the numactl utility. This is the most flexible mode, and allow arbitraty core usage patterns, for example a map that uses all the cores on one NUMA nodes, and just enough cores on a second node to saturate the inter-node memory bus. 
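To make the new `--numa` flags concrete: this patch splits backend initialization in two, so `llama_backend_init()` no longer takes a NUMA boolean and the strategy is passed separately to `llama_numa_init()`, which does nothing for `GGML_NUMA_STRATEGY_DISABLED`. The sketch below is illustrative rather than part of the patch; the model path is a placeholder and the model-loading calls are only there for context.

```cpp
// Minimal sketch of the split init sequence; "model.gguf" is a placeholder.
#include "llama.h"

int main() {
    llama_backend_init();                           // no longer takes a bool
    llama_numa_init(GGML_NUMA_STRATEGY_DISTRIBUTE); // optional; same as --numa distribute

    llama_model_params mparams = llama_model_default_params();
    llama_model * model = llama_load_model_from_file("model.gguf", mparams);
    if (model != nullptr) {
        // ... create a context, run inference ...
        llama_free_model(model);
    }

    llama_backend_free();
    return 0;
}
```

Keeping the NUMA setup in an optional second call means existing callers only have to drop the boolean argument, which is exactly the mechanical change repeated across the examples and tests in this patch.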
+ + These flags attempt optimizations that help on some systems with non-uniform memory access. This currently consists of one of the above strategies, and disabling prefetch and readahead for mmap. The latter causes mapped pages to be faulted in on first access instead of all at once, and in combination with pinning threads to NUMA nodes, more of the pages end up on the NUMA node where they are used. Note that if the model is already in the system page cache, for example because of a previous run without this option, this will have little effect unless you drop the page cache first. This can be done by rebooting the system or on Linux by writing '3' to '/proc/sys/vm/drop_caches' as root. ### Memory Float 32 diff --git a/examples/main/main.cpp b/examples/main/main.cpp index e8ab8cbae..f5d2f4893 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -185,7 +185,8 @@ int main(int argc, char ** argv) { } LOG("%s: llama backend init\n", __func__); - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index d2e074d9e..7d11fcd59 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -122,7 +122,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp index 5c0022832..e12a1cdf1 100644 --- a/examples/passkey/passkey.cpp +++ b/examples/passkey/passkey.cpp @@ -71,7 +71,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index b2c131d4c..67d2d3293 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -1809,7 +1809,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 85f403ffc..4a5c504e3 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -237,7 +237,7 @@ int main(int argc, char ** argv) { params.imatrix = &imatrix_data; } - llama_backend_init(false); + llama_backend_init(); // parse command line arguments const std::string fname_inp = argv[arg_idx]; diff --git a/examples/server/README.md b/examples/server/README.md index 0f7373ae8..8e141d22d 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -16,6 +16,13 @@ Command line options: - `--memory-f32`: Use 32-bit floats instead of 16-bit floats for memory key+value. Not recommended. - `--mlock`: Lock the model in memory, preventing it from being swapped out when memory-mapped. - `--no-mmap`: Do not memory-map the model. By default, models are mapped into memory, which allows the system to load only the necessary parts of the model as needed. 
+- `--numa STRATEGY`: Attempt one of the below optimization strategies that help on some NUMA systems +- `--numa distribute`: Spread execution evenly over all nodes +- `--numa isolate`: Only spawn threads on CPUs on the node that execution started on +- `--numa numactl`: Use the CPU map provided by numactl +if run without this previously, it is recommended to drop the system page cache before using this +see https://github.com/ggerganov/llama.cpp/issues/1437 + - `--numa`: Attempt optimizations that help on some NUMA systems. - `--lora FNAME`: Apply a LoRA (Low-Rank Adaptation) adapter to the model (implies --no-mmap). This allows you to adapt the pretrained model to specific tasks or domains. - `--lora-base FNAME`: Optional model to use as a base for the layers modified by the LoRA adapter. This flag is used in conjunction with the `--lora` flag, and specifies the base model for the adaptation. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 2decd7762..912c750cc 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1855,7 +1855,10 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa TYPE attempt optimizations that help on some NUMA systems\n"); + printf(" - distribute: spread execution evenly over all nodes\n"); + printf(" - isolate: only spawn threads on CPUs on the node that execution started on\n"); + printf(" - numactl: use the CPU map provided my numactl\n"); if (llama_supports_gpu_offload()) { printf(" -ngl N, --n-gpu-layers N\n"); printf(" number of layers to store in VRAM\n"); @@ -2264,9 +2267,17 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, { params.use_mmap = false; } - else if (arg == "--numa") - { - params.numa = true; + else if (arg == "--numa") { + if (++i >= argc) { + invalid_param = true; + break; + } else { + std::string value(argv[i]); + /**/ if (value == "distribute" || value == "" ) { params.numa = GGML_NUMA_STRATEGY_DISTRIBUTE; } + else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } + else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } + else { invalid_param = true; break; } + } } else if (arg == "--embedding") { @@ -2497,7 +2508,8 @@ int main(int argc, char **argv) params.model_alias = params.model; } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); LOG_INFO("build info", {{"build", LLAMA_BUILD_NUMBER}, {"commit", LLAMA_COMMIT}}); diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp index 9cfde8308..39e2d8ea4 100644 --- a/examples/simple/simple.cpp +++ b/examples/simple/simple.cpp @@ -31,7 +31,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 7b3af01f3..3848791d4 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -50,7 +50,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model_tgt = NULL; llama_model * model_dft = NULL; diff --git a/examples/tokenize/tokenize.cpp 
b/examples/tokenize/tokenize.cpp index 4ff8e3fa7..d95a92475 100644 --- a/examples/tokenize/tokenize.cpp +++ b/examples/tokenize/tokenize.cpp @@ -17,7 +17,7 @@ int main(int argc, char ** argv) { const bool printing_ids = argc > 3 && std::string(argv[3]) == "--ids"; - llama_backend_init(false); + llama_backend_init(); llama_model_params model_params = llama_model_default_params(); model_params.vocab_only = true; diff --git a/ggml.c b/ggml.c index d921d82fe..4e302fb7d 100644 --- a/ggml.c +++ b/ggml.c @@ -1954,9 +1954,16 @@ struct ggml_numa_node { }; struct ggml_numa_nodes { + enum ggml_numa_strategy numa_strategy; struct ggml_numa_node nodes[GGML_NUMA_MAX_NODES]; uint32_t n_nodes; uint32_t total_cpus; // hardware threads on system + uint32_t current_node; // node on which main process is execting +#ifdef __linux__ + cpu_set_t cpuset; // cpuset from numactl +#else + uint32_t cpuset; // no NUMA support outside of Linux at this time. Use a portable datatype +#endif }; // @@ -1990,7 +1997,22 @@ inline static void ggml_critical_section_end(void) { atomic_fetch_sub(&g_state_barrier, 1); } -void ggml_numa_init(void) { +#ifdef __linux__ +static cpu_set_t ggml_get_numa_affinity(void) { + cpu_set_t cpuset; + pthread_t thread; + thread = pthread_self(); + CPU_ZERO(&cpuset); + pthread_getaffinity_np(thread, sizeof(cpu_set_t), &cpuset); + return cpuset; +} +#else +static uint32_t ggml_get_numa_affinity(void) { + return 0; // no NUMA support +} +#endif + +void ggml_numa_init(enum ggml_numa_strategy numa_flag) { if (g_state.numa.n_nodes > 0) { fprintf(stderr, "ggml_numa_init: NUMA already initialized\n"); @@ -2002,6 +2024,13 @@ void ggml_numa_init(void) { char path[256]; int rv; + // set numa scheme + g_state.numa.numa_strategy = numa_flag; + + GGML_PRINT_DEBUG("numa strategy %u\n",g_state.numa.numa_strategy); + + g_state.numa.cpuset = ggml_get_numa_affinity(); + // enumerate nodes while (g_state.numa.n_nodes < GGML_NUMA_MAX_NODES) { rv = snprintf(path, sizeof(path), "/sys/devices/system/node/node%u", g_state.numa.n_nodes); @@ -2020,11 +2049,17 @@ void ggml_numa_init(void) { GGML_PRINT_DEBUG("found %u numa nodes, %u CPUs\n", g_state.numa.n_nodes, g_state.numa.total_cpus); - if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1) { + // figure out which node we're on + uint current_cpu; + int getcpu_ret = getcpu(¤t_cpu, &g_state.numa.current_node); + + if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) { g_state.numa.n_nodes = 0; return; } + GGML_PRINT_DEBUG("found our process on numa node %u, CPU %u\n", g_state.numa.current_node, current_cpu); + for (uint32_t n = 0; n < g_state.numa.n_nodes; ++n) { struct ggml_numa_node * node = &g_state.numa.nodes[n]; GGML_PRINT_DEBUG("CPUs on node %u:", n); @@ -16638,26 +16673,46 @@ typedef pthread_t ggml_thread_t; // Android's libc implementation "bionic" does not support setting affinity #if defined(__linux__) && !defined(__BIONIC__) -static void set_numa_thread_affinity(int thread_n, int n_threads) { +static void set_numa_thread_affinity(int thread_n) { if (!ggml_is_numa()) { return; } - // run thread on node_num thread_n / (threads per node) - const int node_num = thread_n / ((n_threads + g_state.numa.n_nodes - 1) / g_state.numa.n_nodes); - struct ggml_numa_node * node = &g_state.numa.nodes[node_num]; + int node_num; + int rv; size_t setsize = CPU_ALLOC_SIZE(g_state.numa.total_cpus); + switch(g_state.numa.numa_strategy) { + case GGML_NUMA_STRATEGY_DISTRIBUTE: + // run thread on node_num thread_n / (threads per node) + node_num = 
thread_n % g_state.numa.n_nodes; + break; + case GGML_NUMA_STRATEGY_ISOLATE: + // run thread on current_node + node_num = g_state.numa.current_node; + break; + case GGML_NUMA_STRATEGY_NUMACTL: + // use the cpuset that numactl gave us + rv = pthread_setaffinity_np(pthread_self(), setsize, &g_state.numa.cpuset); + if (rv) { + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n",strerror(rv)); + } + return; + default: + return; + } + + struct ggml_numa_node * node = &g_state.numa.nodes[node_num]; + cpu_set_t * cpus = CPU_ALLOC(g_state.numa.total_cpus); CPU_ZERO_S(setsize, cpus); for (size_t i = 0; i < node->n_cpus; ++i) { CPU_SET_S(node->cpus[i], setsize, cpus); } - int rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); + rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); if (rv) { - fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", - strerror(rv)); + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); } CPU_FREE(cpus); @@ -16678,8 +16733,7 @@ static void clear_numa_thread_affinity(void) { int rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); if (rv) { - fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", - strerror(rv)); + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); } CPU_FREE(cpus); @@ -16687,7 +16741,7 @@ static void clear_numa_thread_affinity(void) { #else // TODO: Windows etc. // (the linux implementation may also work on BSD, someone should test) -static void set_numa_thread_affinity(int thread_n, int n_threads) { UNUSED(thread_n); UNUSED(n_threads); } +static void set_numa_thread_affinity(int thread_n) { UNUSED(thread_n); } static void clear_numa_thread_affinity(void) {} #endif @@ -16987,7 +17041,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { const int n_threads = state->shared->n_threads; - set_numa_thread_affinity(state->ith, n_threads); + set_numa_thread_affinity(state->ith); int node_n = -1; int task_phase = GGML_TASK_FINALIZE; diff --git a/ggml.h b/ggml.h index 01cecc1e1..270018185 100644 --- a/ggml.h +++ b/ggml.h @@ -658,6 +658,16 @@ extern "C" { void * wdata; }; + // numa strategies + enum ggml_numa_strategy { + GGML_NUMA_STRATEGY_DISABLED = 0, + GGML_NUMA_STRATEGY_DISTRIBUTE = 1, + GGML_NUMA_STRATEGY_ISOLATE = 2, + GGML_NUMA_STRATEGY_NUMACTL = 3, + GGML_NUMA_STRATEGY_MIRROR = 4, + GGML_NUMA_STRATEGY_COUNT + }; + // misc GGML_API void ggml_time_init(void); // call this once at the beginning of the program @@ -668,7 +678,7 @@ extern "C" { GGML_API void ggml_print_backtrace(void); - GGML_API void ggml_numa_init(void); // call once for better performance on NUMA systems + GGML_API void ggml_numa_init(enum ggml_numa_strategy numa); // call once for better performance on NUMA systems GGML_API bool ggml_is_numa(void); // true if init detected that system has >1 NUMA node GGML_API void ggml_print_object (const struct ggml_object * obj); diff --git a/llama.cpp b/llama.cpp index aceb9c25a..08e7b02b4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1034,7 +1034,7 @@ struct llama_mmap { int fd = fileno(file->fp); int flags = MAP_SHARED; // prefetch/readahead impairs performance on NUMA systems - if (numa) { prefetch = 0; } + if (numa) { prefetch = 0; } #ifdef __linux__ // advise the kernel to read the file sequentially (increases readahead) if (posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL)) { @@ -11182,7 +11182,7 @@ bool llama_mlock_supported(void) { return llama_supports_mlock(); } -void llama_backend_init(bool numa) { +void 
llama_backend_init(void) { ggml_time_init(); // needed to initialize f16 tables @@ -11192,15 +11192,17 @@ void llama_backend_init(bool numa) { ggml_free(ctx); } - if (numa) { - ggml_numa_init(); - } - #ifdef GGML_USE_MPI ggml_mpi_backend_init(); #endif } +void llama_numa_init(enum ggml_numa_strategy numa) { + if (numa != GGML_NUMA_STRATEGY_DISABLED) { + ggml_numa_init(numa); + } +} + void llama_backend_free(void) { #ifdef GGML_USE_MPI ggml_mpi_backend_free(); diff --git a/llama.h b/llama.h index 4a26bd619..f4ec6ea63 100644 --- a/llama.h +++ b/llama.h @@ -312,7 +312,10 @@ extern "C" { // Initialize the llama + ggml backend // If numa is true, use NUMA optimizations // Call once at the start of the program - LLAMA_API void llama_backend_init(bool numa); + LLAMA_API void llama_backend_init(void); + + //optional: + LLAMA_API void llama_numa_init(enum ggml_numa_strategy numa); // Call once at the end of the program - currently only used for MPI LLAMA_API void llama_backend_free(void); diff --git a/tests/test-autorelease.cpp b/tests/test-autorelease.cpp index 36a23c0bb..57fa00011 100644 --- a/tests/test-autorelease.cpp +++ b/tests/test-autorelease.cpp @@ -12,7 +12,7 @@ int main(int argc, char ** argv) { auto * model_path = get_model_or_exit(argc, argv); std::thread([&model_path]() { - llama_backend_init(false); + llama_backend_init(); auto * model = llama_load_model_from_file(model_path, llama_model_default_params()); auto * ctx = llama_new_context_with_model(model, llama_context_default_params()); llama_free(ctx); diff --git a/tests/test-model-load-cancel.cpp b/tests/test-model-load-cancel.cpp index 7ea4bbacc..858535c3c 100644 --- a/tests/test-model-load-cancel.cpp +++ b/tests/test-model-load-cancel.cpp @@ -14,7 +14,7 @@ int main(int argc, char *argv[] ) { fprintf(stderr, "using '%s'\n", model_path); fclose(file); - llama_backend_init(false); + llama_backend_init(); auto params = llama_model_params{}; params.use_mmap = false; params.progress_callback = [](float progress, void * ctx){ diff --git a/tests/test-tokenizer-0-falcon.cpp b/tests/test-tokenizer-0-falcon.cpp index a4e9d2b91..472b0b3a8 100644 --- a/tests/test-tokenizer-0-falcon.cpp +++ b/tests/test-tokenizer-0-falcon.cpp @@ -61,7 +61,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-0-llama.cpp b/tests/test-tokenizer-0-llama.cpp index 39c8d188c..0a16cd7eb 100644 --- a/tests/test-tokenizer-0-llama.cpp +++ b/tests/test-tokenizer-0-llama.cpp @@ -60,7 +60,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp index 3bb629561..3596ce55a 100644 --- a/tests/test-tokenizer-1-bpe.cpp +++ b/tests/test-tokenizer-1-bpe.cpp @@ -25,7 +25,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-1-llama.cpp b/tests/test-tokenizer-1-llama.cpp index b0d814a41..9333f8686 100644 --- a/tests/test-tokenizer-1-llama.cpp +++ b/tests/test-tokenizer-1-llama.cpp @@ -25,7 +25,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { From 5f5808ca7b7f23a1fa7a77241842bb84a0e55108 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?R=C5=91czey=20Barnab=C3=A1s?= <31726601+An0nie@users.noreply.github.com> Date: Fri, 16 Feb 2024 11:00:56 +0100 Subject: [PATCH 607/811] server : fix system prompt cli (#5516) --- examples/server/server.cpp | 47 ++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 912c750cc..0cb802ce8 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -436,10 +436,6 @@ struct llama_server_context default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); - - // empty system prompt - system_prompt = ""; - system_tokens.clear(); } std::vector tokenize(const json & json_prompt, bool add_bos) const @@ -765,27 +761,30 @@ struct llama_server_context } void update_system_prompt() { - system_tokens = ::llama_tokenize(ctx, system_prompt, add_bos_token); - - llama_batch_clear(batch); - kv_cache_clear(); + system_tokens.clear(); - for (int i = 0; i < (int) system_tokens.size(); ++i) - { - llama_batch_add(batch, system_tokens[i], i, { 0 }, false); - } + if (!system_prompt.empty()) { + system_tokens = ::llama_tokenize(ctx, system_prompt, add_bos_token); - if (llama_decode(ctx, batch) != 0) - { - LOG_TEE("%s: llama_decode() failed\n", __func__); - return; - } + llama_batch_clear(batch); - // assign the system KV cache to all parallel sequences - for (int32_t i = 1; i < params.n_parallel; ++i) - { - llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); + for (int i = 0; i < (int)system_tokens.size(); ++i) + { + llama_batch_add(batch, system_tokens[i], i, { 0 }, false); + } + + if (llama_decode(ctx, batch) != 0) + { + LOG_TEE("%s: llama_decode() failed\n", __func__); + return; + } + + // assign the system KV cache to all parallel sequences + for (int32_t i = 1; i < params.n_parallel; ++i) + { + llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); + } } LOG_TEE("system prompt updated\n"); @@ -807,10 +806,8 @@ struct llama_server_context name_user = sys_props.value("anti_prompt", ""); name_assistant = sys_props.value("assistant_name", ""); - if (slots.size() > 0) - { - notify_system_prompt_changed(); - } + + notify_system_prompt_changed(); } static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, From 6dcc02d2444c779c18d49c364c5d5c5728b6b484 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Fri, 16 Feb 2024 11:33:25 +0000 Subject: [PATCH 608/811] server : add "samplers" param to control the samplers order (#5494) --- common/common.cpp | 59 ++++++++++++++++++++++++-------------- common/common.h | 2 +- common/sampling.cpp | 2 +- common/sampling.h | 14 ++++----- examples/server/README.md | 2 ++ examples/server/server.cpp | 25 ++++++++++++++++ 6 files changed, 74 insertions(+), 30 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index c5e83cc2a..3a92d3797 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -341,7 +341,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } const auto sampler_names = string_split(argv[i], ';'); - sparams.samplers_sequence = sampler_types_from_names(sampler_names); + sparams.samplers_sequence = sampler_types_from_names(sampler_names, true); } else if (arg == "--sampling-seq") { if (++i >= argc) { invalid_param = true; @@ -964,7 +964,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until 
context filled)\n", params.n_predict); printf(" -c N, --ctx-size N size of the prompt context (default: %d, 0 = loaded from model)\n", params.n_ctx); printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); - printf(" --samplers samplers that will be used for generation in the order, separated by \';\' (default: %s)\n", sampler_type_names.c_str()); + printf(" --samplers samplers that will be used for generation in the order, separated by \';\'\n"); + printf(" (default: %s)\n", sampler_type_names.c_str()); printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sampler_type_chars.c_str()); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); @@ -1133,34 +1134,50 @@ std::vector string_split(std::string input, char separator) { return parts; } -std::vector sampler_types_from_names(const std::vector & names) { +std::vector sampler_types_from_names(const std::vector & names, bool allow_alt_names) { + std::unordered_map sampler_canonical_name_map { + {"top_k", llama_sampler_type::TOP_K}, + {"top_p", llama_sampler_type::TOP_P}, + {"typical_p", llama_sampler_type::TYPICAL_P}, + {"min_p", llama_sampler_type::MIN_P}, + {"tfs_z", llama_sampler_type::TFS_Z}, + {"temperature", llama_sampler_type::TEMPERATURE} + }; + // since samplers names are written multiple ways // make it ready for both system names and input names - std::unordered_map sampler_name_map { - {"top_k", llama_sampler_type::TOP_K}, + std::unordered_map sampler_alt_name_map { {"top-k", llama_sampler_type::TOP_K}, - {"top_p", llama_sampler_type::TOP_P}, {"top-p", llama_sampler_type::TOP_P}, {"nucleus", llama_sampler_type::TOP_P}, - {"typical_p", llama_sampler_type::TYPICAL_P}, {"typical-p", llama_sampler_type::TYPICAL_P}, {"typical", llama_sampler_type::TYPICAL_P}, - {"min_p", llama_sampler_type::MIN_P}, {"min-p", llama_sampler_type::MIN_P}, - {"tfs_z", llama_sampler_type::TFS_Z}, {"tfs-z", llama_sampler_type::TFS_Z}, {"tfs", llama_sampler_type::TFS_Z}, - {"temp", llama_sampler_type::TEMP}, - {"temperature", llama_sampler_type::TEMP} + {"temp", llama_sampler_type::TEMPERATURE} }; std::vector sampler_types; sampler_types.reserve(names.size()); - for (const auto& name : names) { - const auto sampler_item = sampler_name_map.find(name); - if (sampler_item != sampler_name_map.end()) { + for (const auto & name : names) + { + auto sampler_item = sampler_canonical_name_map.find(name); + if (sampler_item != sampler_canonical_name_map.end()) + { sampler_types.push_back(sampler_item->second); } + else + { + if (allow_alt_names) + { + sampler_item = sampler_alt_name_map.find(name); + if (sampler_item != sampler_alt_name_map.end()) + { + sampler_types.push_back(sampler_item->second); + } + } + } } return sampler_types; } @@ -1172,7 +1189,7 @@ std::vector sampler_types_from_chars(const std::string & nam {'y', llama_sampler_type::TYPICAL_P}, {'m', llama_sampler_type::MIN_P}, {'f', llama_sampler_type::TFS_Z}, - {'t', llama_sampler_type::TEMP} + {'t', llama_sampler_type::TEMPERATURE} }; std::vector sampler_types; @@ -1188,12 +1205,12 @@ std::vector sampler_types_from_chars(const std::string & nam std::string sampler_type_to_name_string(llama_sampler_type sampler_type) { switch (sampler_type) { - case llama_sampler_type::TOP_K: return "top_k"; - case llama_sampler_type::TFS_Z: return "tfs_z"; - case llama_sampler_type::TYPICAL_P: return "typical_p"; - case 
llama_sampler_type::TOP_P: return "top_p"; - case llama_sampler_type::MIN_P: return "min_p"; - case llama_sampler_type::TEMP: return "temp"; + case llama_sampler_type::TOP_K: return "top_k"; + case llama_sampler_type::TFS_Z: return "tfs_z"; + case llama_sampler_type::TYPICAL_P: return "typical_p"; + case llama_sampler_type::TOP_P: return "top_p"; + case llama_sampler_type::MIN_P: return "min_p"; + case llama_sampler_type::TEMPERATURE: return "temperature"; default : return ""; } } diff --git a/common/common.h b/common/common.h index 74c136995..935771d44 100644 --- a/common/common.h +++ b/common/common.h @@ -165,7 +165,7 @@ void process_escapes(std::string& input); // String utils // -std::vector sampler_types_from_names(const std::vector & names); +std::vector sampler_types_from_names(const std::vector & names, bool allow_alt_names); std::vector sampler_types_from_chars(const std::string & names_string); std::vector string_split(std::string input, char separator); std::string sampler_type_to_name_string(llama_sampler_type sampler_type); diff --git a/common/sampling.cpp b/common/sampling.cpp index a001750da..53013138a 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -139,7 +139,7 @@ static void sampler_queue( case llama_sampler_type::TYPICAL_P: llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; case llama_sampler_type::TOP_P : llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; case llama_sampler_type::MIN_P : llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; - case llama_sampler_type::TEMP: + case llama_sampler_type::TEMPERATURE: if (dynatemp_range > 0) { float dynatemp_min = std::max(0.0f, temp - dynatemp_range); float dynatemp_max = std::max(0.0f, temp + dynatemp_range); diff --git a/common/sampling.h b/common/sampling.h index 2bd6a75d2..e1279a894 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -10,12 +10,12 @@ // sampler types enum class llama_sampler_type : char { - TOP_K = 'k', - TOP_P = 'p', - MIN_P = 'm', - TFS_Z = 'f', - TYPICAL_P = 'y', - TEMP = 't' + TOP_K = 'k', + TOP_P = 'p', + MIN_P = 'm', + TFS_Z = 'f', + TYPICAL_P = 'y', + TEMPERATURE = 't' }; // sampling parameters @@ -45,7 +45,7 @@ typedef struct llama_sampling_params { llama_sampler_type::TYPICAL_P, llama_sampler_type::TOP_P, llama_sampler_type::MIN_P, - llama_sampler_type::TEMP + llama_sampler_type::TEMPERATURE }; std::string grammar; // optional BNF-like grammar to constrain sampling diff --git a/examples/server/README.md b/examples/server/README.md index 8e141d22d..249368749 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -204,6 +204,8 @@ node index.js `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) + `samplers`: The order the samplers should be applied in. An array of strings representing sampler type names. If a sampler is not set, it will not be used. If a sampler is specified more than once, it will be applied multiple times. (default: `["top_k", "tfs_z", "typical_p", "top_p", "min_p", "temperature"]` - these are all the available values) + ### Result JSON - Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. 
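Note on the samplers patch above (#5494): the snippet below is a small, self-contained C++ sketch of the two-stage name lookup it introduces, not the actual `common.cpp` code, and the helper name here is made up for illustration. Canonical names (`top_k`, `min_p`, `temperature`, ...) are always accepted, while alternative spellings such as `top-k` or `temp` are only honoured when `allow_alt_names` is true — which the CLI `--samplers` path enables and the server's `"samplers"` request field does not.

```cpp
#include <cstdio>
#include <string>
#include <unordered_map>
#include <vector>

// sampler identifiers, mirroring common/sampling.h in this patch
enum class llama_sampler_type : char {
    TOP_K       = 'k',
    TOP_P       = 'p',
    MIN_P       = 'm',
    TFS_Z       = 'f',
    TYPICAL_P   = 'y',
    TEMPERATURE = 't',
};

// illustrative stand-in for sampler_types_from_names(names, allow_alt_names)
static std::vector<llama_sampler_type> resolve_sampler_names(
        const std::vector<std::string> & names, bool allow_alt_names) {
    static const std::unordered_map<std::string, llama_sampler_type> canonical = {
        {"top_k",       llama_sampler_type::TOP_K},
        {"top_p",       llama_sampler_type::TOP_P},
        {"typical_p",   llama_sampler_type::TYPICAL_P},
        {"min_p",       llama_sampler_type::MIN_P},
        {"tfs_z",       llama_sampler_type::TFS_Z},
        {"temperature", llama_sampler_type::TEMPERATURE},
    };
    static const std::unordered_map<std::string, llama_sampler_type> alt = {
        {"top-k",   llama_sampler_type::TOP_K},     {"top-p", llama_sampler_type::TOP_P},
        {"nucleus", llama_sampler_type::TOP_P},     {"typical-p", llama_sampler_type::TYPICAL_P},
        {"typical", llama_sampler_type::TYPICAL_P}, {"min-p", llama_sampler_type::MIN_P},
        {"tfs-z",   llama_sampler_type::TFS_Z},     {"tfs",   llama_sampler_type::TFS_Z},
        {"temp",    llama_sampler_type::TEMPERATURE},
    };

    std::vector<llama_sampler_type> out;
    out.reserve(names.size());
    for (const auto & name : names) {
        const auto it = canonical.find(name);
        if (it != canonical.end()) {
            out.push_back(it->second);
            continue;
        }
        if (allow_alt_names) {
            const auto alt_it = alt.find(name);
            if (alt_it != alt.end()) {
                out.push_back(alt_it->second);
            }
        }
        // unknown names are silently dropped, as in the patch
    }
    return out;
}

int main() {
    // "top-k" and "temp" resolve on the CLI (alt names allowed) but not via the server API
    for (const bool allow_alt : {true, false}) {
        const auto types = resolve_sampler_names({"top-k", "tfs_z", "temp"}, allow_alt);
        printf("allow_alt_names=%d -> %zu samplers resolved\n", allow_alt ? 1 : 0, types.size());
    }
    return 0;
}
```

A client of the server therefore sends canonical names only, e.g. `"samplers": ["top_k", "min_p", "temperature"]` in the completion request body, and the chosen order is echoed back in the `samplers` field of the generation settings.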
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0cb802ce8..a0b46970b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -672,6 +672,24 @@ struct llama_server_context } } + const auto &samplers_sequence = data.find("samplers"); + if (samplers_sequence != data.end() && samplers_sequence->is_array()) + { + std::vector sampler_names; + for (const auto &sampler_name : *samplers_sequence) + { + if (sampler_name.is_string()) + { + sampler_names.emplace_back(sampler_name); + } + } + slot->sparams.samplers_sequence = sampler_types_from_names(sampler_names, false); + } + else + { + slot->sparams.samplers_sequence = default_sparams.samplers_sequence; + } + if (multimodal) { const auto &images_data = data.find("image_data"); @@ -1026,6 +1044,12 @@ struct llama_server_context const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && eos_bias->second < 0.0f && std::isinf(eos_bias->second); + std::vector samplers_sequence; + for (const auto &sampler_type : slot.sparams.samplers_sequence) + { + samplers_sequence.emplace_back(sampler_type_to_name_string(sampler_type)); + } + return json { {"n_ctx", slot.n_ctx}, {"model", params.model_alias}, @@ -1056,6 +1080,7 @@ struct llama_server_context {"logit_bias", slot.sparams.logit_bias}, {"n_probs", slot.sparams.n_probs}, {"grammar", slot.sparams.grammar}, + {"samplers", samplers_sequence} }; } From 65085c713e14f78cdda6abc275b1a5d8c2b8ca15 Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 16 Feb 2024 11:45:48 +0000 Subject: [PATCH 609/811] llama : minor fixed return int value (#5529) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 08e7b02b4..8966c3e66 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10893,7 +10893,7 @@ static int llama_apply_lora_from_file_internal( { LLAMA_LOG_ERROR("%s: invalid tensor data type '%d'\n", __func__, ftype); - return false; + return 1; } } From 4cb072769804c77ab466bc8351c76ede9d5ba49d Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 16 Feb 2024 12:43:23 +0000 Subject: [PATCH 610/811] llava : removed excess free(NULL) operation (#5531) --- examples/llava/llava.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 4ed310a0e..4cb65a07b 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -315,7 +315,6 @@ static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_thre float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); - free(image_embd); return false; } From d2819d5577b35507be83d0c3f4d2d3c0ab1488ca Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 15:14:40 +0200 Subject: [PATCH 611/811] scripts : add helpers script for bench comparing commits (#5521) * scripts : add helpers script for bench comparing commits * scripts : detect CUDA * set flags after checking the command line * fix make flags --------- Co-authored-by: slaren --- scripts/compare-commits.sh | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100755 scripts/compare-commits.sh diff --git a/scripts/compare-commits.sh b/scripts/compare-commits.sh new file mode 100755 index 000000000..331c4b9ce --- /dev/null +++ b/scripts/compare-commits.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +if [ $# -lt 2 
]; then + echo "usage: ./scripts/compare-commits.sh [additional llama-bench arguments]" + exit 1 +fi + +set -e +set -x + +bench_args="${@:3}" + +rm -f llama-bench.sqlite + +backend="cpu" + +if [[ "$OSTYPE" == "darwin"* ]]; then + backend="metal" +elif command -v nvcc &> /dev/null; then + backend="cuda" +fi + +make_opts="" + +if [[ "$backend" == "cuda" ]]; then + make_opts="LLAMA_CUBLAS=1" +fi + +git checkout $1 +make clean && make -j32 $make_opts llama-bench +./llama-bench -o sql $bench_args | tee /dev/tty | sqlite3 llama-bench.sqlite + +git checkout $2 +make clean && make -j32 $make_opts llama-bench +./llama-bench -o sql $bench_args | tee /dev/tty | sqlite3 llama-bench.sqlite + +./scripts/compare-llama-bench.py -b $1 -c $2 From 5bf2b94dd4fb74378b78604023b31512fec55f8f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 19:05:56 +0200 Subject: [PATCH 612/811] cmake : fix VULKAN and ROCm builds (#5525) * cmake : fix VULKAN and ROCm builds * cmake : fix (cont) * vulkan : fix compile warnings ggml-ci * cmake : fix ggml-ci * cmake : minor ggml-ci --- CMakeLists.txt | 379 +++++++++++++++++++++++++----------------------- ggml-vulkan.cpp | 12 +- 2 files changed, 205 insertions(+), 186 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f8c7f9978..2a922fdb3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -112,17 +112,14 @@ option(LLAMA_MPI "llama: use MPI" option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) option(LLAMA_SYCL "llama: use SYCL" OFF) option(LLAMA_SYCL_F16 "llama: use 16 bit floats for sycl calculations" OFF) +option(LLAMA_CPU_HBM "llama: use memkind for CPU HBM" OFF) option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_SERVER "llama: build server example" ON) - # add perf arguments option(LLAMA_PERF "llama: enable perf" OFF) -if (LLAMA_PERF) - add_definitions(-DGGML_PERF) -endif() # Required for relocatable CMake package include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) @@ -130,6 +127,7 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) # # Compile flags # + if (LLAMA_SYCL) set(CMAKE_CXX_STANDARD 17) else() @@ -140,6 +138,7 @@ set(CMAKE_CXX_STANDARD_REQUIRED true) set(CMAKE_C_STANDARD 11) set(CMAKE_C_STANDARD_REQUIRED true) set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) @@ -151,17 +150,17 @@ endif() if (NOT MSVC) if (LLAMA_SANITIZE_THREAD) add_compile_options(-fsanitize=thread) - link_libraries(-fsanitize=thread) + link_libraries (-fsanitize=thread) endif() if (LLAMA_SANITIZE_ADDRESS) add_compile_options(-fsanitize=address -fno-omit-frame-pointer) - link_libraries(-fsanitize=address) + link_libraries (-fsanitize=address) endif() if (LLAMA_SANITIZE_UNDEFINED) add_compile_options(-fsanitize=undefined) - link_libraries(-fsanitize=undefined) + link_libraries (-fsanitize=undefined) endif() endif() @@ -298,14 +297,17 @@ if (LLAMA_BLAS) endif() message(STATUS "BLAS found, Includes: ${BLAS_INCLUDE_DIRS}") + add_compile_options(${BLAS_LINKER_FLAGS}) + add_compile_definitions(GGML_USE_OPENBLAS) + if (${BLAS_INCLUDE_DIRS} MATCHES "mkl" AND (${LLAMA_BLAS_VENDOR} MATCHES "Generic" OR ${LLAMA_BLAS_VENDOR} MATCHES "Intel")) add_compile_definitions(GGML_BLAS_USE_MKL) endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${BLAS_LIBRARIES}) - set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${BLAS_INCLUDE_DIRS}) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} 
${BLAS_LIBRARIES}) + set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${BLAS_INCLUDE_DIRS}) else() message(WARNING "BLAS not found, please refer to " "https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors" @@ -330,9 +332,6 @@ if (LLAMA_CUBLAS) set(GGML_SOURCES_CUDA ggml-cuda.cu) add_compile_definitions(GGML_USE_CUBLAS) -# if (LLAMA_CUDA_CUBLAS) -# add_compile_definitions(GGML_CUDA_CUBLAS) -# endif() if (LLAMA_CUDA_FORCE_DMMV) add_compile_definitions(GGML_CUDA_FORCE_DMMV) endif() @@ -387,15 +386,20 @@ if (LLAMA_MPI) find_package(MPI) if (MPI_C_FOUND) message(STATUS "MPI found") + set(GGML_HEADERS_MPI ggml-mpi.h) - set(GGML_SOURCES_MPI ggml-mpi.c ggml-mpi.h) + set(GGML_SOURCES_MPI ggml-mpi.c) + add_compile_definitions(GGML_USE_MPI) add_compile_definitions(${MPI_C_COMPILE_DEFINITIONS}) + if (NOT MSVC) add_compile_options(-Wno-cast-qual) endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${MPI_C_LIBRARIES}) set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${MPI_C_INCLUDE_DIRS}) + # Even if you're only using the C header, C++ programs may bring in MPI # C++ functions, so more linkage is needed if (MPI_CXX_FOUND) @@ -427,31 +431,28 @@ if (LLAMA_VULKAN) if (Vulkan_FOUND) message(STATUS "Vulkan found") - add_library(ggml-vulkan OBJECT ggml-vulkan.cpp ggml-vulkan.h) - if (BUILD_SHARED_LIBS) - set_target_properties(ggml-vulkan PROPERTIES POSITION_INDEPENDENT_CODE ON) - endif() - target_link_libraries(ggml-vulkan PRIVATE Vulkan::Vulkan) + set(GGML_HEADERS_VULKAN ggml-vulkan.h) + set(GGML_SOURCES_VULKAN ggml-vulkan.cpp) add_compile_definitions(GGML_USE_VULKAN) if (LLAMA_VULKAN_CHECK_RESULTS) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_CHECK_RESULTS) + add_compile_definitions(GGML_VULKAN_CHECK_RESULTS) endif() if (LLAMA_VULKAN_DEBUG) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_DEBUG) + add_compile_definitions(GGML_VULKAN_DEBUG) endif() if (LLAMA_VULKAN_VALIDATE) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_VALIDATE) + add_compile_definitions(GGML_VULKAN_VALIDATE) endif() if (LLAMA_VULKAN_RUN_TESTS) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_RUN_TESTS) + add_compile_definitions(GGML_VULKAN_RUN_TESTS) endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-vulkan) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} Vulkan::Vulkan) else() message(WARNING "Vulkan not found") endif() @@ -463,43 +464,45 @@ if (LLAMA_HIPBLAS) if (NOT ${CMAKE_C_COMPILER_ID} MATCHES "Clang") message(WARNING "Only LLVM is supported for HIP, hint: CC=/opt/rocm/llvm/bin/clang") endif() + if (NOT ${CMAKE_CXX_COMPILER_ID} MATCHES "Clang") message(WARNING "Only LLVM is supported for HIP, hint: CXX=/opt/rocm/llvm/bin/clang++") endif() - find_package(hip) - find_package(hipblas) - find_package(rocblas) + find_package(hip REQUIRED) + find_package(hipblas REQUIRED) + find_package(rocblas REQUIRED) - if (${hipblas_FOUND} AND ${hip_FOUND}) - message(STATUS "HIP and hipBLAS found") - add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) - if (LLAMA_HIP_UMA) - add_compile_definitions(GGML_HIP_UMA) - endif() - add_library(ggml-rocm OBJECT ggml-cuda.cu ggml-cuda.h) - if (BUILD_SHARED_LIBS) - set_target_properties(ggml-rocm PROPERTIES POSITION_INDEPENDENT_CODE ON) - endif() - if (LLAMA_CUDA_FORCE_DMMV) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_DMMV) - endif() - if (LLAMA_CUDA_FORCE_MMQ) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_MMQ) - endif() - target_compile_definitions(ggml-rocm PRIVATE 
GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) - target_compile_definitions(ggml-rocm PRIVATE K_QUANTS_PER_ITERATION=${LLAMA_CUDA_KQUANTS_ITER}) - set_source_files_properties(ggml-cuda.cu PROPERTIES LANGUAGE CXX) - target_link_libraries(ggml-rocm PRIVATE hip::device PUBLIC hip::host roc::rocblas roc::hipblas) + message(STATUS "HIP and hipBLAS found") - if (LLAMA_STATIC) - message(FATAL_ERROR "Static linking not supported for HIP/ROCm") - endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-rocm) - else() - message(WARNING "hipBLAS or HIP not found. Try setting CMAKE_PREFIX_PATH=/opt/rocm") + set(GGML_HEADERS_ROCM ggml-cuda.h) + set(GGML_SOURCES_ROCM ggml-cuda.cu) + + add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) + + if (LLAMA_HIP_UMA) + add_compile_definitions(GGML_HIP_UMA) endif() + + if (LLAMA_CUDA_FORCE_DMMV) + add_compile_definitions(GGML_CUDA_FORCE_DMMV) + endif() + + if (LLAMA_CUDA_FORCE_MMQ) + add_compile_definitions(GGML_CUDA_FORCE_MMQ) + endif() + + add_compile_definitions(GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) + add_compile_definitions(GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) + add_compile_definitions(K_QUANTS_PER_ITERATION=${LLAMA_CUDA_KQUANTS_ITER}) + + set_source_files_properties(ggml-cuda.cu PROPERTIES LANGUAGE CXX) + + if (LLAMA_STATIC) + message(FATAL_ERROR "Static linking not supported for HIP/ROCm") + endif() + + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} hip::device PUBLIC hip::host roc::rocblas roc::hipblas) endif() if (LLAMA_SYCL) @@ -509,10 +512,14 @@ if (LLAMA_SYCL) #todo: AOT find_package(IntelSYCL REQUIRED) + + message(STATUS "SYCL found") + + add_compile_definitions(GML_USE_SYCL) + if (LLAMA_SYCL_F16) add_compile_definitions(GGML_SYCL_F16) endif() - add_compile_definitions(GGML_USE_SYCL) add_compile_options(-I./) #include DPCT add_compile_options(-I/${SYCL_INCLUDE_DIR}) @@ -521,7 +528,7 @@ if (LLAMA_SYCL) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsycl -L${MKLROOT}/lib") - set(GGML_HEADERS_SYCL ggml.h ggml-sycl.h) + set(GGML_HEADERS_SYCL ggml-sycl.h) set(GGML_SOURCES_SYCL ggml-sycl.cpp) if (WIN32) @@ -540,61 +547,61 @@ if (LLAMA_KOMPUTE) endif() function(compile_shader) - set(options) - set(oneValueArgs) - set(multiValueArgs SOURCES) - cmake_parse_arguments(compile_shader "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - foreach(source ${compile_shader_SOURCES}) - get_filename_component(filename ${source} NAME) - set(spv_file ${filename}.spv) - add_custom_command( - OUTPUT ${spv_file} - DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${source} - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/common.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_getrows.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n_pre.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n.comp - COMMAND ${glslc_executable} --target-env=vulkan1.2 -o ${spv_file} ${CMAKE_CURRENT_SOURCE_DIR}/${source} - COMMENT "Compiling ${source} to ${spv_file}" - ) + set(options) + set(oneValueArgs) + set(multiValueArgs SOURCES) + cmake_parse_arguments(compile_shader "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + foreach(source ${compile_shader_SOURCES}) + get_filename_component(filename ${source} NAME) + set(spv_file ${filename}.spv) + add_custom_command( + OUTPUT ${spv_file} + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${source} + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/common.comp + 
${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_getrows.comp + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n_pre.comp + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n.comp + COMMAND ${glslc_executable} --target-env=vulkan1.2 -o ${spv_file} ${CMAKE_CURRENT_SOURCE_DIR}/${source} + COMMENT "Compiling ${source} to ${spv_file}" + ) - get_filename_component(RAW_FILE_NAME ${spv_file} NAME) - set(FILE_NAME "shader${RAW_FILE_NAME}") - string(REPLACE ".comp.spv" ".h" HEADER_FILE ${FILE_NAME}) - string(TOUPPER ${HEADER_FILE} HEADER_FILE_DEFINE) - string(REPLACE "." "_" HEADER_FILE_DEFINE "${HEADER_FILE_DEFINE}") - set(OUTPUT_HEADER_FILE "${HEADER_FILE}") - message(STATUS "${HEADER_FILE} generating ${HEADER_FILE_DEFINE}") - if(CMAKE_GENERATOR MATCHES "Visual Studio") - add_custom_command( - OUTPUT ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_BINARY_DIR}/bin/$/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - DEPENDS ${spv_file} xxd - COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/$/xxd" - ) - else() - add_custom_command( - OUTPUT ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_BINARY_DIR}/bin/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - DEPENDS ${spv_file} xxd - COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/xxd" - ) - endif() - endforeach() + get_filename_component(RAW_FILE_NAME ${spv_file} NAME) + set(FILE_NAME "shader${RAW_FILE_NAME}") + string(REPLACE ".comp.spv" ".h" HEADER_FILE ${FILE_NAME}) + string(TOUPPER ${HEADER_FILE} HEADER_FILE_DEFINE) + string(REPLACE "." 
"_" HEADER_FILE_DEFINE "${HEADER_FILE_DEFINE}") + set(OUTPUT_HEADER_FILE "${HEADER_FILE}") + message(STATUS "${HEADER_FILE} generating ${HEADER_FILE_DEFINE}") + if(CMAKE_GENERATOR MATCHES "Visual Studio") + add_custom_command( + OUTPUT ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_BINARY_DIR}/bin/$/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + DEPENDS ${spv_file} xxd + COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/$/xxd" + ) + else() + add_custom_command( + OUTPUT ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_BINARY_DIR}/bin/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + DEPENDS ${spv_file} xxd + COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/xxd" + ) + endif() + endforeach() endfunction() if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/kompute/CMakeLists.txt") @@ -604,66 +611,66 @@ if (LLAMA_KOMPUTE) # Compile our shaders compile_shader(SOURCES - kompute-shaders/op_scale.comp - kompute-shaders/op_scale_8.comp - kompute-shaders/op_add.comp - kompute-shaders/op_addrow.comp - kompute-shaders/op_mul.comp - kompute-shaders/op_silu.comp - kompute-shaders/op_relu.comp - kompute-shaders/op_gelu.comp - kompute-shaders/op_softmax.comp - kompute-shaders/op_norm.comp - kompute-shaders/op_rmsnorm.comp - kompute-shaders/op_diagmask.comp - kompute-shaders/op_mul_mat_mat_f32.comp - kompute-shaders/op_mul_mat_f16.comp - kompute-shaders/op_mul_mat_q8_0.comp - kompute-shaders/op_mul_mat_q4_0.comp - kompute-shaders/op_mul_mat_q4_1.comp - kompute-shaders/op_mul_mat_q6_k.comp - kompute-shaders/op_getrows_f16.comp - kompute-shaders/op_getrows_q4_0.comp - kompute-shaders/op_getrows_q4_1.comp - kompute-shaders/op_getrows_q6_k.comp - kompute-shaders/op_rope_f16.comp - kompute-shaders/op_rope_f32.comp - kompute-shaders/op_cpy_f16_f16.comp - kompute-shaders/op_cpy_f16_f32.comp - kompute-shaders/op_cpy_f32_f16.comp - kompute-shaders/op_cpy_f32_f32.comp + kompute-shaders/op_scale.comp + kompute-shaders/op_scale_8.comp + kompute-shaders/op_add.comp + kompute-shaders/op_addrow.comp + kompute-shaders/op_mul.comp + kompute-shaders/op_silu.comp + kompute-shaders/op_relu.comp + kompute-shaders/op_gelu.comp + kompute-shaders/op_softmax.comp + kompute-shaders/op_norm.comp + kompute-shaders/op_rmsnorm.comp + 
kompute-shaders/op_diagmask.comp + kompute-shaders/op_mul_mat_mat_f32.comp + kompute-shaders/op_mul_mat_f16.comp + kompute-shaders/op_mul_mat_q8_0.comp + kompute-shaders/op_mul_mat_q4_0.comp + kompute-shaders/op_mul_mat_q4_1.comp + kompute-shaders/op_mul_mat_q6_k.comp + kompute-shaders/op_getrows_f16.comp + kompute-shaders/op_getrows_q4_0.comp + kompute-shaders/op_getrows_q4_1.comp + kompute-shaders/op_getrows_q6_k.comp + kompute-shaders/op_rope_f16.comp + kompute-shaders/op_rope_f32.comp + kompute-shaders/op_cpy_f16_f16.comp + kompute-shaders/op_cpy_f16_f32.comp + kompute-shaders/op_cpy_f32_f16.comp + kompute-shaders/op_cpy_f32_f32.comp ) # Create a custom target for our generated shaders add_custom_target(generated_shaders DEPENDS - shaderop_scale.h - shaderop_scale_8.h - shaderop_add.h - shaderop_addrow.h - shaderop_mul.h - shaderop_silu.h - shaderop_relu.h - shaderop_gelu.h - shaderop_softmax.h - shaderop_norm.h - shaderop_rmsnorm.h - shaderop_diagmask.h - shaderop_mul_mat_mat_f32.h - shaderop_mul_mat_f16.h - shaderop_mul_mat_q8_0.h - shaderop_mul_mat_q4_0.h - shaderop_mul_mat_q4_1.h - shaderop_mul_mat_q6_k.h - shaderop_getrows_f16.h - shaderop_getrows_q4_0.h - shaderop_getrows_q4_1.h - shaderop_getrows_q6_k.h - shaderop_rope_f16.h - shaderop_rope_f32.h - shaderop_cpy_f16_f16.h - shaderop_cpy_f16_f32.h - shaderop_cpy_f32_f16.h - shaderop_cpy_f32_f32.h + shaderop_scale.h + shaderop_scale_8.h + shaderop_add.h + shaderop_addrow.h + shaderop_mul.h + shaderop_silu.h + shaderop_relu.h + shaderop_gelu.h + shaderop_softmax.h + shaderop_norm.h + shaderop_rmsnorm.h + shaderop_diagmask.h + shaderop_mul_mat_mat_f32.h + shaderop_mul_mat_f16.h + shaderop_mul_mat_q8_0.h + shaderop_mul_mat_q4_0.h + shaderop_mul_mat_q4_1.h + shaderop_mul_mat_q6_k.h + shaderop_getrows_f16.h + shaderop_getrows_q4_0.h + shaderop_getrows_q4_1.h + shaderop_getrows_q6_k.h + shaderop_rope_f16.h + shaderop_rope_f32.h + shaderop_cpy_f16_f16.h + shaderop_cpy_f16_f32.h + shaderop_cpy_f32_f16.h + shaderop_cpy_f32_f32.h ) # Create a custom command that depends on the generated_shaders @@ -676,8 +683,10 @@ if (LLAMA_KOMPUTE) # Add the stamp to the main sources to ensure dependency tracking set(GGML_SOURCES_KOMPUTE ggml-kompute.cpp ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) - set(GGML_HEADERS_KOMPUTE ggml-kompute.h ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) + set(GGML_HEADERS_KOMPUTE ggml-kompute.h ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) + add_compile_definitions(GGML_USE_KOMPUTE) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} kompute) set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${CMAKE_BINARY_DIR}) else() @@ -685,6 +694,18 @@ if (LLAMA_KOMPUTE) endif() endif() +if (LLAMA_CPU_HBM) + find_library(memkind memkind REQUIRED) + + add_compile_definitions(GGML_USE_CPU_HBM) + + target_link_libraries(ggml PUBLIC memkind) +endif() + +if (LLAMA_PERF) + add_compile_definitions(GGML_PERF) +endif() + function(get_flags CCID CCVER) set(C_FLAGS "") set(CXX_FLAGS "") @@ -821,6 +842,7 @@ execute_process( ERROR_VARIABLE output OUTPUT_QUIET ) + if (output MATCHES "dyld-1015\.7") add_compile_definitions(HAVE_BUGGY_APPLE_LINKER) endif() @@ -830,10 +852,10 @@ endif() # feel free to update the Makefile for your architecture and send a pull request or issue message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}") if (MSVC) - string(TOLOWER "${CMAKE_GENERATOR_PLATFORM}" CMAKE_GENERATOR_PLATFORM_LWR) - message(STATUS "CMAKE_GENERATOR_PLATFORM: ${CMAKE_GENERATOR_PLATFORM}") + string(TOLOWER "${CMAKE_GENERATOR_PLATFORM}" 
CMAKE_GENERATOR_PLATFORM_LWR) + message(STATUS "CMAKE_GENERATOR_PLATFORM: ${CMAKE_GENERATOR_PLATFORM}") else () - set(CMAKE_GENERATOR_PLATFORM_LWR "") + set(CMAKE_GENERATOR_PLATFORM_LWR "") endif () if (NOT MSVC) @@ -1027,11 +1049,6 @@ endif() # ggml -if (GGML_USE_CPU_HBM) - add_definitions(-DGGML_USE_CPU_HBM) - find_library(memkind memkind REQUIRED) -endif() - add_library(ggml OBJECT ggml.c ggml.h @@ -1048,16 +1065,17 @@ add_library(ggml OBJECT ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} ${GGML_SOURCES_SYCL} ${GGML_HEADERS_SYCL} ${GGML_SOURCES_KOMPUTE} ${GGML_HEADERS_KOMPUTE} + ${GGML_SOURCES_VULKAN} ${GGML_HEADERS_VULKAN} + ${GGML_SOURCES_ROCM} ${GGML_HEADERS_ROCM} ) target_include_directories(ggml PUBLIC . ${LLAMA_EXTRA_INCLUDES}) -target_compile_features(ggml PUBLIC c_std_11) # don't bump +target_compile_features (ggml PUBLIC c_std_11) # don't bump + target_link_libraries(ggml PUBLIC Threads::Threads ${LLAMA_EXTRA_LIBS}) -if (GGML_USE_CPU_HBM) - target_link_libraries(ggml PUBLIC memkind) -endif() add_library(ggml_static STATIC $) + if (BUILD_SHARED_LIBS) set_target_properties(ggml PROPERTIES POSITION_INDEPENDENT_CODE ON) add_library(ggml_shared SHARED $) @@ -1073,7 +1091,8 @@ add_library(llama ) target_include_directories(llama PUBLIC .) -target_compile_features(llama PUBLIC cxx_std_11) # don't bump +target_compile_features (llama PUBLIC cxx_std_11) # don't bump + target_link_libraries(llama PRIVATE ggml ${LLAMA_EXTRA_LIBS} @@ -1124,7 +1143,7 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/LlamaConfig.cmake DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/Llama) set(GGML_PUBLIC_HEADERS "ggml.h" "ggml-alloc.h" "ggml-backend.h" - "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" + "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_METAL}" "${GGML_HEADERS_MPI}" "${GGML_HEADERS_EXTRA}") set_target_properties(ggml PROPERTIES PUBLIC_HEADER "${GGML_PUBLIC_HEADERS}") diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 1fad24fd1..4a30414df 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1091,7 +1091,7 @@ static void ggml_vk_print_gpu_info(size_t idx) { } } -void ggml_vk_instance_init() { +static void ggml_vk_instance_init() { if (vk_instance_initialized) { return; } @@ -1150,7 +1150,7 @@ void ggml_vk_instance_init() { vk_instance_initialized = true; } -void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { +static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { GGML_ASSERT(idx < vk_instance.device_indices.size()); size_t dev_num = vk_instance.device_indices[idx]; #ifdef GGML_VULKAN_DEBUG @@ -4556,13 +4556,13 @@ static void ggml_vk_cleanup(ggml_backend_vk_context * ctx) { } } -GGML_CALL int ggml_vk_get_device_count() { +GGML_CALL static int ggml_vk_get_device_count() { ggml_vk_instance_init(); return vk_instance.device_indices.size(); } -GGML_CALL void ggml_vk_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL static void ggml_vk_get_device_description(int device, char * description, size_t description_size) { ggml_vk_instance_init(); std::vector devices = vk_instance.instance.enumeratePhysicalDevices(); @@ -4580,7 +4580,7 @@ void ggml_vk_init_cpu_assist() { std::cerr << "ggml_vulkan: Found " << ggml_vk_get_device_count() << " Vulkan devices:" << std::endl; - for (size_t i = 0; i < ggml_vk_get_device_count(); i++) { + for (int i = 0; i < ggml_vk_get_device_count(); i++) { ggml_vk_print_gpu_info(i); } // Initialize the first backend to make sure CPU matrix multiplications can be offloaded. 
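Aside on the `static` changes to `ggml-vulkan.cpp` above: a minimal sketch (the build flags in the comment are assumptions for illustration, not the project's own) of why file-local definitions get internal linkage once warnings such as `-Wmissing-declarations` become fatal in the follow-up CI patch.

```cpp
// Assumed build: g++ -Wall -Wextra -Wmissing-declarations -Werror demo.cpp
#include <cstdio>
#include <vector>

// Without `static`, defining a helper like this with no prior declaration in a
// header trips -Wmissing-declarations; internal linkage marks it file-local
// and keeps the build clean, which is what the Vulkan helpers above now do.
static int device_count(const std::vector<int> & devices) {
    return (int) devices.size(); // explicit cast so the loop below stays signed/signed
}

int main() {
    const std::vector<int> devices = {0, 1};
    for (int i = 0; i < device_count(devices); ++i) { // int index vs. int count, no -Wsign-compare
        printf("device %d\n", i);
    }
    return 0;
}
```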
@@ -5267,7 +5267,7 @@ GGML_CALL void ggml_backend_vk_get_device_description(int device, char * descrip } GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total) { - GGML_ASSERT(device < vk_instance.device_indices.size()); + GGML_ASSERT(device < (int) vk_instance.device_indices.size()); vk::PhysicalDevice vkdev = vk_instance.instance.enumeratePhysicalDevices()[vk_instance.device_indices[device]]; From d250c9d61d4d9f7346930814cc4aef3f3673dc3e Mon Sep 17 00:00:00 2001 From: clibdev <52199778+clibdev@users.noreply.github.com> Date: Sat, 17 Feb 2024 18:28:37 +0200 Subject: [PATCH 613/811] gitignore : update for CLion IDE (#5544) --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index b84459b92..62b6b8b1a 100644 --- a/.gitignore +++ b/.gitignore @@ -23,11 +23,13 @@ .clang-tidy .vs/ .vscode/ +.idea/ lcov-report/ gcovr-report/ build* +cmake-build-* out/ tmp/ From 6e4e973b2615f8d390b1c4f4a7e05a119078bb0f Mon Sep 17 00:00:00 2001 From: Ananta Bastola Date: Sat, 17 Feb 2024 16:03:14 -0500 Subject: [PATCH 614/811] ci : add an option to fail on compile warning (#3952) * feat(ci): add an option to fail on compile warning * Update CMakeLists.txt * minor : fix compile warnings ggml-ci * ggml : fix unreachable code warnings ggml-ci * ci : disable fatal warnings for windows, ios and tvos * ggml : fix strncpy warning * ci : disable fatal warnings for MPI build * ci : add fatal warnings to ggml-ci ggml-ci --------- Co-authored-by: Georgi Gerganov --- .github/workflows/build.yml | 10 +++++++--- CMakeLists.txt | 11 +++++++++++ Makefile | 29 ++++++++++++++++++++++++++++ ci/run.sh | 2 +- examples/export-lora/export-lora.cpp | 2 -- ggml-backend.c | 1 + ggml-metal.m | 2 +- ggml.c | 15 +++++++++----- 8 files changed, 60 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ed292d6b8..03d76d455 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -37,6 +37,8 @@ jobs: - name: Build id: make_build + env: + LLAMA_FATAL_WARNINGS: 1 run: | CC=gcc-8 make -j $(nproc) @@ -65,7 +67,7 @@ jobs: run: | mkdir build cd build - cmake .. + cmake .. -DLLAMA_FATAL_WARNINGS=ON cmake --build . --config Release -j $(nproc) - name: Test @@ -100,7 +102,7 @@ jobs: run: | mkdir build cd build - cmake .. -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} + cmake .. -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} cmake --build . --config ${{ matrix.build_type }} -j $(nproc) - name: Test @@ -244,6 +246,8 @@ jobs: - name: Build id: make_build + env: + LLAMA_FATAL_WARNINGS: 1 run: | LLAMA_NO_METAL=1 make -j $(sysctl -n hw.logicalcpu) @@ -277,7 +281,7 @@ jobs: sysctl -a mkdir build cd build - cmake -DLLAMA_METAL=OFF .. + cmake -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_METAL=OFF .. cmake --build . 
--config Release -j $(sysctl -n hw.logicalcpu) - name: Test diff --git a/CMakeLists.txt b/CMakeLists.txt index 2a922fdb3..5ea4d4f19 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -55,6 +55,9 @@ option(LLAMA_ALL_WARNINGS "llama: enable all compiler warnings" option(LLAMA_ALL_WARNINGS_3RD_PARTY "llama: enable all compiler warnings in 3rd party libs" OFF) option(LLAMA_GPROF "llama: enable gprof" OFF) +# build +option(LLAMA_FATAL_WARNINGS "llama: enable -Werror flag" OFF) + # sanitizers option(LLAMA_SANITIZE_THREAD "llama: enable thread sanitizer" OFF) option(LLAMA_SANITIZE_ADDRESS "llama: enable address sanitizer" OFF) @@ -142,6 +145,14 @@ set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) +if (LLAMA_FATAL_WARNINGS) + if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Werror) + elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") + add_compile_options(/WX) + endif() +endif() + # enable libstdc++ assertions for debug builds if (CMAKE_SYSTEM_NAME MATCHES "Linux") add_compile_definitions($<$:_GLIBCXX_ASSERTIONS>) diff --git a/Makefile b/Makefile index 0a2070b53..901798606 100644 --- a/Makefile +++ b/Makefile @@ -215,6 +215,35 @@ MK_CFLAGS += $(WARN_FLAGS) -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmis -Werror=implicit-function-declaration MK_CXXFLAGS += $(WARN_FLAGS) -Wmissing-declarations -Wmissing-noreturn +ifeq ($(LLAMA_FATAL_WARNINGS),1) + MK_CFLAGS += -Werror + MK_CXXFLAGS += -Werror +endif + +ifeq ($(CC_IS_CLANG), 1) + # clang options + MK_CFLAGS += -Wunreachable-code-break -Wunreachable-code-return + MK_HOST_CXXFLAGS += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi + + ifneq '' '$(and $(CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 030800)))' + MK_CFLAGS += -Wdouble-promotion + endif + ifneq '' '$(and $(CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 070300)))' + MK_CFLAGS += -Wdouble-promotion + endif +else + # gcc options + MK_CFLAGS += -Wdouble-promotion + MK_HOST_CXXFLAGS += -Wno-array-bounds + + ifeq ($(shell expr $(CC_VER) \>= 070100), 1) + MK_HOST_CXXFLAGS += -Wno-format-truncation + endif + ifeq ($(shell expr $(CC_VER) \>= 080100), 1) + MK_HOST_CXXFLAGS += -Wextra-semi + endif +endif + # this version of Apple ld64 is buggy ifneq '' '$(findstring dyld-1015.7,$(shell $(CC) $(LDFLAGS) -Wl,-v 2>&1))' MK_CPPFLAGS += -DHAVE_BUGGY_APPLE_LINKER diff --git a/ci/run.sh b/ci/run.sh index 979b4a793..b94658c96 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -33,7 +33,7 @@ sd=`dirname $0` cd $sd/../ SRC=`pwd` -CMAKE_EXTRA="" +CMAKE_EXTRA="-DLLAMA_FATAL_WARNINGS=ON" if [ ! 
-z ${GG_BUILD_METAL} ]; then CMAKE_EXTRA="${CMAKE_EXTRA} -DLLAMA_METAL_SHADER_DEBUG=ON" diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 2f7be8a13..08413f57e 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -7,8 +7,6 @@ #include #include -static const size_t tensor_alignment = 32; - struct lora_info { std::string filename; float scale; diff --git a/ggml-backend.c b/ggml-backend.c index 87eea8440..d019d813a 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1006,6 +1006,7 @@ static int ggml_backend_sched_backend_from_buffer(ggml_backend_sched_t sched, gg } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); + return -1; // silence warning } #if 0 diff --git a/ggml-metal.m b/ggml-metal.m index c1d8e2de8..6e76f8bed 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -176,7 +176,7 @@ struct ggml_metal_context { // MSL code // TODO: move the contents here when ready // for now it is easier to work in a separate file -//static NSString * const msl_library_source = @"see metal.metal"; +// static NSString * const msl_library_source = @"see metal.metal"; // Here to assist with NSBundle Path Hack @interface GGMLMetalClass : NSObject diff --git a/ggml.c b/ggml.c index 4e302fb7d..264cfd705 100644 --- a/ggml.c +++ b/ggml.c @@ -868,7 +868,7 @@ do { \ const __m128 t0 = _mm_add_ps(_mm256_castps256_ps128(x[0]), \ _mm256_extractf128_ps(x[0], 1)); \ const __m128 t1 = _mm_hadd_ps(t0, t0); \ - res = _mm_cvtss_f32(_mm_hadd_ps(t1, t1)); \ + res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t1, t1)); \ } while (0) // TODO: is this optimal ? @@ -1149,7 +1149,7 @@ inline static void __wasm_f16x4_store(ggml_fp16_t * p, v128_t x) { x[i] = _mm_add_ps(x[i], x[offset+i]); \ } \ const __m128 t0 = _mm_hadd_ps(x[0], x[0]); \ - res = _mm_cvtss_f32(_mm_hadd_ps(t0, t0)); \ + res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t0, t0)); \ } // TODO: is this optimal ? 
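Aside on the `(ggml_float)` casts above: a minimal sketch, assuming `-Wdouble-promotion -Werror` as the Makefile changes in this patch now enable, of the implicit `float` to `double` promotion this warning flags and how an explicit cast documents the intended widening. The helper name and values are illustrative only.

```cpp
// Assumed build: g++ -Wall -Wextra -Wdouble-promotion -Werror demo.cpp
#include <cstdio>

typedef double ggml_float; // ggml_float is the double-precision accumulator type in ggml

static float partial_sum_f32(const float * x, int n) {
    float s = 0.0f;
    for (int i = 0; i < n; ++i) {
        s += x[i];
    }
    return s;
}

int main() {
    const float x[4] = {0.5f, 1.5f, 2.0f, 4.0f};

    ggml_float res = 0.0;
    // res += partial_sum_f32(x, 4);              // float implicitly promoted to double -> warning, now an error
    res += (ggml_float) partial_sum_f32(x, 4);    // explicit widening, warning-free

    printf("sum = %f\n", res);
    return 0;
}
```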
@@ -2086,6 +2086,7 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) { } } #else + GGML_UNUSED(numa_flag); // TODO #endif } @@ -3219,7 +3220,7 @@ const char * ggml_get_name(const struct ggml_tensor * tensor) { } struct ggml_tensor * ggml_set_name(struct ggml_tensor * tensor, const char * name) { - strncpy(tensor->name, name, sizeof(tensor->name)); + strncpy(tensor->name, name, sizeof(tensor->name) - 1); tensor->name[sizeof(tensor->name) - 1] = '\0'; return tensor; } @@ -18575,7 +18576,9 @@ static enum ggml_opt_result linesearch_backtracking( (*step) *= width; } - GGML_UNREACHABLE(); + GGML_ASSERT(false && "line search failed"); + + return GGML_LINESEARCH_FAIL; } static enum ggml_opt_result ggml_opt_lbfgs( @@ -18843,7 +18846,9 @@ static enum ggml_opt_result ggml_opt_lbfgs( step[0] = 1.0; } - GGML_UNREACHABLE(); + GGML_ASSERT(false && "lbfgs failed"); + + return GGML_OPT_DID_NOT_CONVERGE; } struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { From 8f1be0d42f23016cb6819dbae01126699c4bd9bc Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 17 Feb 2024 23:04:16 +0200 Subject: [PATCH 615/811] ggml : add ALiBi support for ggml_soft_max_ext (#5488) * ggml : avoid recomputing alibi slopes (CPU) * llama : reuse hparams.f_max_alibi_bias in all cases ggml-ci * ggml : support alibi bias in ggml_soft_max_ext (CPU + Metal) ggml-ci * ggml : handle all SRCs (do not break on first null) ggml-ci * tests : do not use slope for large soft_max accumulates too much error ggml-ci * ggml : alternative ALiBi without extra tensor We compute the slopes in the kernel ggml-ci * cuda : add ALiBi support in ggml_soft_max_ext ggml-ci * ggml : deprecate ggml_alibi * ggml : support multi-sequence ALiBi (Metal) ggml-ci * cuda : add multi-seq ALiBi + remote F16 soft_max ggml-ci * ggml : update deprecation message * ggml : fix pos ptr when no ALiBi ggml-ci * cuda : fix performance (pow -> powf) * cuda : precompute ALiBi constants * metal : pre-compute ALiBi slopes ggml-ci * llama : init kq_pos only if needed ggml-ci * test-backend-ops : add null pos test to soft_max test-backend-ops : replace soft_max tests ggml-ci --------- Co-authored-by: slaren --- ggml-alloc.c | 6 +- ggml-backend.c | 16 +-- ggml-cuda.cu | 263 ++++++++----------------------------- ggml-metal.m | 35 +++-- ggml-metal.metal | 47 ++++++- ggml.c | 118 +++++++++++------ ggml.h | 13 +- llama.cpp | 133 ++++++++++++------- tests/test-backend-ops.cpp | 74 +++++------ 9 files changed, 348 insertions(+), 357 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index c28c37c4f..d4123564f 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -551,7 +551,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr } for (int j = 0; j < GGML_MAX_SRC; j++) { if (graph->nodes[i]->src[j] == NULL) { - break; + continue; } if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); @@ -787,7 +787,7 @@ static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } if (!ggml_gallocr_node_needs_realloc(galloc, src, node_alloc, &node_alloc->src[j])) { #ifndef NDEBUG @@ -833,7 +833,7 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; 
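For the ALiBi patch above (#5488), the following is a compact host-side reference sketch — not the actual ggml kernels — of the fused math the CPU/CUDA/Metal soft_max paths now share: `softmax(x*scale + mask + slope(head)*pos)`, with the per-head slope derived from `max_bias` exactly as in the kernels (`m0`, `m1`, `n_head_log2`). Function names and the example `max_bias = 8.0f` are illustrative assumptions.

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

// per-head ALiBi slope, computed from max_bias the same way the new kernels do
static float alibi_slope(uint32_t h, uint32_t n_head, float max_bias) {
    if (max_bias <= 0.0f) {
        return 0.0f; // no ALiBi: the pos term drops out
    }
    const uint32_t n_head_log2 = 1u << (uint32_t) std::floor(std::log2((float) n_head));

    const float m0 = std::pow(2.0f, -(max_bias       ) / n_head_log2);
    const float m1 = std::pow(2.0f, -(max_bias / 2.0f) / n_head_log2);

    const float base = h < n_head_log2 ? m0 : m1;
    const int   exp  = h < n_head_log2 ? (int) h + 1 : 2*((int) h - (int) n_head_log2) + 1;

    return std::pow(base, (float) exp);
}

// one row of soft_max_ext: p = softmax(x*scale + mask + slope*pos); mask/pos may be empty
static std::vector<float> soft_max_ext_row(const std::vector<float> & x,
                                           const std::vector<float> & mask,
                                           const std::vector<float> & pos,
                                           float scale, float slope) {
    std::vector<float> p(x.size());

    float max_val = -INFINITY;
    for (size_t i = 0; i < x.size(); ++i) {
        p[i] = x[i]*scale
             + (mask.empty() ? 0.0f : mask[i])
             + (pos.empty()  ? 0.0f : slope*pos[i]);
        max_val = std::max(max_val, p[i]);
    }

    float sum = 0.0f;
    for (float & v : p) { v = std::exp(v - max_val); sum += v; }
    for (float & v : p) { v /= sum; }

    return p;
}

int main() {
    const uint32_t n_head   = 8;
    const float    max_bias = 8.0f; // illustrative value for an ALiBi model

    for (uint32_t h = 0; h < n_head; ++h) {
        printf("head %u: slope = %.6f\n", h, alibi_slope(h, n_head, max_bias));
    }

    // a single 4-token row: positions 0..3, no mask
    const auto p = soft_max_ext_row({0.1f, 0.2f, 0.3f, 0.4f}, {}, {0.0f, 1.0f, 2.0f, 3.0f},
                                    1.0f/std::sqrt(128.0f), alibi_slope(0, n_head, max_bias));
    printf("p[0] = %.4f, p[3] = %.4f\n", p[0], p[3]);

    return 0;
}
```

This mirrors how the backends below pick the slope from the head index (row block) and add `slope*pos[col]` alongside the mask before the max/exp/normalize steps, so the separate `ggml_alibi` op can be deprecated.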
} ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); } diff --git a/ggml-backend.c b/ggml-backend.c index d019d813a..66e8c293a 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1041,7 +1041,7 @@ static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, st for (int i = 0; i < GGML_MAX_SRC; i++) { const struct ggml_tensor * src = tensor->src[i]; if (src == NULL) { - break; + continue; } if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { int src_backend = ggml_backend_sched_backend_from_buffer(sched, src->buffer); @@ -1088,7 +1088,7 @@ static void ggml_backend_sched_print_assignments(ggml_backend_sched_t sched, str for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } ggml_backend_t src_backend = tensor_backend(src); fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, @@ -1144,7 +1144,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } if (tensor_backend_id(src) == -1) { tensor_backend_id(src) = ggml_backend_sched_backend_id_from_cur(sched, src); @@ -1256,7 +1256,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } int src_backend_id = tensor_backend_id(src); if (src_backend_id == -1) { @@ -1315,7 +1315,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } int src_backend_id = tensor_backend_id(src); assert(src_backend_id != -1); // all inputs should be assigned by now @@ -1362,7 +1362,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } ggml_backend_t src_backend = tensor_backend(src); if (src_backend != tensor_backend /* && src_backend != NULL */) { @@ -1668,7 +1668,7 @@ static struct ggml_tensor * graph_copy_dup_tensor(struct ggml_hash_set hash_set, for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * s = src->src[i]; if (s == NULL) { - break; + continue; } dst->src[i] = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); } @@ -1697,7 +1697,7 @@ static void graph_copy_init_tensor(struct ggml_hash_set hash_set, struct ggml_te for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * s = src->src[i]; if (s == NULL) { - break; + continue; } graph_copy_init_tensor(hash_set, node_copies, node_init, s); } diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b35fcb7fd..5fd8a87e4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5956,149 +5956,31 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } -template -static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX - const int ncols_data = ncols_template == 0 ? 
ncols_par : ncols_template; - const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; - - const int tid = threadIdx.x; - const int rowx = blockIdx.x; - const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension - - const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; - - const int warp_id = threadIdx.x / WARP_SIZE; - const int lane_id = threadIdx.x % WARP_SIZE; - - extern __shared__ half data_soft_max_f16[]; - half * buf_iw = data_soft_max_f16 + 0; // shared memory buffer for inter-warp communication - // (shared memory) buffer to cache values between iterations: - half2 * vals = vals_smem ? (half2 *) (buf_iw + WARP_SIZE) : (half2 *) (dst + rowx*ncols_data); - // if the buffer is larger than max. shared memory per block, use dst as temp. buffer instead - // in that case col_smem == col_data must be enforced to avoid race conditions - - half2 max_val = make_half2(-INFINITY, -INFINITY); - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; - const int col_smem = vals_smem ? col0 + tid : col_data; - - const int ix = rowx*ncols_data + col_data; - const int iy = rowy*ncols_data + col_data; - - half2 val; - if (need_check && col_data + 0 >= ncols_data) { - val.x = -INFINITY; - } else { - val.x = x[ix + 0]*scale + (y ? y[iy + 0] : 0.0f); - } - if (need_check && col_data + WARP_SIZE >= ncols_data) { - val.y = -INFINITY; - } else { - val.y = x[ix + WARP_SIZE]*scale + (y ? y[iy + WARP_SIZE] : 0.0f); - } - if (!need_check || col_smem < (vals_smem ? ncols_smem : ncols_data)) { - vals[col_smem] = val; - } - max_val = __hmax2(max_val, val); - } - - // find the max value in the block - max_val = warp_reduce_max(max_val); - if (block_size > WARP_SIZE) { - if (warp_id == 0) { - buf_iw[lane_id] = -INFINITY; - } - __syncthreads(); - - if (lane_id == 0) { - buf_iw[warp_id] = __hmax(max_val.x, max_val.y); - } - __syncthreads(); - - max_val = __half2half2(buf_iw[lane_id]); - max_val = warp_reduce_max(max_val); - } else { - max_val = __half2half2(__hmax(max_val.x, max_val.y)); - } - - half2 tmp = make_half2(0.0f, 0.0f); // partial sums - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_smem = vals_smem ? col0 + tid : 2*col0 + 2*warp_id*WARP_SIZE + lane_id; - - if (ncols_template == 0 && col_smem >= (vals_smem ? ncols_smem : ncols_data)) { - break; - } - - const half2 val = h2exp(vals[col_smem] - max_val); - - tmp += val; - vals[col_smem] = val; - } - - // find the sum of exps in the block - tmp = warp_reduce_sum(tmp); - if (block_size > WARP_SIZE) { - if (warp_id == 0) { - buf_iw[lane_id] = 0.0f; - } - __syncthreads(); - - if (lane_id == 0) { - buf_iw[warp_id] = tmp.x + tmp.y; - } - __syncthreads(); - - tmp = __half2half2(buf_iw[lane_id]); - tmp = warp_reduce_sum(tmp); - } else { - tmp = __half2half2(tmp.x + tmp.y); - } - - const half2 inv_sum = make_half2(1.0f, 1.0f) / tmp; - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; - const int col_smem = vals_smem ? 
col0 + tid : col_data; - - const int idst = rowx*ncols_data + col_data; - const half2 result = vals[col_smem] * inv_sum; - - if (need_check && col_data + 0 >= ncols_data) { - return; - } - dst[idst] = result.x; - - if (need_check && col_data + WARP_SIZE >= ncols_data) { - return; - } - - dst[idst + WARP_SIZE] = result.y; - } -#else - (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -} - template -static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { +static __global__ void soft_max_f32(const float * x, const float * mask, const float * pos, float * dst, const int ncols_par, const int nrows_y, const float scale, const float max_bias, const float m0, const float m1, uint32_t n_head_log2) { const int ncols = ncols_template == 0 ? ncols_par : ncols_template; const int tid = threadIdx.x; const int rowx = blockIdx.x; - const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension + const int rowy = rowx % nrows_y; // broadcast the mask in the row dimension const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; const int warp_id = threadIdx.x / WARP_SIZE; const int lane_id = threadIdx.x % WARP_SIZE; + float slope = 0.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int h = rowx/nrows_y; // head index + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = powf(base, exp); + } + extern __shared__ float data_soft_max_f32[]; float * buf_iw = data_soft_max_f32; // shared memory buffer for inter-warp communication // shared memory buffer to cache values between iterations: @@ -6117,7 +5999,8 @@ static __global__ void soft_max_f32(const float * x, const float * y, float * ds const int ix = rowx*ncols + col; const int iy = rowy*ncols + col; - const float val = x[ix]*scale + (y ? y[iy] : 0.0f); + const float val = x[ix]*scale + (mask ? 
mask[iy] : 0.0f) + slope*pos[col]; + vals[col] = val; max_val = max(max_val, val); } @@ -7589,89 +7472,53 @@ static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols diag_mask_inf_f32<<>>(x, dst, ncols_x, rows_per_channel, n_past); } -static void soft_max_f16_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { - int nth = WARP_SIZE; - while (nth < ncols_x/2 && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; - const dim3 block_dims(nth, 1, 1); - const dim3 block_nums(nrows_x, 1, 1); - const size_t shmem = (GGML_PAD(ncols_x, 2*WARP_SIZE) + WARP_SIZE)*sizeof(half); - static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); - if (shmem <= g_device_caps[g_main_device].smpb) { - switch (ncols_x) { - case 32: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 64: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 128: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 256: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 512: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 1024: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 2048: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 4096: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - default: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - } - } else { - const size_t shmem_low = WARP_SIZE*sizeof(half); - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - } -} - -static void soft_max_f32_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { +static void soft_max_f32_cuda(const float * x, const float * mask, const float * pos, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, const float max_bias, cudaStream_t stream) { int nth = WARP_SIZE; while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; const dim3 block_dims(nth, 1, 1); const dim3 block_nums(nrows_x, 1, 1); const size_t shmem = (GGML_PAD(ncols_x, WARP_SIZE) + WARP_SIZE)*sizeof(float); static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + if (shmem < g_device_caps[g_main_device].smpb) { switch (ncols_x) { case 32: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 64: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 128: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 256: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 512: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); 
break; case 1024: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 2048: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 4096: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; default: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; } } else { const size_t shmem_low = WARP_SIZE*sizeof(float); - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); } } @@ -9090,30 +8937,36 @@ static void ggml_cuda_op_soft_max( GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional - const int64_t ne00 = src0->ne[0]; + const int64_t ne00 = src0->ne[0]; const int64_t nrows_x = ggml_nrows(src0); - const int64_t nrows_y = src1 ? ggml_nrows(src1) : 1; + const int64_t nrows_y = src0->ne[1]; - float scale = 1.0f; - memcpy(&scale, dst->op_params, sizeof(float)); + float scale = 1.0f; + float max_bias = 0.0f; -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION >= CUDART_HMAX -#ifdef GGML_CUDA_F16 - const bool use_f16_soft_max = true; -#else - const bool use_f16_soft_max = false; -#endif // GGML_CUDA_F16 -#else - const bool use_f16_soft_max = false; -#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && CUDART_VERSION >= CUDART_HMAX + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); - if (use_f16_soft_max) { - soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); - } else { - soft_max_f32_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + // positions tensor + float * src2_dd = dst_dd; // default to avoid null checks in the kernel + cuda_pool_alloc src2_f; + + ggml_tensor * src2 = dst->src[2]; + const bool use_src2 = src2 != nullptr; + + if (use_src2) { + const bool src2_on_device = use_src2 && src2->backend == GGML_BACKEND_GPU; + ggml_tensor_extra_gpu * src2_extra = use_src2 ? (ggml_tensor_extra_gpu *) src2->extra : nullptr; + + if (src2_on_device) { + src2_dd = (float *) src2_extra->data_device[g_main_device]; + } else { + src2_dd = src2_f.alloc(ggml_nelements(src2)); + CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src2_dd, src2, 0, 0, 0, 1, main_stream)); + } } - (void) dst; + soft_max_f32_cuda(src0_dd, src1 ? 
src1_dd : nullptr, src2_dd, dst_dd, ne00, nrows_x, nrows_y, scale, max_bias, main_stream); } static void ggml_cuda_op_scale( diff --git a/ggml-metal.m b/ggml-metal.m index 6e76f8bed..c0848a293 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -728,6 +728,7 @@ static bool ggml_metal_graph_compute( size_t offs_src0 = 0; size_t offs_src1 = 0; + size_t offs_src2 = 0; size_t offs_dst = 0; id command_buffer = command_buffers[cb_idx]; @@ -746,6 +747,7 @@ static bool ggml_metal_graph_compute( struct ggml_tensor * src0 = gf->nodes[i]->src[0]; struct ggml_tensor * src1 = gf->nodes[i]->src[1]; + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; struct ggml_tensor * dst = gf->nodes[i]; switch (dst->op) { @@ -807,6 +809,7 @@ static bool ggml_metal_graph_compute( id id_src0 = src0 ? ggml_metal_get_buffer(src0, &offs_src0) : nil; id id_src1 = src1 ? ggml_metal_get_buffer(src1, &offs_src1) : nil; + id id_src2 = src2 ? ggml_metal_get_buffer(src2, &offs_src2) : nil; id id_dst = dst ? ggml_metal_get_buffer(dst, &offs_dst) : nil; //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); @@ -1188,7 +1191,16 @@ static bool ggml_metal_graph_compute( pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; } - const float scale = ((float *) dst->op_params)[0]; + const float scale = ((float *) dst->op_params)[0]; + const float max_bias = ((float *) dst->op_params)[1]; + + const int64_t nrows_x = ggml_nrows(src0); + const int64_t nrows_y = src0->ne[1]; + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1197,11 +1209,20 @@ static bool ggml_metal_graph_compute( } else { [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; } - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + if (id_src2) { + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:2]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:3]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:4]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:5]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:6]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:7]; + [encoder setBytes:&max_bias length:sizeof(max_bias) atIndex:8]; + [encoder setBytes:&m0 length:sizeof(m0) atIndex:9]; + [encoder setBytes:&m1 length:sizeof(m1) atIndex:10]; + [encoder setBytes:&n_head_log2 length:sizeof(n_head_log2) atIndex:11]; [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; @@ -1514,8 +1535,6 @@ static bool ggml_metal_graph_compute( // max size of the src1ids array in the kernel stack GGML_ASSERT(ne11 <= 512); - struct ggml_tensor * src2 = gf->nodes[i]->src[2]; - const int64_t ne20 = src2 ? src2->ne[0] : 0; const int64_t ne21 = src2 ? src2->ne[1] : 0; const int64_t ne22 = src2 ? 
src2->ne[2] : 0; diff --git a/ggml-metal.metal b/ggml-metal.metal index efed6ad46..09ebcc9e3 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -351,12 +351,17 @@ kernel void kernel_sum_rows( kernel void kernel_soft_max( device const float * src0, device const float * src1, + device const float * src2, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, constant float & scale, - threadgroup float * buf [[threadgroup(0)]], + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], uint sgitg[[simdgroup_index_in_threadgroup]], @@ -368,13 +373,26 @@ kernel void kernel_soft_max( device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; device const float * pmask = src1 != src0 ? src1 + i01*ne00 : nullptr; + device const float * ppos = src2 != src0 ? src2 : nullptr; device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + float slope = 0.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + // parallel max float lmax = -INFINITY; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)); + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]); } // find the max value in the block @@ -399,7 +417,7 @@ kernel void kernel_soft_max( // parallel sum float lsum = 0.0f; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)) - max_val); + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); lsum += exp_psrc0; pdst[i00] = exp_psrc0; } @@ -437,12 +455,17 @@ kernel void kernel_soft_max( kernel void kernel_soft_max_4( device const float * src0, device const float * src1, + device const float * src2, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, constant float & scale, - threadgroup float * buf [[threadgroup(0)]], + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], uint sgitg[[simdgroup_index_in_threadgroup]], @@ -454,13 +477,25 @@ kernel void kernel_soft_max_4( device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); device const float4 * pmask = src1 != src0 ? (device const float4 *)(src1 + i01*ne00) : nullptr; + device const float4 * ppos = src2 != src0 ? (device const float4 *)(src2) : nullptr; device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + float slope = 0.0f; + + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + // parallel max float4 lmax4 = -INFINITY; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f)); + lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? 
pmask[i00] : 0.0f) + slope*ppos[i00]); } const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); @@ -486,7 +521,7 @@ kernel void kernel_soft_max_4( // parallel sum float4 lsum4 = 0.0f; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f)) - max_val); + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); lsum4 += exp_psrc4; pdst4[i00] = exp_psrc4; } diff --git a/ggml.c b/ggml.c index 264cfd705..e94024c62 100644 --- a/ggml.c +++ b/ggml.c @@ -5096,16 +5096,28 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, + struct ggml_tensor * pos, float scale, + float max_bias, bool inplace) { GGML_ASSERT(ggml_is_contiguous(a)); + if (mask) { GGML_ASSERT(ggml_is_contiguous(mask)); - GGML_ASSERT(mask->ne[2] == 1); - GGML_ASSERT(mask->ne[3] == 1); + GGML_ASSERT(ggml_is_matrix(mask)); GGML_ASSERT(ggml_can_repeat_rows(mask, a)); } + if (pos) { + GGML_ASSERT(ggml_is_vector(pos)); + GGML_ASSERT(pos->type == GGML_TYPE_F32); + GGML_ASSERT(pos->ne[0] == a->ne[0]); + } + + if (max_bias > 0.0f) { + GGML_ASSERT(pos); + } + bool is_node = false; if (a->grad) { @@ -5114,13 +5126,14 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); - float params[] = { scale }; + float params[] = { scale, max_bias }; ggml_set_op_params(result, params, sizeof(params)); result->op = GGML_OP_SOFT_MAX; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; result->src[1] = mask; + result->src[2] = pos; return result; } @@ -5128,21 +5141,23 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * ggml_soft_max( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, NULL, 1.0f, false); + return ggml_soft_max_impl(ctx, a, NULL, NULL, 1.0f, 0.0f, false); } struct ggml_tensor * ggml_soft_max_inplace( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, NULL, 1.0f, true); + return ggml_soft_max_impl(ctx, a, NULL, NULL, 1.0f, 0.0f, true); } struct ggml_tensor * ggml_soft_max_ext( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, - float scale) { - return ggml_soft_max_impl(ctx, a, mask, scale, false); + struct ggml_tensor * pos, + float scale, + float max_bias) { + return ggml_soft_max_impl(ctx, a, mask, pos, scale, max_bias, false); } // ggml_soft_max_back @@ -11495,6 +11510,7 @@ static void ggml_compute_forward_soft_max_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, + const struct ggml_tensor * src2, struct ggml_tensor * dst) { assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); @@ -11503,16 +11519,29 @@ static void ggml_compute_forward_soft_max_f32( return; } - float scale = 1.0f; - memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + float scale = 1.0f; + float max_bias = 0.0f; + + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); // TODO: handle transposed/permuted matrices const int ith = params->ith; const int nth = params->nth; + GGML_TENSOR_UNARY_OP_LOCALS + const int64_t ne11 = src1 ? src1->ne[1] : 1; + // TODO: is this supposed to be ceil instead of floor? 
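    // for example (illustrative numbers, not taken from a specific model): with
    // max_bias = 8.0f and n_head_kv = 12, floor(log2(12)) = 3 gives n_head_log2 = 8,
    // so m0 = 2^(-8/8) = 0.5 and m1 = 2^(-4/8) ~= 0.707, while ceil would give
    // n_head_log2 = 16, m0 ~= 0.707 and m1 ~= 0.841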
+ // https://huggingface.co/mosaicml/mpt-7b/blob/main/attention.py#L370 + const uint32_t n_head_kv = ne02; + const uint32_t n_head_log2 = 1u << (uint32_t) floor(log2(n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + const int nc = src0->ne[0]; const int nr = ggml_nrows(src0); @@ -11525,6 +11554,9 @@ static void ggml_compute_forward_soft_max_f32( float * wp = (float *) params->wdata + (nc + CACHE_LINE_SIZE_F32) * ith; + // when max_bias <= 0.0f, src2 is not used and we default it to src0 to avoid branching + float * pos = src2 ? (float *) src2->data : src0->data; + for (int i1 = ir0; i1 < ir1; i1++) { float * sp = (float *)((char *) src0->data + i1*src0->nb[1]); float * dp = (float *)((char *) dst->data + i1*dst->nb[1]); @@ -11538,6 +11570,16 @@ static void ggml_compute_forward_soft_max_f32( ggml_vec_acc_f32(nc, wp, mp); } + // ALiBi bias + if (max_bias > 0.0f) { + const uint32_t h = (i1/ne01)%ne02; // head + const float slope = h < n_head_log2 ? powf(m0, h + 1) : powf(m1, 2*(h - n_head_log2) + 1); + + for (int i = 0; i < nc; i++) { + wp[i] = wp[i] + slope*pos[i]; + } + } + #ifndef NDEBUG for (int i = 0; i < nc; ++i) { //printf("p[%d] = %f\n", i, p[i]); @@ -11582,11 +11624,12 @@ static void ggml_compute_forward_soft_max( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, + const struct ggml_tensor * src2, struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_f32(params, src0, src1, dst); + ggml_compute_forward_soft_max_f32(params, src0, src1, src2, dst); } break; default: { @@ -11730,22 +11773,20 @@ static void ggml_compute_forward_alibi_f32( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - for (int64_t i = 0; i < ne0; i++) { - for (int64_t j = 0; j < ne1; j++) { - for (int64_t k = 0; k < ne2_ne3; k++) { + for (int64_t k = 0; k < ne2_ne3; k++) { + // TODO: k*nb2 or k*nb3 + float m_k; + + if (k < n_heads_log2_floor) { + m_k = powf(m0, k + 1); + } else { + m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); + } + + for (int64_t i = 0; i < ne0; i++) { + for (int64_t j = 0; j < ne1; j++) { float * const src = (float *)((char *) src0->data + i*nb0 + j*nb1 + k*nb2); float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); - - // TODO: k*nb2 or k*nb3 - - float m_k; - - if (k < n_heads_log2_floor) { - m_k = powf(m0, k + 1); - } else { - m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); - } - pdst[0] = i * m_k + src[0]; } } @@ -11790,21 +11831,20 @@ static void ggml_compute_forward_alibi_f16( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - for (int i = 0; i < ne0; i++) { - for (int j = 0; j < ne1; j++) { - for (int k = 0; k < ne2_ne3; k++) { + for (int k = 0; k < ne2_ne3; k++) { + // TODO: k*nb2 or k*nb3 + float m_k; + + if (k < n_heads_log2_floor) { + m_k = powf(m0, k + 1); + } else { + m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); + } + + for (int i = 0; i < ne0; i++) { + for (int j = 0; j < ne1; j++) { ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i*nb0 + j*nb1 + k*nb2); - float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); - - // TODO: k*nb2 or k*nb3 - - float m_k; - - if (k < n_heads_log2_floor) { - m_k = powf(m0, k + 1); - } else { - m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 
1); - } + float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); // we return F32 pdst[0] = i * m_k + GGML_FP16_TO_FP32(src[0]); @@ -15116,7 +15156,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_SOFT_MAX: { - ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); } break; case GGML_OP_SOFT_MAX_BACK: { diff --git a/ggml.h b/ggml.h index 270018185..6c1956772 100644 --- a/ggml.h +++ b/ggml.h @@ -1383,13 +1383,17 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); - // fused soft_max(a*scale + mask) + // fused soft_max(a*scale + mask + pos[i]*(ALiBi slope)) // mask is optional + // pos is required when max_bias > 0.0f + // max_bias = 0.0f for no ALiBi GGML_API struct ggml_tensor * ggml_soft_max_ext( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, - float scale); + struct ggml_tensor * pos, + float scale, + float max_bias); GGML_API struct ggml_tensor * ggml_soft_max_back( struct ggml_context * ctx, @@ -1491,12 +1495,13 @@ extern "C" { // alibi position embedding // in-place, returns view(a) - GGML_API struct ggml_tensor * ggml_alibi( + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_alibi( struct ggml_context * ctx, struct ggml_tensor * a, int n_past, int n_head, - float bias_max); + float bias_max), + "use ggml_soft_max_ext instead (will be removed in Mar 2024)"); // clamp // in-place, returns view(a) diff --git a/llama.cpp b/llama.cpp index 8966c3e66..6ac9caa95 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1557,12 +1557,13 @@ struct llama_hparams { uint32_t n_yarn_orig_ctx; int32_t rope_scaling_type_train; - float f_clamp_kqv; - float f_max_alibi_bias; + float f_clamp_kqv = 0.0f; + float f_max_alibi_bias = 0.0f; bool causal_attn = true; - uint32_t pooling_type = LLAMA_POOLING_NONE; + bool need_kq_pos = false; + uint32_t pooling_type = LLAMA_POOLING_NONE; bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1923,6 +1924,7 @@ struct llama_context { struct ggml_tensor * inp_embd; // F32 [n_embd, n_batch] struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] + struct ggml_tensor * inp_KQ_pos; // F32 [n_ctx] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] struct ggml_tensor * inp_mean; // F32 [n_batch, n_batch] struct ggml_tensor * inp_cls; // I32 [n_batch] @@ -3054,6 +3056,11 @@ static void llm_load_hparams( case 40: model.type = e_model::MODEL_13B; break; default: model.type = e_model::MODEL_UNKNOWN; } + + if (model.type == e_model::MODEL_13B) { + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; + } } break; case LLM_ARCH_STARCODER: { @@ -3081,6 +3088,9 @@ static void llm_load_hparams( case 32: model.type = e_model::MODEL_1B; break; default: model.type = e_model::MODEL_UNKNOWN; } + + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; } break; case LLM_ARCH_BERT: { @@ -3126,11 +3136,12 @@ static void llm_load_hparams( case 4096: model.type = e_model::MODEL_7B; break; } break; } + + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; } break; case LLM_ARCH_MPT: { - hparams.f_clamp_kqv = 0.0f; - ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CLAMP_KQV, hparams.f_clamp_kqv, false); ml.get_key(LLM_KV_ATTENTION_MAX_ALIBI_BIAS, hparams.f_max_alibi_bias); 
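For models that need ALiBi (hparams.f_max_alibi_bias > 0.0f), the scaling, masking and ALiBi bias are now fused into a single ggml_soft_max_ext call instead of the deprecated ggml_scale + ggml_alibi + ggml_add + ggml_soft_max chain; the new inp_KQ_pos input is an F32 vector that llama_set_inputs fills with the positions of the KV cache cells. Below is a minimal sketch of the fused call, assuming ctx, kq, kq_mask, kq_pos, kq_scale and max_alibi_bias are already set up as in llm_build_kqv; the helper name is illustrative and not part of the patch.

    #include "ggml.h"

    // illustrative helper, not part of the patch
    static struct ggml_tensor * build_kq_soft_max(
            struct ggml_context * ctx,
            struct ggml_tensor  * kq,       // [n_kv, n_tokens, n_head] attention scores
            struct ggml_tensor  * kq_mask,  // F32 [n_kv, n_tokens] mask, optional
            struct ggml_tensor  * kq_pos,   // F32 [n_kv] KV positions, required when max_alibi_bias > 0.0f
            float                 kq_scale,
            float                 max_alibi_bias) {
        // fused soft_max(kq*kq_scale + kq_mask + kq_pos[i]*slope), slope derived from max_alibi_bias
        return ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, max_alibi_bias);
    }

As the #pragma messages in llm_build_kqv note, the Vulkan, Kompute and SYCL backends still fall back to the old ggml_alibi path for now.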
@@ -3232,6 +3243,10 @@ static void llm_load_hparams( } model.ftype = ml.ftype; + + if (hparams.f_max_alibi_bias > 0.0f) { + hparams.need_kq_pos = true; + } } // TODO: This should probably be in llama.h @@ -4774,10 +4789,10 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * wo_b, struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, + struct ggml_tensor * kq_pos, int64_t n_ctx, int32_t n_tokens, int32_t n_kv, - float max_alibi_bias, float kq_scale, const llm_build_cb & cb, int il) { @@ -4807,26 +4822,26 @@ static struct ggml_tensor * llm_build_kqv( ggml_mul_mat_set_prec(kq, GGML_PREC_F32); } - if (max_alibi_bias > 0.0f) { - // temporary branch until we figure out how to handle ggml_alibi through ggml_add +#if defined(GGML_USE_VULKAN) || defined(GGML_USE_KOMPUTE) || defined(GGML_USE_SYCL) +#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Vulkan, Kompute, and SYCL") +#pragma message(" Falling back to ggml_alibi(). Will become an error in Mar 2024") +#pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5488") + if (hparams.f_max_alibi_bias > 0.0f) { kq = ggml_scale(ctx, kq, kq_scale); cb(kq, "kq_scaled", il); - if (max_alibi_bias > 0.0f) { - // TODO: n_head or n_head_kv - // TODO: K-shift is likely not working - // TODO: change to ggml_add - kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias); - cb(kq, "kq_scaled_alibi", il); - } + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, hparams.f_max_alibi_bias); + cb(kq, "kq_scaled_alibi", il); kq = ggml_add(ctx, kq, kq_mask); cb(kq, "kq_masked", il); kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); - } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); + } else +#endif + { + kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, hparams.f_max_alibi_bias); cb(kq, "kq_soft_max_ext", il); } @@ -4874,11 +4889,11 @@ static struct ggml_tensor * llm_build_kv( struct ggml_tensor * v_cur, struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, + struct ggml_tensor * kq_pos, int64_t n_ctx, int32_t n_tokens, int32_t kv_head, int32_t n_kv, - float max_alibi_bias, float kq_scale, const llm_build_cb & cb, int il) { @@ -4892,9 +4907,8 @@ static struct ggml_tensor * llm_build_kv( llm_build_kv_store(ctx, hparams, kv, graph, k_cur, v_cur, n_ctx, n_tokens, kv_head, cb, il); struct ggml_tensor * cur; - cur = llm_build_kqv(ctx, model, hparams, kv, graph, - wo, wo_b, - q_cur, kq_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, kq_scale, cb, il); + cur = llm_build_kqv(ctx, model, hparams, kv, graph, wo, wo_b, + q_cur, kq_mask, kq_pos, n_ctx, n_tokens, n_kv, kq_scale, cb, il); cb(cur, "kqv_out", il); return cur; @@ -5062,7 +5076,7 @@ struct llm_build_context { } Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); @@ -5077,7 +5091,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5207,6 +5221,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, 
n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + // shift the entire K-cache if needed if (do_rope_shift) { llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); @@ -5255,12 +5273,9 @@ struct llm_build_context { cb(Kcur, "Kcur", il); - // apply ALiBi for 13B model - const float max_alibi_bias = model.type == MODEL_13B ? 8.0f : -1.0f; - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5384,7 +5399,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5483,7 +5498,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5688,7 +5703,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Q, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Q, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5750,6 +5765,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -5777,7 +5796,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5878,7 +5897,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } else { // compute Q and K and RoPE them @@ -5909,7 +5928,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 
1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5985,6 +6004,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + inpL = llm_build_norm(ctx0, inpL, hparams, model.tok_norm, model.tok_norm_b, @@ -6018,7 +6041,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6078,6 +6101,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * attn_norm; @@ -6111,7 +6138,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6233,7 +6260,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6348,7 +6375,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6469,7 +6496,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6596,7 +6623,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f, cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); cb(cur, "kqv_out", il); } @@ -6699,7 +6726,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, 
il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } struct ggml_tensor * sa_out = cur; @@ -6798,7 +6825,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6907,7 +6934,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7025,7 +7052,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7144,7 +7171,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7276,7 +7303,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7507,6 +7534,18 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } + if (hparams.need_kq_pos) { + const int64_t n_kv = kv_self.n; + + assert(ggml_backend_buffer_is_host(lctx.inp_KQ_pos->buffer)); + + float * data = (float *) lctx.inp_KQ_pos->data; + + for (int i = 0; i < n_kv; ++i) { + data[i] = float(lctx.kv_self.cells[i].pos); + } + } + if (kv_self.has_shift) { const int64_t n_ctx = cparams.n_ctx; @@ -11434,7 +11473,7 @@ struct llama_context * llama_new_context_with_model( // graph inputs { ggml_init_params init_params = { - /* .mem_size */ ggml_tensor_overhead()*7, + /* .mem_size */ ggml_tensor_overhead()*8, /* .mem_buffer */ nullptr, /* .no_alloc */ true, }; @@ -11444,6 +11483,7 @@ struct llama_context * llama_new_context_with_model( ctx->inp_embd = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, hparams.n_embd, cparams.n_batch); ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); + ctx->inp_KQ_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); ctx->inp_mean = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); ctx->inp_cls = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); @@ 
-11452,6 +11492,7 @@ struct llama_context * llama_new_context_with_model( ggml_set_name(ctx->inp_embd, "inp_embd"); ggml_set_name(ctx->inp_pos, "inp_pos"); ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask"); + ggml_set_name(ctx->inp_KQ_pos, "inp_KQ_pos"); ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); ggml_set_name(ctx->inp_mean, "inp_mean"); ggml_set_name(ctx->inp_cls, "inp_cls"); diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 9af8517d9..30a7d1f5a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1085,24 +1085,32 @@ struct test_diag_mask_inf : public test_case { struct test_soft_max : public test_case { const ggml_type type; const std::array ne; - const float scale; const bool mask; + const float scale; + const float max_bias; std::string vars() override { - return VARS_TO_STR4(type, ne, scale, mask); + return VARS_TO_STR5(type, ne, mask, scale, max_bias); } test_soft_max(ggml_type type = GGML_TYPE_F32, std::array ne = {10, 10, 10, 10}, + bool mask = false, float scale = 1.0f, - bool mask = false) - : type(type), ne(ne), scale(scale), mask(mask) {} + float max_bias = 0.0f) + : type(type), ne(ne), mask(mask), scale(scale), max_bias(max_bias) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * b = nullptr; - if (mask) { b = ggml_new_tensor_2d(ctx, type, ne[0], ne[1]); } - ggml_tensor * out = ggml_soft_max_ext(ctx, a, b, scale); + ggml_tensor * mask = nullptr; + if (this->mask) { + mask = ggml_new_tensor_2d(ctx, type, ne[0], ne[1]); + } + ggml_tensor * pos = nullptr; + if (max_bias > 0.0f) { + pos = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ne[0]); + } + ggml_tensor * out = ggml_soft_max_ext(ctx, a, mask, pos, scale, max_bias); return out; } }; @@ -1147,30 +1155,6 @@ struct test_rope : public test_case { } }; -// GGML_OP_ALIBI -struct test_alibi : public test_case { - const ggml_type type; - const std::array ne; - int n_past; - int n_head; - float bias_max; - - std::string vars() override { - return VARS_TO_STR5(type, ne, n_past, n_head, bias_max); - } - - test_alibi(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 10}, - int n_past = 512, int n_head = 10, float bias_max = 0.5f) - : type(type), ne(ne), n_past(n_past), n_head(n_head), bias_max(bias_max) {} - - ggml_tensor * build_graph(ggml_context * ctx) override { - ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * out = ggml_alibi(ctx, a, n_past, n_head, bias_max); - return out; - } -}; - // GGML_OP_POOL2D struct test_pool2d : public test_case { enum ggml_op_pool pool_type; @@ -1488,7 +1472,7 @@ struct test_moe : public test_case { ggml_tensor * cur = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_tokens); ggml_tensor * logits = ggml_mul_mat(ctx, ffn_gate_inp, cur); - ggml_tensor * probs = ggml_soft_max_ext(ctx, logits, nullptr, 1.0f/sqrtf(n_embd)); + ggml_tensor * probs = ggml_soft_max_ext(ctx, logits, nullptr, nullptr, 1.0f/sqrtf(n_embd), 0.0f); // select experts ggml_tensor * selected_experts = ggml_top_k(ctx, probs, n_experts_per_tok); @@ -1617,7 +1601,6 @@ public: ggml_cpy(ctx, v_cur_t, v_cache_view); } - // if max_alibi_bias > 0 then apply ALiBi struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, struct ggml_tensor * k_l, @@ -1636,7 +1619,7 @@ public: struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); - kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, nullptr, kq_scale, 0.0f); // split cached v into n_head 
heads struct ggml_tensor * v = @@ -2083,6 +2066,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 1}, 5)); test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 10}, 5)); +#if 0 std::uniform_int_distribution<> dist_ne1(1, 50); int exponent = 1; while (exponent < (1 << 17)) { @@ -2091,14 +2075,29 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op for (int n = 0; n < 10; ++n) { int64_t ne0 = dist_ne0(rng); int64_t ne1 = dist_ne1(rng); - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1})); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, n/2 == 0, 0.1f, ne0 < 1000 ? 4.0f : 0.0f)); } exponent <<= 1; } +#endif + for (bool mask : {false, true}) { + for (float max_bias : {0.0f, 8.0f}) { + for (float scale : {1.0f, 0.1f}) { + for (int64_t ne0 : {16, 1024}) { + for (int64_t ne1 : {16, 1024}) { + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, mask, scale, max_bias)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0-1, ne1-1, 1, 1}, mask, scale, max_bias)); + } + } + } + } + } - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, 0.1f)); - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, 0.1f, true)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, false, 0.1f, 0.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 0.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, false, 0.1f, 8.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 8.0f)); for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512)); // llama 7B @@ -2113,7 +2112,6 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 32, 2, 512)); // neox (phi-2) } - test_cases.emplace_back(new test_alibi()); test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); From c8e0d7efeb7634ecc2e9832e879ab9fca4510e71 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 18 Feb 2024 00:17:07 +0000 Subject: [PATCH 616/811] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/f8e2ebd66d097614d51a56a755450d4ae1632df1' (2024-02-07) → 'github:NixOS/nixpkgs/5863c27340ba4de8f83e7e3c023b9599c3cb3c80' (2024-02-16) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 239d0686c..47d6448b5 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1707268954, - "narHash": "sha256-2en1kvde3cJVc3ZnTy8QeD2oKcseLFjYPLKhIGDanQ0=", + "lastModified": 1708118438, + "narHash": "sha256-kk9/0nuVgA220FcqH/D2xaN6uGyHp/zoxPNUmPCMmEE=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "f8e2ebd66d097614d51a56a755450d4ae1632df1", + "rev": "5863c27340ba4de8f83e7e3c023b9599c3cb3c80", "type": "github" }, "original": { From bd2d4e393b2b7d2a1b2e201058e26017c9728ead Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 18 Feb 2024 18:16:55 +0200 Subject: 
[PATCH 617/811] 1.5 bit quantization (#5453) * iq1_s: WIP basics * iq1_s: CUDA is working * iq1_s: scalar CPU dot product * iq1_s: WIP AVX2 dot product - something is not right * Fix tests * Fix shadow warnings * Fix after merge with latest master * iq1_s: AVX2 finally works * iq1_s: ARM_NEON dot product. Works, but not very fast * iq1_s: better grid * iq1_s: use IQ2_XXS for attn_output At a cost of 0.04 extra bpw this gives a big improvement in PPL. * iq1_s: Metal basics Dequantize works, but not dot product * iq1_s: Metal works, but quite slow As usual, Apple Silicon does not like the code I write. * iq1_s: Tests * iq1_s: slightly faster dot product --------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 6 +- ggml-backend.c | 2 +- ggml-cuda.cu | 224 ++++++++++- ggml-metal.m | 29 +- ggml-metal.metal | 337 +++++++++++++++++ ggml-quants.c | 657 +++++++++++++++++++++++++++++++-- ggml-quants.h | 14 +- ggml.c | 44 ++- ggml.h | 2 + llama.cpp | 16 +- llama.h | 1 + tests/test-backend-ops.cpp | 2 +- 12 files changed, 1286 insertions(+), 48 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 4a5c504e3..ea7ba50c9 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -23,6 +23,7 @@ static const std::vector QUANT_OPTIONS = { { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, { "IQ2_XXS",LLAMA_FTYPE_MOSTLY_IQ2_XXS," 2.06 bpw quantization", }, { "IQ2_XS", LLAMA_FTYPE_MOSTLY_IQ2_XS, " 2.31 bpw quantization", }, + { "IQ1_S", LLAMA_FTYPE_MOSTLY_IQ1_S, " 1.56 bpw quantization", }, { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "IQ3_XXS",LLAMA_FTYPE_MOSTLY_IQ3_XXS," 3.06 bpw quantization", }, @@ -287,9 +288,10 @@ int main(int argc, char ** argv) { } } - if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) && imatrix_data.empty()) { + if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || + params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || params.ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) && imatrix_data.empty()) { fprintf(stderr, "\n===============================================================================================\n"); - fprintf(stderr, "Please do not use IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); + fprintf(stderr, "Please do not use IQ1_S, IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); fprintf(stderr, "===============================================================================================\n\n\n"); return 1; } diff --git a/ggml-backend.c b/ggml-backend.c index 66e8c293a..5076d9e5e 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -756,7 +756,7 @@ GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, str GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_CPY: - return op->type != GGML_TYPE_IQ2_XXS && op->type != GGML_TYPE_IQ2_XS; // missing type_traits.from_float + return op->type != GGML_TYPE_IQ2_XXS && op->type != GGML_TYPE_IQ2_XS && op->type != GGML_TYPE_IQ1_S; // missing type_traits.from_float case GGML_OP_MUL_MAT: return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; default: diff --git a/ggml-cuda.cu 
b/ggml-cuda.cu index 5fd8a87e4..933ebbc4e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -517,6 +517,15 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#define QR1_S 8 +#define QI1_S (QK_K / (4*QR1_S)) +typedef struct { + half d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1681,6 +1690,137 @@ static const __device__ uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const __device__ uint64_t iq1s_grid[512] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 
0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 
0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 
0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, +}; + static const __device__ uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -1823,6 +1963,29 @@ static __global__ void dequantize_block_iq3_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq1_s * x = (const block_iq1_s *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const int i8 = 4*ib+il; + uint8_t h = x[i].scales[i8/2] >> 4*(i8%2); + const int8_t * grid = (const int8_t *)(iq1s_grid + (x[i].qs[i8] | ((h & 8) << 5))); + const float d = (float)x[i].d * (2*(h & 7) + 1); + for (int j = 0; j < 8; ++j) y[j] = d * grid[j]; +#else + assert(false); +#endif + +} + + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -4522,6 +4685,49 @@ static __device__ __forceinline__ float vec_dot_iq3_xxs_q8_1( #endif } +static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq1_s * bq1 = (const block_iq1_s *) vbq; + + const int ib32 = iqs; + int sumi1 = 0, sumi2 = 0, sumi3 = 0, sumi4 = 0; + const uint8_t h1 = bq1->scales[2*ib32+0]; + const uint8_t h2 = bq1->scales[2*ib32+1]; +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics + const int * q8 = (const int *)bq8_1[ib32].qs; + const int * grid1 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+0] | ((h1 & 0x08) << 5))); + const int * grid2 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+1] | ((h1 & 0x80) << 1))); + const int * grid3 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+2] | ((h2 & 0x08) << 5))); + const int * grid4 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+3] | ((h2 & 0x80) << 1))); + for (int j = 0; j < 2; ++j) { + sumi1 = __dp4a(q8[j+0], grid1[j], sumi1); + sumi2 = __dp4a(q8[j+2], grid2[j], sumi2); + sumi3 = __dp4a(q8[j+4], grid3[j], sumi3); + sumi4 = __dp4a(q8[j+6], grid4[j], sumi4); + } +#else + const int8_t * q8 = bq8_1[ib32].qs; + const int8_t * grid1 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+0] | ((h1 & 0x08) << 5))); + const int8_t * grid2 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+1] | ((h1 & 0x80) << 1))); + const int8_t * grid3 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+2] | ((h2 & 0x08) << 5))); + const int8_t * grid4 = (const int8_t *)(iq1s_grid + 
(bq1->qs[4*ib32+3] | ((h2 & 0x80) << 1))); + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j+ 0] * grid1[j]; + sumi2 += q8[j+ 8] * grid2[j]; + sumi3 += q8[j+16] * grid3[j]; + sumi4 += q8[j+24] * grid4[j]; + } +#endif + const float d = (float)bq1->d * __low2float(bq8_1[ib32].ds); + return d * (sumi1 * (2*(h1 & 7) + 1) + sumi2 * (2*((h1 >> 4) & 7) + 1) + + sumi3 * (2*(h2 & 7) + 1) + sumi4 * (2*((h2 >> 4) & 7) + 1)); +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6561,6 +6767,12 @@ static void dequantize_row_iq3_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq3_xxs<<>>(vx, y); } +template +static void dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq1_s<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6600,6 +6812,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq2_xs_cuda; case GGML_TYPE_IQ3_XXS: return dequantize_row_iq3_xxs_cuda; + case GGML_TYPE_IQ1_S: + return dequantize_row_iq1_s_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6635,6 +6849,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq2_xs_cuda; case GGML_TYPE_IQ3_XXS: return dequantize_row_iq3_xxs_cuda; + case GGML_TYPE_IQ1_S: + return dequantize_row_iq1_s_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8378,6 +8594,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8401,6 +8618,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 
128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8498,6 +8716,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ1_S: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11214,7 +11436,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons return false; } ggml_type a_type = a->type; - if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS) { + if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || a_type == GGML_TYPE_IQ1_S) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index c0848a293..f3c1fff8f 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -61,6 +61,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, GGML_METAL_KERNEL_TYPE_RMS_NORM, GGML_METAL_KERNEL_TYPE_GROUP_NORM, @@ -83,6 +84,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, @@ -101,6 +103,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, @@ -116,6 +119,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, @@ -131,6 +135,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, @@ -433,6 +438,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); @@ -455,6 +461,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, 
ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); @@ -473,6 +480,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); @@ -488,6 +496,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); @@ -503,6 +512,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); @@ -1318,6 +1328,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; 
break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } @@ -1452,6 +1463,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1486,7 +1503,7 @@ static bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { + src0t == GGML_TYPE_Q2_K || src0t == GGML_TYPE_IQ1_S) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { @@ -1592,6 +1609,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } @@ -1729,6 +1747,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1779,7 +1803,7 @@ static bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { + src2t == GGML_TYPE_Q2_K || src2t == GGML_TYPE_IQ1_S) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { @@ -1833,6 +1857,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 09ebcc9e3..a00962111 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2525,6 +2525,13 @@ typedef struct { } block_iq3_xxs; // 98 bytes / block for QK_K = 256, so 
3.0625 bpw +typedef struct { + half d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; + + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3782,6 +3789,137 @@ constexpr constant static uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +#define NGRID_IQ1S 512 +constexpr constant static uint64_t iq1s_grid[NGRID_IQ1S] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 
0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 
0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 
0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, +}; constexpr constant static uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, @@ -4208,6 +4346,123 @@ kernel void kernel_mul_mv_iq3_xxs_f32( kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq1_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[16]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + +#if QK_K == 256 + const int ix = tiisg/2; + const int il = tiisg%2; + + device const float * y4 = y + 32 * ix + 16 * il; + + for (int ib32 = ix; ib32 < nb32; ib32 += 16) { + + for (int i = 0; i < 16; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq1_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib + 2 * il; + device const uint8_t * sc = xr->scales + 2 * ib + il; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + constant int8_t * grid1 = (constant int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + constant int8_t * grid2 = (constant int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + + float2 sum = {0}; + for (int j = 0; j < 8; ++j) { + sum[0] += yl[j+ 0] * grid1[j]; + sum[1] += yl[j+ 8] * grid2[j]; + } + sumf[row] += (float)dh[0] * (sum[0] * (2*(sc[0] & 7) + 1) + sum[1] * (2*((sc[0] >> 4) & 7) + 1)); + + dh += nb*sizeof(block_iq1_s)/2; + qs += nb*sizeof(block_iq1_s); + sc += nb*sizeof(block_iq1_s); + } + + y4 += 16 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_iq1_s_f32")]] +kernel void kernel_mul_mv_iq1_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + 
constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= @@ -4553,6 +4808,22 @@ void dequantize_iq3_xxs(device const block_iq3_xxs * xb, short il, thread type4x } } +template +void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + device const uint8_t * qs = xb->qs + 2*il; + device const uint8_t * sc = xb->scales + il; + const float dl1 = d * (2*(sc[0] & 7) + 1); + const float dl2 = d * (2*((sc[0] >> 4) & 7) + 1); + constant int8_t * grid1 = (constant int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + constant int8_t * grid2 = (constant int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + for (int i = 0; i < 8; ++i) { + reg[i/4+0][i%4] = dl1 * grid1[i]; + reg[i/4+2][i%4] = dl2 * grid2[i]; + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -5095,6 +5366,7 @@ template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -5134,6 +5406,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -5185,6 +5458,7 @@ template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -6152,3 +6426,66 @@ kernel void kernel_mul_mv_id_iq3_xxs_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq1_s_f32")]] +kernel void kernel_mul_mv_id_iq1_s_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + 
device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq1_s_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index f44377f45..48f5294e1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3480,6 +3480,139 @@ static const uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +#define NGRID_IQ2XXS 512 +static const uint64_t iq1s_grid[NGRID_IQ2XXS] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 
0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 
0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 
0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, + +}; + static const uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -3578,6 +3711,49 @@ void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y } } +// ====================== 1.5625 bpw (de)-quantization + +void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + float db[4]; + uint16_t idx[4]; + //const int8_t * grid[4]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * sc = x[i].scales; + const uint8_t * qs = x[i].qs; + + for (int i8 = 0; i8 < QK_K/8; i8 += 4) { + idx[0] = qs[0] | ((sc[0] & 0x08) << 5); + idx[1] = qs[1] | ((sc[0] & 0x80) << 1); + idx[2] = qs[2] | ((sc[1] & 0x08) << 5); + idx[3] = qs[3] | ((sc[1] & 0x80) << 1); + //grid[0] = (const int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + //grid[1] = (const int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + //grid[2] = (const int8_t *)(iq1s_grid + (qs[2] | ((sc[1] & 0x08) << 5))); + //grid[3] = (const int8_t *)(iq1s_grid + (qs[3] | ((sc[1] & 0x80) << 1))); + db[0] = d * (2*(sc[0] & 7) + 1); + db[1] = d * (2*((sc[0] >> 4) & 7) + 1); + db[2] = d * (2*(sc[1] & 7) + 1); + db[3] = d * (2*((sc[1] >> 4) & 7) + 1); + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + for (int j = 0; j < 8; ++j) { + //y[j] = db[l] * grid[l][j]; + y[j] = db[l] * grid[j]; + } + y += 8; + } + qs += 4; + sc += 2; + } + } +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -3679,7 +3855,7 @@ static inline __m128i get_scale_shuffle(int i) { } #endif -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -3690,8 +3866,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q4_0 * restrict x = vx; @@ -4046,7 +4222,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r 
#endif } -void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4057,8 +4233,8 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q4_1 * restrict x = vx; @@ -4264,7 +4440,7 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -4272,8 +4448,8 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_0); assert(nrc == 1); UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q5_0 * restrict x = vx; @@ -4555,7 +4731,7 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4563,8 +4739,8 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_1); assert(nrc == 1); UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q5_1 * restrict x = vx; @@ -4859,7 +5035,7 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -4870,8 +5046,8 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q8_0 * restrict x = vx; @@ -9107,6 +9283,178 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void #endif } +#ifdef __AVX2__ +static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { + const __m256i ax = _mm256_sign_epi8(x, x); + const __m256i sy = _mm256_sign_epi8(y, x); + return _mm256_maddubs_epi16(ax, sy); +} +#endif + +void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq1_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined __ARM_NEON + + const uint8x16_t m8 = 
vdupq_n_u8(0x08); + const uint8x16_t m7 = vdupq_n_u8(0x07); + const uint8x16_t m1 = vdupq_n_u8(0x01); + const int32x4_t vzero = vdupq_n_s32(0); + + uint16_t gindex[8]; + uint16x8x2_t vindex; + int8x16x4_t q1b; + int8x16x4_t q8b; + uint16x8x4_t scales; + int32x4x2_t sumi; + int32x4x2_t dotq; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + sumi.val[0] = sumi.val[1] = vzero; + + for (int i128 = 0; i128 < QK_K/128; ++i128) { + const uint8x16_t ql = vld1q_u8(qs); qs += 16; + const uint8x8_t tm1 = vld1_u8 (sc); sc += 8; + const uint8x8_t tm2 = vshr_n_u8(tm1, 4); + const uint8x16_t qh = vcombine_u8(vzip1_u8(tm1, tm2), vzip2_u8(tm1, tm2)); + const uint8x16_t hbit = vandq_u8(qh, m8); + vindex.val[0] = vorrq_u16(vmovl_u8(vget_low_u8 (ql)), vshlq_n_u16(vmovl_u8(vget_low_u8 (hbit)), 5)); + vindex.val[1] = vorrq_u16(vmovl_u8(vget_high_u8(ql)), vshlq_n_u16(vmovl_u8(vget_high_u8(hbit)), 5)); + const uint8x16_t scales8 = vorrq_u8(vshlq_n_u8(vandq_u8(qh, m7), 1), m1); + scales.val[0] = vmovl_u8(vget_low_u8 (scales8)); + scales.val[1] = vmovl_u8(vget_high_u8 (scales8)); + + for (int l = 0; l < 2; ++l) { + vst1q_u16(gindex+0, vindex.val[l]); + q1b.val[0] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[0])), vld1_s8((const void *)(iq1s_grid+gindex[1]))); + q1b.val[1] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[2])), vld1_s8((const void *)(iq1s_grid+gindex[3]))); + q1b.val[2] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[4])), vld1_s8((const void *)(iq1s_grid+gindex[5]))); + q1b.val[3] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[6])), vld1_s8((const void *)(iq1s_grid+gindex[7]))); + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + dotq.val[0] = vpaddq_s32(ggml_vdotq_s32(vzero, q1b.val[0], q8b.val[0]), ggml_vdotq_s32(vzero, q1b.val[1], q8b.val[1])); + dotq.val[1] = vpaddq_s32(ggml_vdotq_s32(vzero, q1b.val[2], q8b.val[2]), ggml_vdotq_s32(vzero, q1b.val[3], q8b.val[3])); + + sumi.val[0] = vmlaq_s32(sumi.val[0], dotq.val[0], vreinterpretq_s32_u32(vmovl_u16(vget_low_u16 (scales.val[l])))); + sumi.val[1] = vmlaq_s32(sumi.val[1], dotq.val[1], vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales.val[l])))); + } + } + + sumf += y[i].d * GGML_FP16_TO_FP32(x[i].d) * vaddvq_s32(vaddq_s32(sumi.val[0], sumi.val[1])); + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i m8 = _mm_set1_epi8(0x08); + const __m128i m7 = _mm_set1_epi8(0x07); + const __m128i m1 = _mm_set1_epi8(0x01); + const __m128i shuffle_h = _mm_set_epi8(15, 7, 14, 6, 13, 5, 12, 4, 11, 3, 10, 2, 9, 1, 8, 0); + const __m128i shuffle_s[4] = { + _mm_set_epi32(0x03030303, 0x02020202, 0x01010101, 0x00000000), + _mm_set_epi32(0x07070707, 0x06060606, 0x05050505, 0x04040404), + _mm_set_epi32(0x0b0b0b0b, 0x0a0a0a0a, 0x09090909, 0x08080808), + _mm_set_epi32(0x0f0f0f0f, 0x0e0e0e0e, 0x0d0d0d0d, 0x0c0c0c0c) + }; + + uint64_t aux64; + + __m256i v_gindex; + const uint16_t * gindex = (const uint16_t *)&v_gindex; + + __m256 accum = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + __m256i sumi = _mm256_setzero_si256(); + for (int i128 = 0; i128 < QK_K/128; ++i128) { + const __m128i ql = _mm_loadu_si128((const __m128i*)qs); qs += 16; + memcpy(&aux64, sc, 8); sc += 8; + const __m128i qh = _mm_shuffle_epi8(_mm_set_epi64x(aux64 >> 4, aux64), shuffle_h); + const __m256i hbit = _mm256_cvtepu8_epi16(_mm_and_si128(qh, m8)); + v_gindex 
= _mm256_or_si256(_mm256_cvtepu8_epi16(ql), _mm256_slli_epi16(hbit, 5)); + const __m128i scales = _mm_or_si128(_mm_slli_epi16(_mm_and_si128(qh, m7), 1), m1); + + for (int i32 = 0; i32 < 4; ++i32) { + const __m256i q8b = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q1b = _mm256_set_epi64x(iq1s_grid[gindex[4*i32+3]], iq1s_grid[gindex[4*i32+2]], + iq1s_grid[gindex[4*i32+1]], iq1s_grid[gindex[4*i32+0]]); + const __m256i dot = mul_add_epi8(q1b, q8b); + const __m256i s16 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, shuffle_s[i32])); + const __m256i p = _mm256_madd_epi16(s16, dot); + sumi = _mm256_add_epi32(sumi, p); + } + + } + + accum = _mm256_fmadd_ps(_mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(x[i].d)), _mm256_cvtepi32_ps(sumi), accum); + + } + + *s = hsum_float_8(accum); + +#else + + int db[4]; + uint16_t idx[4]; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + int sumi = 0; + for (int i32 = 0; i32 < QK_K/32; ++i32) { + idx[0] = qs[0] | ((sc[0] & 0x08) << 5); + idx[1] = qs[1] | ((sc[0] & 0x80) << 1); + idx[2] = qs[2] | ((sc[1] & 0x08) << 5); + idx[3] = qs[3] | ((sc[1] & 0x80) << 1); + db[0] = (2*(sc[0] & 7) + 1); + db[1] = (2*((sc[0] >> 4) & 7) + 1); + db[2] = (2*(sc[1] & 7) + 1); + db[3] = (2*((sc[1] >> 4) & 7) + 1); + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + int suml = 0; + for (int j = 0; j < 8; ++j) suml += q8[j] * grid[j]; + sumi += db[l] * suml; + q8 += 8; + } + qs += 4; + sc += 2; + } + + sumf += GGML_FP16_TO_FP32(x[i].d) * y[i].d * sumi; + } + + *s = sumf; + +#endif + +} + // ================================ IQ2 quantization ============================================= typedef struct { @@ -9115,14 +9463,22 @@ typedef struct { uint16_t * neighbours; } iq2_entry_t; -static iq2_entry_t iq2_data[2] = { +static iq2_entry_t iq2_data[3] = { + {NULL, NULL, NULL}, {NULL, NULL, NULL}, {NULL, NULL, NULL}, }; -static inline int iq2_data_index(int grid_size) { - GGML_ASSERT(grid_size == 256 || grid_size == 512); - return grid_size == 256 ? 0 : 1; +static inline int iq2_data_index(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + return type == GGML_TYPE_IQ2_XXS ? 0 : + type == GGML_TYPE_IQ2_XS ? 1 : 2; +} + +static inline int iq2_grid_size(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + return type == GGML_TYPE_IQ2_XXS ? 256 : + type == GGML_TYPE_IQ2_XS ? 512 : 512; } static int iq2_compare_func(const void * left, const void * right) { @@ -9131,12 +9487,13 @@ static int iq2_compare_func(const void * left, const void * right) { return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 
1 : 0; } -void iq2xs_init_impl(int grid_size) { - const int gindex = iq2_data_index(grid_size); +void iq2xs_init_impl(enum ggml_type type) { + const int gindex = iq2_data_index(type); + const int grid_size = iq2_grid_size(type); if (iq2_data[gindex].grid) { return; } - static const uint16_t kgrid_256[256] = { + static const uint16_t kgrid_2bit_256[256] = { 0, 2, 5, 8, 10, 17, 20, 32, 34, 40, 42, 65, 68, 80, 88, 97, 100, 128, 130, 138, 162, 257, 260, 272, 277, 320, 388, 408, 512, 514, 546, 642, 1025, 1028, 1040, 1057, 1060, 1088, 1090, 1096, 1120, 1153, 1156, 1168, 1188, 1280, 1282, 1288, @@ -9154,7 +9511,7 @@ void iq2xs_init_impl(int grid_size) { 33888, 34048, 34118, 34196, 34313, 34368, 34400, 34818, 35076, 35345, 36868, 36880, 36900, 36928, 37025, 37142, 37248, 37445, 37888, 37922, 37956, 38225, 39041, 39200, 40962, 41040, 41093, 41225, 41472, 42008, 43088, 43268, }; - static const uint16_t kgrid_512[512] = { + static const uint16_t kgrid_2bit_512[512] = { 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, 73, 80, 82, 85, 88, 97, 100, 128, 130, 133, 136, 145, 148, 153, 160, 257, 260, 262, 265, 272, 274, 277, 280, 282, 289, 292, 320, 322, 325, 328, 337, 340, @@ -9188,9 +9545,45 @@ void iq2xs_init_impl(int grid_size) { 40962, 40968, 40970, 40992, 41002, 41120, 41297, 41305, 41382, 41472, 41474, 41480, 41514, 41600, 41632, 42048, 42133, 42597, 42648, 43018, 43040, 43042, 43048, 43168, 43176, 43268, 43396, 43398, 43560, 43562, 43665, 43690, }; + static const uint16_t kgrid_1bit_512[512] = { + 10, 33, 41, 85, 132, 134, 160, 162, 277, 337, 340, 345, 357, 405, 516, 545, + 553, 598, 641, 650, 681, 1042, 1044, 1097, 1169, 1176, 1320, 1345, 1365, 1378, 1434, 1444, + 1545, 1617, 1642, 1685, 2053, 2080, 2089, 2133, 2176, 2182, 2208, 2214, 2306, 2384, 2393, 2440, + 2453, 2581, 2664, 2690, 2721, 4117, 4161, 4182, 4184, 4261, 4357, 4369, 4372, 4377, 4390, 4422, + 4432, 4437, 4449, 4457, 4485, 4497, 4505, 4629, 4677, 4696, 4774, 5205, 5217, 5225, 5386, 5397, + 5409, 5445, 5457, 5460, 5461, 5462, 5465, 5472, 5477, 5525, 5545, 5650, 5668, 5717, 5729, 5769, + 5777, 6212, 6234, 6244, 6293, 6424, 6482, 6485, 6502, 6505, 6529, 6538, 6565, 6656, 6682, 6788, + 6806, 6820, 8218, 8224, 8226, 8232, 8277, 8326, 8354, 8469, 8521, 8530, 8549, 8596, 8737, 8794, + 9221, 9253, 9348, 9369, 9380, 9474, 9557, 9633, 9732, 9753, 9793, 9830, 9862, 9880, 10240, 10272, + 10282, 10321, 10406, 10517, 10530, 10566, 10585, 10645, 10896, 16466, 16468, 16473, 16485, 16646, 16660, 16665, + 16725, 16793, 16806, 16914, 16969, 16977, 16996, 17028, 17057, 17408, 17416, 17434, 17493, 17512, 17578, 17685, + 17696, 17733, 17745, 17748, 17749, 17750, 17753, 17765, 17794, 17813, 17946, 17984, 18005, 18072, 18453, 18529, + 18569, 18722, 18756, 18762, 18773, 18794, 18833, 18853, 18945, 19026, 19033, 19077, 20489, 20497, 20500, 20517, + 20565, 20586, 20610, 20633, 20757, 20769, 20776, 20805, 20817, 20820, 20821, 20822, 20825, 20837, 20864, 20872, + 20885, 20896, 21002, 21029, 21077, 21146, 21510, 21525, 21573, 21585, 21588, 21589, 21590, 21593, 21605, 21653, + 21665, 21765, 21777, 21780, 21781, 21782, 21785, 21797, 21825, 21828, 21829, 21830, 21833, 21840, 21841, 21842, + 21844, 21846, 21848, 21849, 21850, 21857, 21860, 21861, 21862, 21865, 21893, 21905, 21908, 21909, 21910, 21913, + 21925, 22024, 22037, 22085, 22097, 22100, 22101, 22102, 22105, 22117, 22165, 22545, 22566, 22568, 22594, 22608, + 22613, 22676, 22697, 22793, 22805, 22853, 22865, 22868, 22869, 22870, 22873, 22885, 22933, 22946, 23046, 23072, + 23125, 23209, 24597, 24640, 
24665, 24673, 24725, 24833, 24840, 24869, 24917, 24934, 24965, 25001, 25108, 25110, + 25152, 25184, 25192, 25234, 25616, 25618, 25625, 25685, 25704, 25738, 25744, 25770, 25877, 25897, 25925, 25937, + 25940, 25941, 25942, 25945, 25957, 25986, 26005, 26186, 26197, 26276, 26632, 26634, 26725, 26757, 26770, 26885, + 26965, 26976, 26986, 27032, 27153, 27174, 27200, 27208, 27240, 27269, 27282, 27290, 32778, 32800, 32802, 32808, + 32810, 32853, 32904, 32922, 32930, 32932, 33105, 33110, 33112, 33125, 33157, 33280, 33288, 33301, 33312, 33320, + 33424, 33797, 33829, 33858, 34068, 34133, 34146, 34176, 34217, 34306, 34342, 34441, 34454, 34468, 34832, 34918, + 34965, 34984, 35094, 35137, 35161, 35208, 35232, 35332, 35338, 35368, 35429, 36932, 36934, 36953, 37009, 37125, + 37136, 37138, 37145, 37157, 37205, 37220, 37258, 37290, 37444, 37446, 37465, 37478, 37525, 37905, 37968, 37973, + 38040, 38054, 38145, 38154, 38165, 38180, 38186, 38213, 38225, 38228, 38229, 38230, 38233, 38245, 38293, 38485, + 38504, 38530, 38938, 38985, 38993, 39012, 39040, 39173, 39192, 39253, 39265, 39301, 39316, 39322, 39442, 39497, + 39504, 39590, 40970, 40984, 40992, 41002, 41045, 41120, 41128, 41237, 41289, 41297, 41317, 41364, 41366, 41514, + 41557, 41633, 41989, 42021, 42056, 42068, 42074, 42113, 42242, 42265, 42274, 42325, 42340, 42402, 42501, 42512, + 42533, 42624, 42632, 42666, 43040, 43093, 43106, 43168, 43176, 43264, 43286, 43345, 43429, 43590, 43618, 43680, + }; + const int kmap_size = 43692; - const int nwant = 2; - const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; + const int nwant = type == GGML_TYPE_IQ1_S ? 3 : 2; + const uint16_t * kgrid = type == GGML_TYPE_IQ2_XXS ? kgrid_2bit_256 : + type == GGML_TYPE_IQ2_XS ? kgrid_2bit_512 : kgrid_1bit_512; uint64_t * kgrid_q2xs; int * kmap_q2xs; uint16_t * kneighbors_q2xs; @@ -9286,9 +9679,9 @@ void iq2xs_init_impl(int grid_size) { free(dist2); } -void iq2xs_free_impl(int grid_size) { - GGML_ASSERT(grid_size == 256 || grid_size == 512 || grid_size == 1024); - const int gindex = iq2_data_index(grid_size); +void iq2xs_free_impl(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + const int gindex = iq2_data_index(type); if (iq2_data[gindex].grid) { free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; free(iq2_data[gindex].map); iq2_data[gindex].map = NULL; @@ -9322,7 +9715,7 @@ static int iq2_find_best_neighbour(const uint16_t * restrict neighbours, const u static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { - const int gindex = iq2_data_index(256); + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XXS); const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; const int * kmap_q2xs = iq2_data[gindex].map; @@ -9495,7 +9888,7 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { - const int gindex = iq2_data_index(512); + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XS); const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; const int * kmap_q2xs = iq2_data[gindex].map; @@ -10132,3 +10525,207 @@ void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * re assert(k % QK_K == 0); quantize_row_iq3_xxs_impl(x, y, k, NULL); } + +// =================================== 1.5 bpw =================================================== + +static int 
iq1_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float * scale, int8_t * restrict L, int ngrid) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_score = 0; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + if (sumqx > 0 && sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale * sumqx; + grid_index = neighbours[j]; + } + } + if (grid_index < 0) { + for (int i = 0; i < ngrid; ++i) { + const int8_t * grid_i = (const int8_t *)(grid + i); + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < 8; ++j) { + float w = weight[j]; + float q = (grid_i[j] - 3)/2; + sumqx += w*q*xval[j]; + sumq2 += w*q*q; + } + if (sumqx > 0 && sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale*sumqx; + grid_index = i; + } + } + } + if (grid_index < 0) { + printf("Oops, did not find grid point\n"); + printf("Have %d neighbours\n", num_neighbors); + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + printf(" neighbour %d: sumqx = %g sumq2 = %g\n", j, (double)sumqx, (double)sumq2); + } + } + GGML_ASSERT(grid_index >= 0); + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + *scale *= 1.05f; // This is a fudge factor. Don't ask me why it improves the result. + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static int iq1_sort_helper(const void * left, const void * right) { + const float * l = left; + const float * r = right; + return *l < *r ? -1 : *l > *r ? 
1 : 0; +} + +static void quantize_row_iq1_s_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ1_S); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int nbl = n/256; + + block_iq1_s * y = vy; + + float scales[QK_K/8]; + float weight[8]; + int8_t L[8]; + float sumx[9]; + float sumw[9]; + float pairs[16]; + int * idx = (int *)(pairs + 1); + uint8_t hbit[QK_K/8]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(y[ibl].qs, 0, QK_K/8); + memset(y[ibl].scales, 0, QK_K/16); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/8; ++ib) { + const float * xb = xbl + 8*ib; + const float * qw = quant_weights + QK_K*ibl + 8*ib; + for (int i = 0; i < 8; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + float max = fabsf(xb[0]); + for (int i = 1; i < 8; ++i) max = MAX(max, fabsf(xb[i])); + if (!max) { + scales[ib] = 0; + memset(L, 1, 8); + continue; + } + // Here we solve exactly the sum of squared difference (SSD) weighted minimization problem. + // With just 3 allowed quant values (-1, 0, 1), we can search exhaustively for the two + // boundaries that split the weights xb[i] into 3 groups. To do so, we sort the weights + // in ascending order, compute Si = sum[weight[j] xb[j], j = 0...i] and + // Wi = sum[weight[j], j = 0...i], and use these to quckly get get the optimum scale + // for each possible and score for each split. + for (int j = 0; j < 8; ++j) { + pairs[2*j] = xb[j]; + idx[2*j] = j; + } + qsort(pairs, 8, 2*sizeof(float), iq1_sort_helper); + { + sumx[0] = sumw[0] = 0; + for (int j = 0; j < 8; ++j) { + int i = idx[2*j]; + sumx[j+1] = sumx[j] + weight[i]*xb[i]; + sumw[j+1] = sumw[j] + weight[i]; + } + } + float best_score = 0, scale = max; + int besti1 = 0, besti2 = 0; + for (int i1 = 0; i1 <= 8; ++i1) { + for (int i2 = i1; i2 <= 8; ++i2) { + float sumqx = -(sumx[i1] - sumx[0]) + (sumx[8] - sumx[i2]); + float sumq2 = (sumw[i1] - sumw[0]) + (sumw[8] - sumw[i2]); + if (sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + scale = sumqx/sumq2; best_score = scale*sumqx; + besti1 = i1; besti2 = i2; + } + } + } + for (int j = 0; j < besti1; ++j) L[idx[2*j]] = 0; + for (int j = besti1; j < besti2; ++j) L[idx[2*j]] = 1; + for (int j = besti2; j < 8; ++j) L[idx[2*j]] = 2; + if (scale < 0) { + for (int j = 0; j < 8; ++j) L[j] = 2 - L[j]; + scale = -scale; + } + // Now we check if the solution found above corresponds to a grid point and, if not, use a neighbouring + // grid point that minimizes SSD. 
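    // For reference (annotation, not part of the committed hunk): each of the 8 ternary
    // values L[j] in {0,1,2} occupies 2 bits of the 16-bit code u below, so kmap_q2xs[u]
    // either yields the matching grid index or -1, in which case the nearest entry from
    // the precomputed neighbour list is used. The chosen index is stored as 8 bits in
    // qs[] plus one high bit folded into the 4-bit scale nibble (3-bit scale + 1 grid bit),
    // so one 256-weight block takes 2 + 32 + 16 = 50 bytes, i.e. the advertised 1.5625 bpw.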
+ uint16_t u = 0; + for (int j = 0; j < 8; ++j) u |= (L[j] << 2*j); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq1_find_best_neighbour(neighbours, kgrid_q2xs, xb, weight, &scale, L, NGRID_IQ2XXS); + GGML_ASSERT(grid_index >= 0); + } + y[ibl].qs[ib] = grid_index & 255; + hbit[ib] = grid_index >> 8; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/8); + continue; + } + + float d = max_scale/15; + y[ibl].d = GGML_FP32_TO_FP16(d*1.085f); // 1.085f is another fudge factor. Don't ask me why it is needed. + float id = 1/d; + for (int ib = 0; ib < QK_K/8; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(7, l)); + if (hbit[ib]) l |= 8; + y[ibl].scales[ib/2] |= (l << 4*(ib%2)); + } + } +} + +size_t quantize_iq1_s(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq1_s_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq1_s); + } + return nrow * nblock * sizeof(block_iq1_s); +} diff --git a/ggml-quants.h b/ggml-quants.h index 68f09b1e1..ad381cfab 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,6 +191,13 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + #ifdef __cplusplus extern "C" { #endif @@ -243,6 +250,7 @@ void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRI void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -259,6 +267,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const voi void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") @@ -266,6 +275,7 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); @@ -276,8 +286,8 @@ size_t quantize_q4_1 (const float * src, void * dst, int nrows, int n_per_row, size_t quantize_q5_0 (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q5_1 (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); -void iq2xs_init_impl(int grid_size); -void iq2xs_free_impl(int grid_size); +void iq2xs_init_impl(enum ggml_type type); +void iq2xs_free_impl(enum ggml_type type); void iq3xs_init_impl(int grid_size); void iq3xs_free_impl(int grid_size); diff --git a/ggml.c b/ggml.c index e94024c62..aefcda6d4 100644 --- a/ggml.c +++ b/ggml.c @@ -673,6 +673,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ1_S] = { + .type_name = "iq1_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq1_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq1_s, + .from_float = NULL, + .from_float_reference = NULL, + .vec_dot = ggml_vec_dot_iq1_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2267,6 +2279,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; + case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7677,6 +7690,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7944,6 +7958,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -8064,6 +8079,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: default: { GGML_ASSERT(false); @@ -10830,6 +10846,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -11010,6 +11027,7 @@ static void 
ggml_compute_forward_set( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: default: { GGML_ASSERT(false); @@ -11207,6 +11225,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11880,6 +11899,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11957,6 +11977,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19136,8 +19157,9 @@ void ggml_quantize_init(enum ggml_type type) { ggml_critical_section_start(); switch (type) { - case GGML_TYPE_IQ2_XXS: iq2xs_init_impl(256); break; - case GGML_TYPE_IQ2_XS: iq2xs_init_impl(512); break; + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ1_S: iq2xs_init_impl(type); break; case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; default: // nothing break; @@ -19149,8 +19171,10 @@ void ggml_quantize_init(enum ggml_type type) { void ggml_quantize_free(void) { ggml_critical_section_start(); - iq2xs_free_impl(256); - iq2xs_free_impl(512); + iq2xs_free_impl(GGML_TYPE_IQ2_XXS); + iq2xs_free_impl(GGML_TYPE_IQ2_XS); + iq2xs_free_impl(GGML_TYPE_IQ1_S); + iq3xs_free_impl(256); ggml_critical_section_end(); } @@ -19285,7 +19309,8 @@ size_t ggml_quantize_q8_0(const float * src, void * dst, int n, int k, int64_t * bool ggml_quantize_requires_imatrix(enum ggml_type type) { return type == GGML_TYPE_IQ2_XXS || - type == GGML_TYPE_IQ2_XS; + type == GGML_TYPE_IQ2_XS || + type == GGML_TYPE_IQ1_S; } size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, @@ -19410,6 +19435,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq3_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ1_S: + { + GGML_ASSERT(start % QK_K == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq1_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 6c1956772..004d09c70 100644 --- a/ggml.h +++ b/ggml.h @@ -354,6 +354,7 @@ extern "C" { GGML_TYPE_IQ2_XXS = 16, GGML_TYPE_IQ2_XS = 17, GGML_TYPE_IQ3_XXS = 18, + GGML_TYPE_IQ1_S = 19, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -391,6 +392,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 6ac9caa95..5cfebb3b1 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2526,6 +2526,7 @@ struct llama_model_loader { case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break; case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; + case GGML_TYPE_IQ1_S: 
ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2875,6 +2876,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw"; case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; default: return "unknown, may not work"; } @@ -10312,20 +10314,20 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { new_type = GGML_TYPE_Q5_K; } else if (new_type != GGML_TYPE_Q8_0) { new_type = GGML_TYPE_Q6_K; } } else if (name == "token_embd.weight") { - if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { new_type = GGML_TYPE_Q2_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { new_type = GGML_TYPE_Q4_K; } - } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { if (name.find("attn_v.weight") != std::string::npos) { if (qs.model.hparams.n_gqa() >= 4 || qs.model.hparams.n_expert >= 4) new_type = GGML_TYPE_Q4_K; else new_type = GGML_TYPE_Q2_K; @@ -10335,6 +10337,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (qs.i_ffn_down < qs.n_ffn_down/8) new_type = GGML_TYPE_Q2_K; ++qs.i_ffn_down; } + else if (name.find("attn_output.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) new_type = GGML_TYPE_IQ2_XXS; + } } else if (name.find("attn_v.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) { new_type = qs.model.hparams.n_gqa() >= 4 ? 
GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; @@ -10468,7 +10473,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || - new_type == GGML_TYPE_IQ3_XXS) { + new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -10483,6 +10488,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; @@ -10525,6 +10531,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; + case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S ; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } @@ -10698,6 +10705,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS || + new_type == GGML_TYPE_IQ1_S || (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { LLAMA_LOG_ERROR("\n\n============================================================\n"); LLAMA_LOG_ERROR("Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); diff --git a/llama.h b/llama.h index f4ec6ea63..5a97abcc9 100644 --- a/llama.h +++ b/llama.h @@ -100,6 +100,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q2_K_S = 21, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q3_K_XS = 22, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 30a7d1f5a..ef37c5af2 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1917,7 +1917,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, - GGML_TYPE_IQ3_XXS, + GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, }; // unary ops From fc0c8d286a533363a9a663510b62af85ffad58b3 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Sun, 18 Feb 2024 17:19:23 +0100 Subject: [PATCH 618/811] llava : update surgery script to not remove tensors (#5536) This commit updates the surgery script to not remove the tensors from the model file. For this to work the `--skip-unknown` flag is added as an argument to the convert.py script in README.md. The motivation for this change is that the surgery script currently removes the projector tensors from the model file. If the model was checked out from a repository, the model file will have been updated and have to be checked out again to reset this effect. If this can be avoided I think it would be preferable. 
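For reference, the flow stays a simple two-step conversion; roughly (paths and the exact surgery invocation here are only illustrative):

```sh
# extract the projector; the checkpoint on disk is now left untouched
python ./examples/llava/llava-surgery.py -m ../llava-v1.5-7b

# convert the LLaMA part, telling convert.py to skip the still-present projector tensors
python ./convert.py ../llava-v1.5-7b --skip-unknown
```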
I did not perform this change for BakLLaVA models as I am not sure how that part works. --- examples/llava/README.md | 2 +- examples/llava/llava-surgery.py | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 57eb42932..e42db6e5a 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -53,7 +53,7 @@ python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-pa 5. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: ```sh -python ./convert.py ../llava-v1.5-7b +python ./convert.py ../llava-v1.5-7b --skip-unknown ``` Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py index 0a61efdfe..8b7a62fba 100644 --- a/examples/llava/llava-surgery.py +++ b/examples/llava/llava-surgery.py @@ -19,10 +19,6 @@ mm_tensors = [k for k, v in checkpoint.items() if k.startswith("model.mm_project projector = {name: checkpoint[name].float() for name in mm_tensors} torch.save(projector, f"{args.model}/llava.projector") -# remove these tensors from the checkpoint and save it again -for name in mm_tensors: - del checkpoint[name] - # BakLLaVA models contain CLIP tensors in it clip_tensors = [k for k, v in checkpoint.items() if k.startswith("model.vision_tower")] if len(clip_tensors) > 0: @@ -39,7 +35,7 @@ if len(clip_tensors) > 0: f.write("{}\n") -torch.save(checkpoint, path) + torch.save(checkpoint, path) print("Done!") print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.") From 5d3de51f972055702a1859186fe7acb8f0b43dc4 Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Sun, 18 Feb 2024 16:20:12 +0000 Subject: [PATCH 619/811] ggml, common, examples, tests : fixed type arguments in printf (#5528) --- common/common.cpp | 4 +- examples/batched-bench/batched-bench.cpp | 2 +- examples/batched/batched.cpp | 2 +- .../convert-llama2c-to-ggml.cpp | 38 +++++++++---------- examples/perplexity/perplexity.cpp | 2 +- .../train-text-from-scratch.cpp | 14 +++---- ggml.c | 4 +- tests/test-grammar-parser.cpp | 20 +++++----- tests/test-llama-grammar.cpp | 4 +- 9 files changed, 45 insertions(+), 45 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 3a92d3797..9ffc3951f 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1741,7 +1741,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "rope_freq_base: %f # default: 10000.0\n", params.rope_freq_base); fprintf(stream, "rope_freq_scale: %f # default: 1.0\n", params.rope_freq_scale); - fprintf(stream, "seed: %d # default: -1 (random seed)\n", params.seed); + fprintf(stream, "seed: %u # default: -1 (random seed)\n", params.seed); fprintf(stream, "simple_io: %s # default: false\n", params.simple_io ? "true" : "false"); fprintf(stream, "cont_batching: %s # default: false\n", params.cont_batching ? 
"true" : "false"); fprintf(stream, "temp: %f # default: 0.8\n", sparams.temp); @@ -1750,7 +1750,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l dump_vector_float_yaml(stream, "tensor_split", tensor_split_vector); fprintf(stream, "tfs: %f # default: 1.0\n", sparams.tfs_z); - fprintf(stream, "threads: %d # default: %d\n", params.n_threads, std::thread::hardware_concurrency()); + fprintf(stream, "threads: %d # default: %u\n", params.n_threads, std::thread::hardware_concurrency()); fprintf(stream, "top_k: %d # default: 40\n", sparams.top_k); fprintf(stream, "top_p: %f # default: 0.95\n", sparams.top_p); fprintf(stream, "min_p: %f # default: 0.0\n", sparams.min_p); diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 55dfd9784..b4b8a38e1 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -159,7 +159,7 @@ int main(int argc, char ** argv) { } LOG_TEE("\n"); - LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %d, n_threads_batch = %d\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); + LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); LOG_TEE("\n"); LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index eab636692..9be7eb56b 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -92,7 +92,7 @@ int main(int argc, char ** argv) { const int n_ctx = llama_n_ctx(ctx); - LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_batch = %d, n_parallel = %d, n_kv_req = %d\n", __func__, n_len, n_ctx, ctx_params.n_batch, n_parallel, n_kv_req); + LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_batch = %u, n_parallel = %d, n_kv_req = %d\n", __func__, n_len, n_ctx, ctx_params.n_batch, n_parallel, n_kv_req); // make sure the KV cache is big enough to hold all the prompt and generated tokens if (n_kv_req > n_ctx) { diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp index 4d41e1779..8209dcb64 100644 --- a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp @@ -325,14 +325,14 @@ struct train_params { }; static void print_params(struct my_llama_hparams * params) { - printf("%s: n_vocab: %d\n", __func__, params->n_vocab); - printf("%s: n_ctx: %d\n", __func__, params->n_ctx); - printf("%s: n_embd: %d\n", __func__, params->n_embd); - printf("%s: n_mult: %d\n", __func__, params->n_mult); - printf("%s: n_head: %d\n", __func__, params->n_head); - printf("%s: n_ff: %d\n", __func__, params->n_ff); - printf("%s: n_layer: %d\n", __func__, params->n_layer); - printf("%s: n_rot: %d\n", __func__, params->n_rot); + printf("%s: n_vocab: %u\n", __func__, params->n_vocab); + printf("%s: n_ctx: %u\n", __func__, params->n_ctx); + printf("%s: n_embd: %u\n", __func__, params->n_embd); + printf("%s: n_mult: %u\n", __func__, params->n_mult); + printf("%s: n_head: %u\n", __func__, params->n_head); + printf("%s: n_ff: %u\n", __func__, params->n_ff); + printf("%s: n_layer: %u\n", __func__, 
params->n_layer); + printf("%s: n_rot: %u\n", __func__, params->n_rot); } static void init_model(struct my_llama_model * model) { @@ -350,25 +350,25 @@ static void init_model(struct my_llama_model * model) { model->train_tokens = 0; model->tok_embeddings = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); - printf("[%s:GG] Allocating [%d] x [%d] = [%d] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab); + printf("[%s:GG] Allocating [%u] x [%u] = [%u] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab); model->norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); - printf("[%s:GG] Allocating [%d] float space for model->norm\n",__func__,n_embd); + printf("[%s:GG] Allocating [%u] float space for model->norm\n",__func__,n_embd); model->output = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab); // printing the per-layer allocations here so we dont print in the for loop. - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wq for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wk for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wv for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wo for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wq for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wk for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wv for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wo for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] float space for layer.ffn_norm for [%d] layers\n",__func__,n_embd, n_layer); + printf("[%s:GG] Allocating [%u] float space for layer.ffn_norm for [%u] layers\n",__func__,n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w1 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w2 for [%d] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w3 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w1 for [%u] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w2 for [%u] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w3 for [%u] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); ggml_set_name(model->tok_embeddings, "tok_embeddings.weight"); ggml_set_name(model->norm, "norm.weight"); diff --git 
a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 67d2d3293..74dcc642a 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -1623,7 +1623,7 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) { uint32_t n_ctx; in.read((char *)&n_ctx, sizeof(n_ctx)); if (n_ctx > llama_n_ctx(ctx)) { - fprintf(stderr, "%s: %s has been computed with %d, while the current context is %d. Increase it with -c and retry\n", + fprintf(stderr, "%s: %s has been computed with %u, while the current context is %d. Increase it with -c and retry\n", __func__, params.logits_file.c_str(), n_ctx, params.n_ctx); } diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index bfdf124d7..e78ab185d 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -111,13 +111,13 @@ static const char * LLM_TENSOR_FFN_DOWN = "blk.%d.ffn_down"; static const char * LLM_TENSOR_FFN_UP = "blk.%d.ffn_up"; static void print_params(struct my_llama_hparams * params) { - printf("%s: n_vocab: %d\n", __func__, params->n_vocab); - printf("%s: n_ctx: %d\n", __func__, params->n_ctx); - printf("%s: n_embd: %d\n", __func__, params->n_embd); - printf("%s: n_head: %d\n", __func__, params->n_head); - printf("%s: n_ff: %d\n", __func__, params->n_ff); - printf("%s: n_layer: %d\n", __func__, params->n_layer); - printf("%s: n_rot: %d\n", __func__, params->n_rot); + printf("%s: n_vocab: %u\n", __func__, params->n_vocab); + printf("%s: n_ctx: %u\n", __func__, params->n_ctx); + printf("%s: n_embd: %u\n", __func__, params->n_embd); + printf("%s: n_head: %u\n", __func__, params->n_head); + printf("%s: n_ff: %u\n", __func__, params->n_ff); + printf("%s: n_layer: %u\n", __func__, params->n_layer); + printf("%s: n_rot: %u\n", __func__, params->n_rot); } static void set_param_model(struct my_llama_model * model) { diff --git a/ggml.c b/ggml.c index aefcda6d4..8224652a9 100644 --- a/ggml.c +++ b/ggml.c @@ -17909,7 +17909,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * ptr += ggml_nbytes(tensor); - fprintf(stderr, "%s: loaded leaf %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded leaf %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } @@ -18012,7 +18012,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * result->nodes[i] = tensor; - fprintf(stderr, "%s: loaded node %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded node %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } } diff --git a/tests/test-grammar-parser.cpp b/tests/test-grammar-parser.cpp index a0b5b043d..91939e276 100644 --- a/tests/test-grammar-parser.cpp +++ b/tests/test-grammar-parser.cpp @@ -38,8 +38,8 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_pair.first != key || expected_pair.second != value) { - fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); - fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair: %s, %u\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %u\n", key.c_str(), value); fprintf(stderr, "expected_pair != actual_pair\n"); } @@ -96,9 +96,9 
@@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_element.type != element.type || expected_element.value != element.value) { - fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "index: %u\n", index); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element.type, element.value); fprintf(stderr, "expected_element != actual_element\n"); } @@ -144,8 +144,8 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_pair.first != key || expected_pair.second != value) { - fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); - fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair: %s, %u\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %u\n", key.c_str(), value); fprintf(stderr, "expected_pair != actual_pair\n"); } @@ -235,9 +235,9 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_element.type != element.type || expected_element.value != element.value) { - fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "index: %u\n", index); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element.type, element.value); fprintf(stderr, "expected_element != actual_element\n"); } diff --git a/tests/test-llama-grammar.cpp b/tests/test-llama-grammar.cpp index 16ebe753f..27ca4d265 100644 --- a/tests/test-llama-grammar.cpp +++ b/tests/test-llama-grammar.cpp @@ -180,8 +180,8 @@ int main() if (expected_element.type != element->type || expected_element.value != element->value) { fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element->type, element->value); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element->type, element->value); fprintf(stderr, "expected_element != actual_element\n"); } From 1dcc3fde004787e6fc4d84c9de0bb34cd2901a3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 18:21:52 +0200 Subject: [PATCH 620/811] common : fix ub (#5530) --- common/common.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 9ffc3951f..489462b5a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1801,7 +1801,8 @@ void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size) { if (cs_curr[j] < 0) { continue; } if (seqs.find(cs_curr[j]) == seqs.end()) { if (seqs.size() + 1 >= sizeof(slot_chars)) { break; } - seqs[cs_curr[j]] = seqs.size(); + const size_t sz = seqs.size(); + seqs[cs_curr[j]] = sz; } } if (seqs.size() + 1 >= sizeof(slot_chars)) { break; } From 66c1968f7a2e895675425e875b6589f1233a1b52 Mon Sep 17 00:00:00 2001 From: Daniel Hiltgen Date: Sun, 18 Feb 2024 08:23:16 -0800 Subject: [PATCH 621/811] server : graceful server shutdown (#5244) This 
updates the server queue to support graceful shutdown of the server on signals. --- examples/server/server.cpp | 23 ++++++++++++++++++++++- examples/server/utils.hpp | 20 +++++++++++++++++--- 2 files changed, 39 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a0b46970b..7800c6e7e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -28,6 +28,7 @@ #include #include #include +#include using json = nlohmann::json; @@ -2511,6 +2512,9 @@ static void append_to_generated_text_from_generated_token_probs(llama_server_con } } +std::function shutdown_handler; +inline void signal_handler(int signal) { shutdown_handler(signal); } + int main(int argc, char **argv) { #if SERVER_VERBOSE != 1 @@ -3128,8 +3132,25 @@ int main(int argc, char **argv) std::placeholders::_2, std::placeholders::_3 )); - llama.queue_tasks.start_loop(); + shutdown_handler = [&](int) { + llama.queue_tasks.terminate(); + }; + +#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) + struct sigaction sigint_action; + sigint_action.sa_handler = signal_handler; + sigemptyset (&sigint_action.sa_mask); + sigint_action.sa_flags = 0; + sigaction(SIGINT, &sigint_action, NULL); +#elif defined (_WIN32) + auto console_ctrl_handler = +[](DWORD ctrl_type) -> BOOL { + return (ctrl_type == CTRL_C_EVENT) ? (signal_handler(SIGINT), true) : false; + }; + SetConsoleCtrlHandler(reinterpret_cast(console_ctrl_handler), true); +#endif + llama.queue_tasks.start_loop(); + svr.stop(); t.join(); llama_backend_free(); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 548548962..0ee670dba 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -220,6 +220,7 @@ inline std::string format_chatml(std::vector messages) struct llama_server_queue { int id = 0; std::mutex mutex_tasks; + bool running; // queues std::vector queue_tasks; std::vector queue_tasks_deferred; @@ -278,9 +279,18 @@ struct llama_server_queue { queue_tasks_deferred.clear(); } - // Start the main loop. This call is blocking - [[noreturn]] + // end the start_loop routine + void terminate() { + { + std::unique_lock lock(mutex_tasks); + running = false; + } + condition_tasks.notify_all(); + } + + // Start the main loop. void start_loop() { + running = true; while (true) { // new task arrived LOG_VERBOSE("have new task", {}); @@ -324,8 +334,12 @@ struct llama_server_queue { { std::unique_lock lock(mutex_tasks); if (queue_tasks.empty()) { + if (!running) { + LOG_VERBOSE("ending start_loop", {}); + return; + } condition_tasks.wait(lock, [&]{ - return !queue_tasks.empty(); + return (!queue_tasks.empty() || !running); }); } } From 36376abe05a12a8cb3af548a4af9b8d0e2e69597 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 17:30:09 +0100 Subject: [PATCH 622/811] server : --n-predict option document and cap to max value (#5549) * server: document --n-predict * server: ensure client request cannot override n_predict if set * server: fix print usage LF in new --n-predict option --- examples/server/README.md | 1 + examples/server/server.cpp | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index 249368749..fe5cd8d5d 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -39,6 +39,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--mmproj MMPROJ_FILE`: Path to a multimodal projector file for LLaVA. 
- `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` +- `-n, --n-predict`: Set the maximum tokens to predict (default: -1) ## Build diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7800c6e7e..7aa706e95 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -159,6 +159,7 @@ struct llama_client_slot int32_t n_decoded = 0; int32_t n_remaining = -1; int32_t i_batch = -1; + int32_t n_predict = -1; int32_t num_prompt_tokens = 0; int32_t num_prompt_tokens_processed = 0; @@ -410,6 +411,7 @@ struct llama_server_context slot.id = i; slot.n_ctx = n_ctx_slot; + slot.n_predict = params.n_predict; LOG_TEE(" -> Slot %i - max context: %i\n", slot.id, n_ctx_slot); @@ -546,6 +548,15 @@ struct llama_server_context slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + if (slot->n_predict > 0 && slot->params.n_predict > slot->n_predict) { + // Might be better to reject the request with a 400 ? + LOG_WARNING("Max tokens to predict exceeds server configuration", { + {"params.n_predict", slot->params.n_predict}, + {"slot.n_predict", slot->n_predict}, + }); + slot->params.n_predict = slot->n_predict; + } + // infill if (data.count("input_prefix") != 0) { @@ -1053,6 +1064,7 @@ struct llama_server_context return json { {"n_ctx", slot.n_ctx}, + {"n_predict", slot.n_predict}, {"model", params.model_alias}, {"seed", slot.params.seed}, {"temperature", slot.sparams.temp}, @@ -1915,13 +1927,14 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf("\n"); + printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); printf(" --chat-template FORMAT_NAME"); - printf(" set chat template, possible valus is: llama2, chatml (default %s)", sparams.chat_template.c_str()); + printf(" set chat template, possible value is: llama2, chatml (default %s)", sparams.chat_template.c_str()); printf("\n"); } From e75c6279d1c8e7abb82a331f5de7124eed402de2 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 17:31:28 +0100 Subject: [PATCH 623/811] server : enhanced health endpoint (#5548) * server: enrich health endpoint with available slots, return 503 if not slots are available * server: document new status no slot available in the README.md --- examples/server/README.md | 1 + examples/server/server.cpp | 31 +++++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index fe5cd8d5d..5e3ae833b 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -136,6 +136,7 @@ node index.js - `{"status": "loading model"}` if the model is still being loaded. - `{"status": "error"}` if the model failed to load. - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. + - `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. 
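For a quick manual check of the new behaviour (default host/port assumed, adjust as needed):

```sh
# prints 200 with {"status": "ok"} while a slot is free,
# 503 with {"status": "no slot available", ...} otherwise
curl -s -w "\n%{http_code}\n" http://localhost:8080/health
```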
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7aa706e95..8145af867 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2578,8 +2578,35 @@ int main(int argc, char **argv) server_state current_state = state.load(); switch(current_state) { case SERVER_STATE_READY: - res.set_content(R"({"status": "ok"})", "application/json"); - res.status = 200; // HTTP OK + if (llama.all_slots_are_idle) { + res.set_content(R"({"status": "ok"})", "application/json"); + res.status = 200; // HTTP OK + } else { + int available_slots = 0; + int processing_slots = 0; + for (llama_client_slot & slot : llama.slots) { + if (slot.available()) { + available_slots++; + } else { + processing_slots++; + } + } + if (available_slots > 0) { + json health = { + {"status", "ok"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + res.status = 200; // HTTP OK + } else { + json health = { + {"status", "no slot available"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + res.status = 503; // HTTP Service Unavailable + } + } break; case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); From f3f28c5395cd25b371617981b341616dbdd31e85 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 19:17:00 +0200 Subject: [PATCH 624/811] cmake : fix GGML_USE_SYCL typo (#5555) --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5ea4d4f19..0c29b5d09 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -526,7 +526,7 @@ if (LLAMA_SYCL) message(STATUS "SYCL found") - add_compile_definitions(GML_USE_SYCL) + add_compile_definitions(GGML_USE_SYCL) if (LLAMA_SYCL_F16) add_compile_definitions(GGML_SYCL_F16) From 689a091bbe0537ee9abff3e15a1d74f5f3561165 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 19:38:06 +0200 Subject: [PATCH 625/811] sampling : do not set min_keep to n_probs (#5564) --- common/sampling.cpp | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 53013138a..611c327bb 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -121,7 +121,7 @@ static void sampler_queue( struct llama_context * ctx_main, const llama_sampling_params & params, llama_token_data_array & cur_p, - size_t & min_keep) { + size_t min_keep) { const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; @@ -248,10 +248,7 @@ static llama_token llama_sampling_sample_impl( llama_sample_temp(ctx_main, &cur_p, temp); id = llama_sample_token_mirostat_v2(ctx_main, &cur_p, mirostat_tau, mirostat_eta, &ctx_sampling->mirostat_mu); } else { - // temperature sampling - size_t min_keep = std::max(1, params.n_probs); - - sampler_queue(ctx_main, params, cur_p, min_keep); + sampler_queue(ctx_main, params, cur_p, 1); id = llama_sample_token(ctx_main, &cur_p); From c145f8a132b2fe1d1e65987faddbd9a40bef7a12 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 18:39:57 +0100 Subject: [PATCH 626/811] server : slots monitoring endpoint (#5550) --- examples/server/README.md | 64 ++++++++++++++++++++++++++++++++++++++ examples/server/server.cpp | 32 +++++++++++++++++++ 2 files changed, 96 insertions(+) diff --git a/examples/server/README.md 
b/examples/server/README.md index 5e3ae833b..ac5133d24 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -40,6 +40,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) +- `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. ## Build @@ -381,6 +382,69 @@ Notice that each `probs` is an array of length `n_probs`. }' ``` +- **GET** `/slots`: Returns the current slots processing state. Can be disabled with `--slots-endpoint-disable`. + +### Result JSON + +```json +[ + { + "dynatemp_exponent": 1.0, + "dynatemp_range": 0.0, + "frequency_penalty": 0.0, + "grammar": "", + "id": 0, + "ignore_eos": false, + "logit_bias": [], + "min_p": 0.05000000074505806, + "mirostat": 0, + "mirostat_eta": 0.10000000149011612, + "mirostat_tau": 5.0, + "model": "llama-2-7b-32k-instruct.Q2_K.gguf", + "n_ctx": 2048, + "n_keep": 0, + "n_predict": 100000, + "n_probs": 0, + "next_token": { + "has_next_token": true, + "n_remain": -1, + "num_tokens_predicted": 0, + "stopped_eos": false, + "stopped_limit": false, + "stopped_word": false, + "stopping_word": "" + }, + "penalize_nl": true, + "penalty_prompt_tokens": [], + "presence_penalty": 0.0, + "prompt": "Say hello to llama.cpp", + "repeat_last_n": 64, + "repeat_penalty": 1.100000023841858, + "samplers": [ + "top_k", + "tfs_z", + "typical_p", + "top_p", + "min_p", + "temperature" + ], + "seed": 42, + "state": 1, + "stop": [ + "\n" + ], + "stream": false, + "task_id": 0, + "temperature": 0.0, + "tfs_z": 1.0, + "top_k": 40, + "top_p": 0.949999988079071, + "typical_p": 1.0, + "use_penalty_prompt_tokens": false + } +] +``` + ## More examples ### Change system prompt on runtime diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8145af867..4f2e9c898 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -41,6 +41,7 @@ struct server_params int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; + bool slots_endpoint = true; }; bool server_verbose = false; @@ -1926,6 +1927,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); + printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); printf("\n"); printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); @@ -2374,6 +2376,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--slots-endpoint-disable") + { + sparams.slots_endpoint = false; + } else if (arg == "--chat-template") { if (++i >= argc) @@ -2619,6 +2625,32 @@ int main(int argc, char **argv) } }); + if (sparams.slots_endpoint) { + svr.Get("/slots", [&](const httplib::Request&, httplib::Response& res) { + json slots; + for 
(llama_client_slot & slot : llama.slots) { + json slot_data = llama.get_formated_generation(slot); + slot_data["id"] = slot.id; + slot_data["task_id"] = slot.task_id; + slot_data["state"] = slot.state; + slot_data["prompt"] = slot.prompt; + slot_data["next_token"] = { + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"num_tokens_predicted", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }; + + slots.push_back(slot_data); + } + res.set_content(slots.dump(), "application/json"); + res.status = 200; // HTTP OK + }); + } + svr.set_logger(log_server_request); svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) From 5ee99c32f5e47c8d32634eff9a47fb32a24c276b Mon Sep 17 00:00:00 2001 From: Robey Holderith Date: Sun, 18 Feb 2024 11:11:16 -0800 Subject: [PATCH 627/811] common, server : surface min_keep as its own parameter (#5567) * Feature - surface min_keep as its own parameter * Updated README with min_keep param --- common/common.cpp | 1 + common/sampling.cpp | 5 ++++- common/sampling.h | 1 + examples/server/README.md | 2 ++ examples/server/public/index.html | 4 ++++ examples/server/server.cpp | 2 ++ 6 files changed, 14 insertions(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 489462b5a..10ef11829 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1704,6 +1704,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l } fprintf(stream, "lora_base: %s\n", params.lora_base.c_str()); fprintf(stream, "main_gpu: %d # default: 0\n", params.main_gpu); + fprintf(stream, "min_keep: %d # default: 0 (disabled)\n", sparams.min_keep); fprintf(stream, "mirostat: %d # default: 0 (disabled)\n", sparams.mirostat); fprintf(stream, "mirostat_ent: %f # default: 5.0\n", sparams.mirostat_tau); fprintf(stream, "mirostat_lr: %f # default: 0.1\n", sparams.mirostat_eta); diff --git a/common/sampling.cpp b/common/sampling.cpp index 611c327bb..de4331a11 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -248,7 +248,10 @@ static llama_token llama_sampling_sample_impl( llama_sample_temp(ctx_main, &cur_p, temp); id = llama_sample_token_mirostat_v2(ctx_main, &cur_p, mirostat_tau, mirostat_eta, &ctx_sampling->mirostat_mu); } else { - sampler_queue(ctx_main, params, cur_p, 1); + // temperature sampling + size_t min_keep = std::max(1, params.min_keep); + + sampler_queue(ctx_main, params, cur_p, min_keep); id = llama_sample_token(ctx_main, &cur_p); diff --git a/common/sampling.h b/common/sampling.h index e1279a894..95d875394 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -22,6 +22,7 @@ enum class llama_sampler_type : char { typedef struct llama_sampling_params { int32_t n_prev = 64; // number of previous tokens to remember int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. 
+ int32_t min_keep = 0; // 0 = disabled, otherwise samplers should return at least min_keep tokens int32_t top_k = 40; // <= 0 to use vocab size float top_p = 0.95f; // 1.0 = disabled float min_p = 0.05f; // 0.0 = disabled diff --git a/examples/server/README.md b/examples/server/README.md index ac5133d24..809e2d37c 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -199,6 +199,8 @@ node index.js `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) + `min_keep`: If greater than 0, force samplers to return N possible tokens at minimum (default: 0) + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) diff --git a/examples/server/public/index.html b/examples/server/public/index.html index b059c75f2..84038ddce 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -234,6 +234,7 @@ mirostat_eta: 0.1, // learning rate grammar: '', n_probs: 0, // no completion_probabilities, + min_keep: 0, // min probs from each sampler, image_data: [], cache_prompt: true, api_key: '' @@ -791,6 +792,9 @@
    ${IntField({ label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs })}
    +
    + ${IntField({ label: "Min Probabilities from each Sampler", max: 10, min: 0, name: "min_keep", value: params.value.min_keep })} +
    diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 4f2e9c898..22c344dd4 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -548,6 +548,7 @@ struct llama_server_context slot->params.seed = json_value(data, "seed", default_params.seed); slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + slot->sparams.min_keep = json_value(data, "min_keep", default_sparams.min_keep); if (slot->n_predict > 0 && slot->params.n_predict > slot->n_predict) { // Might be better to reject the request with a 400 ? @@ -1093,6 +1094,7 @@ struct llama_server_context {"stream", slot.params.stream}, {"logit_bias", slot.sparams.logit_bias}, {"n_probs", slot.sparams.n_probs}, + {"min_keep", slot.sparams.min_keep}, {"grammar", slot.sparams.grammar}, {"samplers", samplers_sequence} }; From 7ad554f90e735cf2a0f612ce44f9aa4fad6ae46a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 21:39:58 +0200 Subject: [PATCH 628/811] metal : fix unused warnings (#0) --- ggml-metal.metal | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/ggml-metal.metal b/ggml-metal.metal index a00962111..d0a85a192 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -4027,7 +4027,10 @@ void kernel_mul_mv_iq2_xxs_f32_impl( y4 += 32 * 32; } #else - // TODO + (void) x; + (void) y; + (void) yl; + (void) nb32; #endif for (int row = 0; row < N_DST; ++row) { @@ -4170,7 +4173,10 @@ void kernel_mul_mv_iq2_xs_f32_impl( y4 += 32 * 32; } #else - // TODO + (void) x; + (void) y; + (void) yl; + (void) nb32; #endif for (int row = 0; row < N_DST; ++row) { @@ -4306,7 +4312,10 @@ void kernel_mul_mv_iq3_xxs_f32_impl( y4 += 32 * 32; } #else - // TODO + (void) x; + (void) y; + (void) yl; + (void) nb32; #endif for (int row = 0; row < N_DST; ++row) { @@ -4424,7 +4433,10 @@ void kernel_mul_mv_iq1_s_f32_impl( y4 += 16 * 32; } #else - // TODO + (void) x; + (void) y; + (void) yl; + (void) nb32; #endif for (int row = 0; row < N_DST; ++row) { @@ -4659,6 +4671,8 @@ void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg const float dl = d * sc[0]; const float ml = min * sc[1]; #else + (void) get_scale_min_k4_just2; + q = q + 16 * (il&1); device const uint8_t * s = xb->scales; device const half2 * dh = (device const half2 *)xb->d; From b1de96824bdbeb91ea458abcb3e5478690ad0727 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 22:39:30 +0200 Subject: [PATCH 629/811] ci : fix wikitext url + compile warnings (#5569) ggml-ci --- README.md | 2 +- ci/run.sh | 4 ++-- examples/perplexity/perplexity.cpp | 4 ++-- ggml-quants.c | 6 +++--- scripts/get-wikitext-2.sh | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 0c4ee5a27..8c7bc2689 100644 --- a/README.md +++ b/README.md @@ -768,7 +768,7 @@ The time per token is measured on a MacBook M1 Pro 32GB RAM using 4 and 8 thread #### How to run -1. Download/extract: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research +1. Download/extract: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip 2. Run `./perplexity -m models/7B/ggml-model-q4_0.gguf -f wiki.test.raw` 3. 
Output: ``` diff --git a/ci/run.sh b/ci/run.sh index b94658c96..f3a29c2e9 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -219,7 +219,7 @@ function gg_run_open_llama_3b_v2 { gg_wget models-mnt/open-llama/3B-v2/ https://huggingface.co/openlm-research/open_llama_3b_v2/resolve/main/pytorch_model.bin gg_wget models-mnt/open-llama/3B-v2/ https://huggingface.co/openlm-research/open_llama_3b_v2/raw/main/generation_config.json - gg_wget models-mnt/wikitext/ https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip + gg_wget models-mnt/wikitext/ https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip unzip -o models-mnt/wikitext/wikitext-2-raw-v1.zip -d models-mnt/wikitext/ head -n 60 models-mnt/wikitext/wikitext-2-raw/wiki.test.raw > models-mnt/wikitext/wikitext-2-raw/wiki.test-60.raw @@ -401,7 +401,7 @@ function gg_run_open_llama_7b_v2 { gg_wget models-mnt/open-llama/7B-v2/ https://huggingface.co/openlm-research/open_llama_7b_v2/resolve/main/pytorch_model-00002-of-00002.bin gg_wget models-mnt/open-llama/7B-v2/ https://huggingface.co/openlm-research/open_llama_7b_v2/raw/main/generation_config.json - gg_wget models-mnt/wikitext/ https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip + gg_wget models-mnt/wikitext/ https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip unzip -o models-mnt/wikitext/wikitext-2-raw-v1.zip -d models-mnt/wikitext/ path_models="../models-mnt/open-llama/7B-v2" diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 74dcc642a..9ec989389 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -309,7 +309,7 @@ static void process_logits(int n_vocab, const float * logits, const int * tokens } static results_perplexity perplexity_v2(llama_context * ctx, const gpt_params & params) { - // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research + // Download: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` // Output: `perplexity: 13.5106 [114/114]` // BOS tokens will be added for each chunk before eval @@ -447,7 +447,7 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par return perplexity_v2(ctx, params); } - // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research + // Download: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` // Output: `perplexity: 13.5106 [114/114]` // BOS tokens will be added for each chunk before eval diff --git a/ggml-quants.c b/ggml-quants.c index 48f5294e1..43a8f1de4 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -1837,9 +1837,9 @@ static void quantize_row_q2_K_impl(const float * restrict x, block_q2_K * restri float sigma2 = sumx2/QK_K; for (int j = 0; j < QK_K/16; ++j) { const float * restrict qw = quant_weights + QK_K * i + 16*j; - for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); - for (int l = 0; l < 16; ++l) sw[j] += weight[l]; - scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + for (int l = 0; l < QK_K/16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); + for (int l = 0; l < QK_K/16; ++l) sw[j] += weight[l]; + scales[j] = 
make_qkx3_quants(QK_K/16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); } float dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); diff --git a/scripts/get-wikitext-2.sh b/scripts/get-wikitext-2.sh index ff96f331e..7ca760fa6 100755 --- a/scripts/get-wikitext-2.sh +++ b/scripts/get-wikitext-2.sh @@ -1,6 +1,6 @@ #!/bin/bash -wget https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip +wget https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip echo "Usage:" echo "" From 14278f55d2e2c6a53022075c7f2719b71e1cd61d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 22:58:57 +0200 Subject: [PATCH 630/811] ggml : restore vec dot stride arg names (#5453) --- ggml-quants.c | 76 +++++++++++++++++++++++++-------------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 43a8f1de4..3319d2ccf 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3855,7 +3855,7 @@ static inline __m128i get_scale_shuffle(int i) { } #endif -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -3866,8 +3866,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bbx); - UNUSED(bby); + UNUSED(bx); + UNUSED(by); UNUSED(bs); const block_q4_0 * restrict x = vx; @@ -4024,15 +4024,15 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r const __m128i tmp = _mm_loadu_si128((const __m128i *)x[i].qs); - __m128i bx = _mm_and_si128(lowMask, tmp); - __m128i by = _mm_loadu_si128((const __m128i *)y[i].qs); - bx = _mm_sub_epi8(bx, off); - const __m128i i32_0 = mul_sum_i8_pairs(bx, by); + __m128i bx_0 = _mm_and_si128(lowMask, tmp); + __m128i by_0 = _mm_loadu_si128((const __m128i *)y[i].qs); + bx_0 = _mm_sub_epi8(bx_0, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); - bx = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4)); - by = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); - bx = _mm_sub_epi8(bx, off); - const __m128i i32_1 = mul_sum_i8_pairs(bx, by); + bx_0 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4)); + by_0 = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); + bx_0 = _mm_sub_epi8(bx_0, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx_0, by_0); // Convert int32_t to float __m256 p = _mm256_cvtepi32_ps(MM256_SET_M128I(i32_0, i32_1)); @@ -4222,7 +4222,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4233,8 +4233,8 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bbx); - UNUSED(bby); + UNUSED(bx); + UNUSED(by); UNUSED(bs); const block_q4_1 * restrict x = vx; @@ -4440,7 +4440,7 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_0_q8_0(int n, 
float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -4448,8 +4448,8 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_0); assert(nrc == 1); UNUSED(nrc); - UNUSED(bbx); - UNUSED(bby); + UNUSED(bx); + UNUSED(by); UNUSED(bs); const block_q5_0 * restrict x = vx; @@ -4618,21 +4618,21 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r /* Compute combined scale for the block */ const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i bx_0 = bytes_from_nibbles_32(x[i].qs); const __m256i bxhi = bytes_from_bits_32(x[i].qh); __m128i bxhil = _mm256_castsi256_si128(bxhi); __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); bxhil = _mm_andnot_si128(bxhil, mask); bxhih = _mm_andnot_si128(bxhih, mask); - __m128i bxl = _mm256_castsi256_si128(bx); - __m128i bxh = _mm256_extractf128_si256(bx, 1); + __m128i bxl = _mm256_castsi256_si128(bx_0); + __m128i bxh = _mm256_extractf128_si256(bx_0, 1); bxl = _mm_or_si128(bxl, bxhil); bxh = _mm_or_si128(bxh, bxhih); - bx = MM256_SET_M128I(bxh, bxl); + bx_0 = MM256_SET_M128I(bxh, bxl); - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + const __m256i by_0 = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(bx_0, by_0); /* Multiply q with scale and accumulate */ acc = _mm256_add_ps(_mm256_mul_ps(d, q), acc); @@ -4731,7 +4731,7 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4739,8 +4739,8 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_1); assert(nrc == 1); UNUSED(nrc); - UNUSED(bbx); - UNUSED(bby); + UNUSED(bx); + UNUSED(by); UNUSED(bs); const block_q5_1 * restrict x = vx; @@ -4925,22 +4925,22 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i bx_0 = bytes_from_nibbles_32(x[i].qs); const __m256i bxhi = bytes_from_bits_32(x[i].qh); __m128i bxhil = _mm256_castsi256_si128(bxhi); __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); bxhil = _mm_and_si128(bxhil, mask); bxhih = _mm_and_si128(bxhih, mask); - __m128i bxl = _mm256_castsi256_si128(bx); - __m128i bxh = _mm256_extractf128_si256(bx, 1); + __m128i bxl = _mm256_castsi256_si128(bx_0); + __m128i bxh = _mm256_extractf128_si256(bx_0, 1); bxl = _mm_or_si128(bxl, bxhil); bxh = _mm_or_si128(bxh, bxhih); - bx = MM256_SET_M128I(bxh, bxl); + bx_0 = MM256_SET_M128I(bxh, bxl); const __m256 dy = _mm256_set1_ps(y[i].d); - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + const __m256i by_0 = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_us8_pairs_float(bx, by); + const __m256 q = 
mul_sum_us8_pairs_float(bx_0, by_0); acc = _mm256_add_ps(_mm256_mul_ps(q, _mm256_mul_ps(dx, dy)), acc); } @@ -5035,7 +5035,7 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -5046,8 +5046,8 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bbx); - UNUSED(bby); + UNUSED(bx); + UNUSED(by); UNUSED(bs); const block_q8_0 * restrict x = vx; @@ -5169,10 +5169,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; i++) { // load elements - vint8m1_t bx = __riscv_vle8_v_i8m1(x[i].qs, vl); - vint8m1_t by = __riscv_vle8_v_i8m1(y[i].qs, vl); + vint8m1_t bx_0 = __riscv_vle8_v_i8m1(x[i].qs, vl); + vint8m1_t by_0 = __riscv_vle8_v_i8m1(y[i].qs, vl); - vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx, by, vl); + vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx_0, by_0, vl); vint32m1_t v_zero = __riscv_vmv_v_x_i32m1(0, vl); vint32m1_t v_sum = __riscv_vwredsum_vs_i16m2_i32m1(vw_mul, v_zero, vl); From a0c2dad9d43456c677e205c6240a5f8afb0121ac Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 18 Feb 2024 16:21:52 -0500 Subject: [PATCH 631/811] build : pass all warning flags to nvcc via -Xcompiler (#5570) * build : pass all warning flags to nvcc via -Xcompiler * make : fix apparent mis-merge from #3952 * make : fix incorrect GF_CC_VER for CUDA host compiler --- CMakeLists.txt | 11 ++++++----- Makefile | 28 ++-------------------------- scripts/get-flags.mk | 2 +- 3 files changed, 9 insertions(+), 32 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 0c29b5d09..f5e7f2980 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -778,10 +778,7 @@ endif() set(CUDA_CXX_FLAGS "") if (LLAMA_CUBLAS) - set(CUDA_FLAGS ${CXX_FLAGS} -use_fast_math) - if (NOT MSVC) - list(APPEND CUDA_FLAGS -Wno-pedantic) - endif() + set(CUDA_FLAGS -use_fast_math) if (LLAMA_ALL_WARNINGS AND NOT MSVC) set(NVCC_CMD ${CMAKE_CUDA_COMPILER} .c) @@ -814,7 +811,11 @@ if (LLAMA_CUBLAS) message("-- CUDA host compiler is ${CUDA_CCID} ${CUDA_CCVER}") get_flags(${CUDA_CCID} ${CUDA_CCVER}) - list(APPEND CUDA_CXX_FLAGS ${GF_CXX_FLAGS}) # This is passed to -Xcompiler later + list(APPEND CUDA_CXX_FLAGS ${CXX_FLAGS} ${GF_CXX_FLAGS}) # This is passed to -Xcompiler later + endif() + + if (NOT MSVC) + list(APPEND CUDA_CXX_FLAGS -Wno-pedantic) endif() endif() diff --git a/Makefile b/Makefile index 901798606..f5f6d32a7 100644 --- a/Makefile +++ b/Makefile @@ -220,30 +220,6 @@ ifeq ($(LLAMA_FATAL_WARNINGS),1) MK_CXXFLAGS += -Werror endif -ifeq ($(CC_IS_CLANG), 1) - # clang options - MK_CFLAGS += -Wunreachable-code-break -Wunreachable-code-return - MK_HOST_CXXFLAGS += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi - - ifneq '' '$(and $(CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 030800)))' - MK_CFLAGS += -Wdouble-promotion - endif - ifneq '' '$(and $(CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 070300)))' - MK_CFLAGS += -Wdouble-promotion - endif -else - # gcc options - MK_CFLAGS += -Wdouble-promotion - MK_HOST_CXXFLAGS += -Wno-array-bounds - - ifeq ($(shell expr 
$(CC_VER) \>= 070100), 1) - MK_HOST_CXXFLAGS += -Wno-format-truncation - endif - ifeq ($(shell expr $(CC_VER) \>= 080100), 1) - MK_HOST_CXXFLAGS += -Wextra-semi - endif -endif - # this version of Apple ld64 is buggy ifneq '' '$(findstring dyld-1015.7,$(shell $(CC) $(LDFLAGS) -Wl,-v 2>&1))' MK_CPPFLAGS += -DHAVE_BUGGY_APPLE_LINKER @@ -468,7 +444,7 @@ ggml-cuda.o: ggml-cuda.cu ggml-cuda.h ifdef JETSON_EOL_MODULE_DETECT $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ else - $(NVCC) $(BASE_CXXFLAGS) $(NVCCFLAGS) -Wno-pedantic -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS @@ -579,7 +555,7 @@ override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) ifdef LLAMA_CUBLAS GF_CC := $(NVCC) $(NVCCFLAGS) 2>/dev/null .c -Xcompiler include scripts/get-flags.mk -CUDA_CXXFLAGS := $(GF_CXXFLAGS) +CUDA_CXXFLAGS := $(BASE_CXXFLAGS) $(GF_CXXFLAGS) -Wno-pedantic endif # diff --git a/scripts/get-flags.mk b/scripts/get-flags.mk index 596d7ead1..a742766d1 100644 --- a/scripts/get-flags.mk +++ b/scripts/get-flags.mk @@ -1,6 +1,6 @@ ifeq '' '$(findstring clang,$(shell $(GF_CC) --version))' GF_CC_IS_GCC = 1 - GF_CC_VER := $(shell { $(GF_CC) -dumpfullversion 2>/dev/null || $(GF_CC) -dumpversion; } | awk -F. '{ printf("%02d%02d%02d", $$1, $$2, $$3) }') + GF_CC_VER := $(shell { $(GF_CC) -dumpfullversion 2>/dev/null; echo; $(GF_CC) -dumpversion; } | awk -F. '/./ { printf("%02d%02d%02d", $$1, $$2, $$3); exit }') else GF_CC_IS_CLANG = 1 ifeq '' '$(findstring Apple,$(shell $(GF_CC) --version))' From f0d1fafc029a056cd765bdae58dcaa12312e9879 Mon Sep 17 00:00:00 2001 From: bmwl Date: Sun, 18 Feb 2024 23:38:32 -0800 Subject: [PATCH 632/811] ggml : android and old glibc NUMA incompatibility bugfixes (#5557) * #ifdef out some code NUMA blocks for Android due to lack of support * added in some __ANDROID__ if def gates around numa code and forced GLIBC prior to 2.29 to use a syscall for getcpu instead of the wrapper * Changed gates on numa platform specific stuff to __gnu_linux__ to skip any platforms without glibc * harmonizing #if defined blocks for numa code to __gnu_linux__ since that's the only model that's being followed anyways --------- Co-authored-by: root --- ggml.c | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/ggml.c b/ggml.c index 8224652a9..4ee2c5e11 100644 --- a/ggml.c +++ b/ggml.c @@ -23,6 +23,9 @@ #include #include #include +#if defined(__gnu_linux__) +#include +#endif #ifdef GGML_USE_METAL #include @@ -1971,7 +1974,7 @@ struct ggml_numa_nodes { uint32_t n_nodes; uint32_t total_cpus; // hardware threads on system uint32_t current_node; // node on which main process is execting -#ifdef __linux__ +#if defined(__gnu_linux__) cpu_set_t cpuset; // cpuset from numactl #else uint32_t cpuset; // no NUMA support outside of Linux at this time. 
Use a portable datatype @@ -2009,7 +2012,7 @@ inline static void ggml_critical_section_end(void) { atomic_fetch_sub(&g_state_barrier, 1); } -#ifdef __linux__ +#if defined(__gnu_linux__) static cpu_set_t ggml_get_numa_affinity(void) { cpu_set_t cpuset; pthread_t thread; @@ -2031,7 +2034,7 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) { return; } -#ifdef __linux__ +#if defined(__gnu_linux__) struct stat st; char path[256]; int rv; @@ -2063,7 +2066,13 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) { // figure out which node we're on uint current_cpu; - int getcpu_ret = getcpu(¤t_cpu, &g_state.numa.current_node); + int getcpu_ret = 0; +#if __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 28) + getcpu_ret = getcpu(¤t_cpu, &g_state.numa.current_node); +#else + // old glibc doesn't have a wrapper for this call. Fall back on direct syscall + getcpu_ret = syscall(SYS_getcpu,¤t_cpu,&g_state.numa.current_node); +#endif if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) { g_state.numa.n_nodes = 0; @@ -16734,7 +16743,7 @@ typedef pthread_t ggml_thread_t; #endif // Android's libc implementation "bionic" does not support setting affinity -#if defined(__linux__) && !defined(__BIONIC__) +#if defined(__gnu_linux__) static void set_numa_thread_affinity(int thread_n) { if (!ggml_is_numa()) { return; From 769a716e30ba1da46f709df1c00727d6869d30e7 Mon Sep 17 00:00:00 2001 From: Mirko185 Date: Mon, 19 Feb 2024 08:39:31 +0100 Subject: [PATCH 633/811] readme : update (#5572) Added 1.5-bit on README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8c7bc2689..70866e249 100644 --- a/README.md +++ b/README.md @@ -61,7 +61,7 @@ variety of hardware - locally and in the cloud. - Plain C/C++ implementation without any dependencies - Apple silicon is a first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks - AVX, AVX2 and AVX512 support for x86 architectures -- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use +- 1.5-bit, 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use - Custom CUDA kernels for running LLMs on NVIDIA GPUs (support for AMD GPUs via HIP) - Vulkan, SYCL, and (partial) OpenCL backend support - CPU+GPU hybrid inference to partially accelerate models larger than the total VRAM capacity From 3a9cb4ca6408c29423373dd6cd7aa78a58286c00 Mon Sep 17 00:00:00 2001 From: slaren Date: Mon, 19 Feb 2024 09:04:45 +0100 Subject: [PATCH 634/811] cuda, metal : fix nans in soft_max (#5574) * cuda : fix nans in soft_max * metal : fix nans in soft_max --------- Co-authored-by: Georgi Gerganov --- ggml-cuda.cu | 8 ++++---- ggml-metal.metal | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 933ebbc4e..eef213509 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6205,7 +6205,7 @@ static __global__ void soft_max_f32(const float * x, const float * mask, const f const int ix = rowx*ncols + col; const int iy = rowy*ncols + col; - const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + slope*pos[col]; + const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + (pos ? 
slope*pos[col] : 0.0f); vals[col] = val; max_val = max(max_val, val); @@ -9170,17 +9170,17 @@ static void ggml_cuda_op_soft_max( memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); // positions tensor - float * src2_dd = dst_dd; // default to avoid null checks in the kernel + float * src2_dd = nullptr; cuda_pool_alloc src2_f; ggml_tensor * src2 = dst->src[2]; const bool use_src2 = src2 != nullptr; if (use_src2) { - const bool src2_on_device = use_src2 && src2->backend == GGML_BACKEND_GPU; - ggml_tensor_extra_gpu * src2_extra = use_src2 ? (ggml_tensor_extra_gpu *) src2->extra : nullptr; + const bool src2_on_device = src2->backend == GGML_BACKEND_GPU; if (src2_on_device) { + ggml_tensor_extra_gpu * src2_extra = (ggml_tensor_extra_gpu *) src2->extra; src2_dd = (float *) src2_extra->data_device[g_main_device]; } else { src2_dd = src2_f.alloc(ggml_nelements(src2)); diff --git a/ggml-metal.metal b/ggml-metal.metal index d0a85a192..f0d77d446 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -392,7 +392,7 @@ kernel void kernel_soft_max( float lmax = -INFINITY; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]); + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)); } // find the max value in the block @@ -417,7 +417,7 @@ kernel void kernel_soft_max( // parallel sum float lsum = 0.0f; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)) - max_val); lsum += exp_psrc0; pdst[i00] = exp_psrc0; } @@ -495,7 +495,7 @@ kernel void kernel_soft_max_4( float4 lmax4 = -INFINITY; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]); + lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)); } const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); @@ -521,7 +521,7 @@ kernel void kernel_soft_max_4( // parallel sum float4 lsum4 = 0.0f; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? 
slope*ppos[i00] : 0.0f)) - max_val); lsum4 += exp_psrc4; pdst4[i00] = exp_psrc4; } From 11b12de39bd787c0494da0cd405958fdfedc29c4 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Mon, 19 Feb 2024 09:23:37 +0100 Subject: [PATCH 635/811] llama : add llama_chat_apply_template() (#5538) * llama: add llama_chat_apply_template * test-chat-template: remove dedundant vector * chat_template: do not use std::string for buffer * add clarification for llama_chat_apply_template * llama_chat_apply_template: add zephyr template * llama_chat_apply_template: correct docs * llama_chat_apply_template: use term "chat" everywhere * llama_chat_apply_template: change variable name to "tmpl" --- Makefile | 4 ++ llama.cpp | 117 +++++++++++++++++++++++++++++++++++ llama.h | 25 ++++++++ tests/CMakeLists.txt | 1 + tests/test-chat-template.cpp | 64 +++++++++++++++++++ 5 files changed, 211 insertions(+) create mode 100644 tests/test-chat-template.cpp diff --git a/Makefile b/Makefile index f5f6d32a7..59352eb53 100644 --- a/Makefile +++ b/Makefile @@ -867,3 +867,7 @@ tests/test-model-load-cancel: tests/test-model-load-cancel.cpp ggml.o llama.o te tests/test-autorelease: tests/test-autorelease.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) + +tests/test-chat-template: tests/test-chat-template.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) diff --git a/llama.cpp b/llama.cpp index 5cfebb3b1..143870645 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12508,6 +12508,123 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token return 0; } +// trim whitespace from the beginning and end of a string +static std::string trim(const std::string & str) { + size_t start = 0; + size_t end = str.size(); + while (start < end && isspace(str[start])) { + start += 1; + } + while (end > start && isspace(str[end - 1])) { + end -= 1; + } + return str.substr(start, end - start); +} + +// Simple version of "llama_apply_chat_template" that only works with strings +// This function uses heuristic checks to determine commonly used template. It is not a jinja parser. 
+static int32_t llama_chat_apply_template_internal( + const std::string & tmpl, + const std::vector & chat, + std::string & dest, bool add_ass) { + // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527 + std::stringstream ss; + if (tmpl.find("<|im_start|>") != std::string::npos) { + // chatml template + for (auto message : chat) { + ss << "<|im_start|>" << message->role << "\n" << message->content << "<|im_end|>\n"; + } + if (add_ass) { + ss << "<|im_start|>assistant\n"; + } + } else if (tmpl.find("[INST]") != std::string::npos) { + // llama2 template and its variants + // [variant] support system message + bool support_system_message = tmpl.find("<>") != std::string::npos; + // [variant] space before + after response + bool space_around_response = tmpl.find("' ' + eos_token") != std::string::npos; + // [variant] add BOS inside history + bool add_bos_inside_history = tmpl.find("bos_token + '[INST]") != std::string::npos; + // [variant] trim spaces from the input message + bool strip_message = tmpl.find("content.strip()") != std::string::npos; + // construct the prompt + bool is_inside_turn = true; // skip BOS at the beginning + ss << "[INST] "; + for (auto message : chat) { + std::string content = strip_message ? trim(message->content) : message->content; + std::string role(message->role); + if (!is_inside_turn) { + is_inside_turn = true; + ss << (add_bos_inside_history ? "[INST] " : "[INST] "); + } + if (role == "system") { + if (support_system_message) { + ss << "<>\n" << content << "\n<>\n\n"; + } else { + // if the model does not support system message, we still include it in the first message, but without <> + ss << content << "\n"; + } + } else if (role == "user") { + ss << content << " [/INST]"; + } else { + ss << (space_around_response ? " " : "") << content << (space_around_response ? " " : "") << ""; + is_inside_turn = false; + } + } + // llama2 templates seem to not care about "add_generation_prompt" + } else if (tmpl.find("<|user|>") != std::string::npos) { + // zephyr template + for (auto message : chat) { + ss << "<|" << message->role << "|>" << "\n" << message->content << "<|endoftext|>\n"; + } + if (add_ass) { + ss << "<|assistant|>\n"; + } + } else { + // template not supported + return -1; + } + dest = ss.str(); + return dest.size(); +} + +LLAMA_API int32_t llama_chat_apply_template( + const struct llama_model * model, + const char * tmpl, + const struct llama_chat_message * chat, + size_t n_msg, + bool add_ass, + char * buf, + int32_t length) { + std::string curr_tmpl(tmpl == nullptr ? 
"" : tmpl); + if (tmpl == nullptr) { + GGML_ASSERT(model != nullptr); + // load template from model + std::vector model_template(2048, 0); // longest known template is about 1200 bytes + std::string template_key = "tokenizer.chat_template"; + int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), curr_tmpl.size()); + if (res < 0) { + // worst case: there is no information about template, we will use chatml by default + curr_tmpl = "<|im_start|>"; // see llama_chat_apply_template_internal + } else { + curr_tmpl = std::string(model_template.data(), model_template.size()); + } + } + // format the chat to string + std::vector chat_vec; + chat_vec.resize(n_msg); + for (size_t i = 0; i < n_msg; i++) { + chat_vec[i] = &chat[i]; + } + std::string formatted_chat; + int32_t res = llama_chat_apply_template_internal(curr_tmpl, chat_vec, formatted_chat, add_ass); + if (res < 0) { + return res; + } + strncpy(buf, formatted_chat.c_str(), length); + return res; +} + struct llama_timings llama_get_timings(struct llama_context * ctx) { struct llama_timings result = { /*.t_start_ms =*/ 1e-3 * ctx->t_start_us, diff --git a/llama.h b/llama.h index 5a97abcc9..77a84c18a 100644 --- a/llama.h +++ b/llama.h @@ -305,6 +305,12 @@ extern "C" { int32_t n_eval; }; + // used in chat template + typedef struct llama_chat_message { + const char * role; + const char * content; + } llama_chat_message; + // Helpers for getting default parameters LLAMA_API struct llama_model_params llama_model_default_params(void); LLAMA_API struct llama_context_params llama_context_default_params(void); @@ -699,6 +705,25 @@ extern "C" { char * buf, int32_t length); + /// Apply chat template. Inspired by hf apply_chat_template() on python. + /// Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model" + /// NOTE: This function only support some known jinja templates. It is not a jinja parser. + /// @param tmpl A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead. + /// @param chat Pointer to a list of multiple llama_chat_message + /// @param n_msg Number of llama_chat_message in this chat + /// @param add_ass Whether to end the prompt with the token(s) that indicate the start of an assistant message. + /// @param buf A buffer to hold the output formatted prompt. The recommended alloc size is 2 * (total number of characters of all messages) + /// @param length The size of the allocated buffer + /// @return The total number of bytes of the formatted prompt. If is it larger than the size of buffer, you may need to re-alloc it and then re-apply the template. 
+ LLAMA_API int32_t llama_chat_apply_template( + const struct llama_model * model, + const char * tmpl, + const struct llama_chat_message * chat, + size_t n_msg, + bool add_ass, + char * buf, + int32_t length); + // // Grammar // diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 3e40a78cd..10326d531 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -28,6 +28,7 @@ endfunction() llama_build_and_test_executable(test-quantize-fns.cpp) llama_build_and_test_executable(test-quantize-perf.cpp) llama_build_and_test_executable(test-sampling.cpp) +llama_build_and_test_executable(test-chat-template.cpp) llama_build_executable(test-tokenizer-0-llama.cpp) llama_test_executable (test-tokenizer-0-llama test-tokenizer-0-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf) diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp new file mode 100644 index 000000000..9830650d4 --- /dev/null +++ b/tests/test-chat-template.cpp @@ -0,0 +1,64 @@ +#include +#include +#include +#include + +#undef NDEBUG +#include + +#include "llama.h" + +int main(void) { + llama_chat_message conversation[] = { + {"system", "You are a helpful assistant"}, + {"user", "Hello"}, + {"assistant", "Hi there"}, + {"user", "Who are you"}, + {"assistant", " I am an assistant "}, + {"user", "Another question"}, + }; + size_t message_count = 6; + std::vector templates = { + // teknium/OpenHermes-2.5-Mistral-7B + "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>' + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}", + // mistralai/Mistral-7B-Instruct-v0.2 + "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", + // TheBloke/FusionNet_34Bx2_MoE-AWQ + "{%- for idx in range(0, messages|length) -%}\\n{%- if messages[idx]['role'] == 'user' -%}\\n{%- if idx > 1 -%}\\n{{- bos_token + '[INST] ' + messages[idx]['content'] + ' [/INST]' -}}\\n{%- else -%}\\n{{- messages[idx]['content'] + ' [/INST]' -}}\\n{%- endif -%}\\n{% elif messages[idx]['role'] == 'system' %}\\n{{- '[INST] <>\\\\n' + messages[idx]['content'] + '\\\\n<>\\\\n\\\\n' -}}\\n{%- elif messages[idx]['role'] == 'assistant' -%}\\n{{- ' ' + messages[idx]['content'] + ' ' + eos_token -}}\\n{% endif %}\\n{% endfor %}", + // bofenghuang/vigogne-2-70b-chat + "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' 
%}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<>\\\\n' + system_message + '\\\\n<>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<>\\\\n' + content.strip() + '\\\\n<>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}", + }; + std::vector expected_substr = { + "<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant", + "[/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + "[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + "[/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + }; + std::vector formatted_chat(1024); + int32_t res; + + // test invalid chat template + res = llama_chat_apply_template(nullptr, "INVALID TEMPLATE", conversation, message_count, true, formatted_chat.data(), formatted_chat.size()); + assert(res < 0); + + for (size_t i = 0; i < templates.size(); i++) { + std::string custom_template = templates[i]; + std::string substr = expected_substr[i]; + formatted_chat.resize(1024); + res = llama_chat_apply_template( + nullptr, + custom_template.c_str(), + conversation, + message_count, + true, + formatted_chat.data(), + formatted_chat.size() + ); + formatted_chat.resize(res); + std::string output(formatted_chat.data(), formatted_chat.size()); + std::cout << output << "\n-------------------------\n"; + // expect the "formatted_chat" to contain pre-defined strings + assert(output.find(substr) != std::string::npos); + } + return 0; +} From 4480542b2271ba1438f0daff8e5f3a74b1dc8609 Mon Sep 17 00:00:00 2001 From: NawafAlansari <72708095+NawafAlansari@users.noreply.github.com> Date: Mon, 19 Feb 2024 03:25:38 -0500 Subject: [PATCH 636/811] baby-llama : allocate graphs in ggml_context (#5573) * Fixed the baby-llama issue (see issue #4830) * minor : fix whitespaces --------- Co-authored-by: Georgi Gerganov --- examples/baby-llama/baby-llama.cpp | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index e7d2ad592..65bb238a0 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -1533,16 +1533,17 @@ int main(int argc, char ** argv) { int n_past = 0; - ggml_cgraph gf = {}; + struct ggml_cgraph * gf = NULL; + gf = ggml_new_graph_custom(ctx0, LLAMA_TRAIN_MAX_NODES, true); get_example_targets_batch(ctx0, 64*ex+0, tokens_input, targets); - struct ggml_tensor * logits = forward_batch(&model, &kv_self, ctx0, &gf, tokens_input, n_tokens, n_past, n_batch); + struct ggml_tensor * logits = forward_batch(&model, &kv_self, ctx0, gf, tokens_input, n_tokens, n_past, n_batch); // struct ggml_tensor * e = cross_entropy_loss(ctx0, targets, logits); struct ggml_tensor * e = square_error_loss(ctx0, targets, logits); - ggml_build_forward_expand(&gf, e); - ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1); + 
ggml_build_forward_expand(gf, e); + ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1); float error_before_opt = ggml_get_f32_1d(e, 0); @@ -1552,8 +1553,8 @@ int main(int argc, char ** argv) { opt_params_lbfgs.lbfgs.n_iter = 16; ggml_opt(ctx0, opt_params_lbfgs, e); // - ggml_build_forward_expand(&gf, e); - ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1); + ggml_build_forward_expand(gf, e); + ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1); float error_after_opt = ggml_get_f32_1d(e, 0); @@ -1600,13 +1601,14 @@ int main(int argc, char ** argv) { }; struct ggml_context * ctx0 = ggml_init(params); - ggml_cgraph gf = {}; + struct ggml_cgraph * gf = NULL; + gf = ggml_new_graph_custom(ctx0, LLAMA_TRAIN_MAX_NODES, true); int n_past = 0; - struct ggml_tensor * logits = forward(&model, &kv_self, ctx0, &gf, tokens_input, sample_ctx, n_past); + struct ggml_tensor * logits = forward(&model, &kv_self, ctx0, gf, tokens_input, sample_ctx, n_past); - ggml_build_forward_expand(&gf, logits); - ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1); + ggml_build_forward_expand(gf, logits); + ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1); struct ggml_tensor * best_samples = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, sample_ctx); struct ggml_tensor * probs = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_vocab, sample_ctx); From 70847553963c85e86051d06df848236829f5f951 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Mon, 19 Feb 2024 09:31:59 +0100 Subject: [PATCH 637/811] llava : avoid changing the original BakLLaVA model (#5577) This is a follup of Commit fc0c8d286a533363a9a663510b62af85ffad58b3 ("llava : update surgery script to not remove tensors") but this time the change is to the BakLLaVA specific part of the surgery script. I've been able to test this using SkunkworksAI/BakLLaVA-1 and it works as expected using the instructions in README.md. Signed-off-by: Daniel Bevenius --- examples/llava/llava-surgery.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py index 8b7a62fba..4f2da3bee 100644 --- a/examples/llava/llava-surgery.py +++ b/examples/llava/llava-surgery.py @@ -25,9 +25,6 @@ if len(clip_tensors) > 0: clip = {name.replace("vision_tower.vision_tower.", ""): checkpoint[name].float() for name in clip_tensors} torch.save(clip, f"{args.model}/llava.clip") - # remove these tensors - for name in clip_tensors: - del checkpoint[name] # added tokens should be removed to be able to convert Mistral models if os.path.exists(f"{args.model}/added_tokens.json"): @@ -35,7 +32,6 @@ if len(clip_tensors) > 0: f.write("{}\n") - torch.save(checkpoint, path) print("Done!") print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.") From f53119cec4f073b6d214195ecbe1fad3abdf2b34 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 10:34:10 +0200 Subject: [PATCH 638/811] minor : fix trailing whitespace (#5538) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 143870645..5de07dfa9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12525,7 +12525,7 @@ static std::string trim(const std::string & str) { // This function uses heuristic checks to determine commonly used template. It is not a jinja parser. 
static int32_t llama_chat_apply_template_internal( const std::string & tmpl, - const std::vector & chat, + const std::vector & chat, std::string & dest, bool add_ass) { // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527 std::stringstream ss; From 13e2c771aa4212cd5405cf310203848d50f7f859 Mon Sep 17 00:00:00 2001 From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> Date: Mon, 19 Feb 2024 14:45:18 +0530 Subject: [PATCH 639/811] cmake : remove obsolete sycl compile flags (#5581) * rm unwanted sycl compile options * fix bug * fix bug * format fix --- CMakeLists.txt | 7 ------- 1 file changed, 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f5e7f2980..40a098d01 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -741,13 +741,6 @@ function(get_flags CCID CCVER) if (CCVER VERSION_GREATER_EQUAL 8.1.0) list(APPEND CXX_FLAGS -Wextra-semi) endif() - elseif (CCID MATCHES "Intel") - if (NOT LLAMA_SYCL) - # enable max optimization level when using Intel compiler - set(C_FLAGS -ipo -O3 -static -fp-model=fast -flto -fno-stack-protector) - set(CXX_FLAGS -ipo -O3 -static -fp-model=fast -flto -fno-stack-protector) - add_link_options(-fuse-ld=lld -static-intel) - endif() endif() set(GF_C_FLAGS ${C_FLAGS} PARENT_SCOPE) From 70d45af0efce9ed360e1858b827989d971dd9caf Mon Sep 17 00:00:00 2001 From: valiray <133289098+valiray@users.noreply.github.com> Date: Mon, 19 Feb 2024 02:37:10 -0800 Subject: [PATCH 640/811] readme : fix typo in README-sycl.md (#5353) --- README-sycl.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README-sycl.md b/README-sycl.md index e3a8e726e..dd5bf9dea 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -272,7 +272,7 @@ Please install [Visual Studio](https://visualstudio.microsoft.com/) which impact a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). -Recommend to install to default folder: **/opt/intel/oneapi**. +Recommend to install to default folder: **C:\Program Files (x86)\Intel\oneAPI**. Following guide uses the default folder as example. If you use other folder, please modify the following guide info with your folder. From 68a6b98b3c8af7e5baade3ee45fe1d2c7b9323a9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 13:41:51 +0200 Subject: [PATCH 641/811] make : fix CUDA build (#5580) --- Makefile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 59352eb53..29fd2ca9c 100644 --- a/Makefile +++ b/Makefile @@ -97,9 +97,10 @@ endif # # keep standard at C11 and C++11 -MK_CPPFLAGS = -I. -Icommon -MK_CFLAGS = -std=c11 -fPIC -MK_CXXFLAGS = -std=c++11 -fPIC +MK_CPPFLAGS = -I. -Icommon +MK_CFLAGS = -std=c11 -fPIC +MK_CXXFLAGS = -std=c++11 -fPIC +MK_NVCCFLAGS = -std=c++11 # -Ofast tends to produce faster code, but may not be available for some compilers. 
ifdef LLAMA_FAST From d0e3ce51f45bd6a646da1952d7e5d143a087db3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:45:41 +0200 Subject: [PATCH 642/811] ci : enable -Werror for CUDA builds (#5579) * cmake : pass -Werror through -Xcompiler ggml-ci * make, cmake : enable CUDA errors on warnings ggml-ci --- CMakeLists.txt | 33 +++++++++++++++++++-------------- Makefile | 5 ++++- ggml-cuda.cu | 50 ++++++++++++++++++++++++++------------------------ 3 files changed, 49 insertions(+), 39 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 40a098d01..168b133f4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -145,14 +145,6 @@ set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) -if (LLAMA_FATAL_WARNINGS) - if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") - add_compile_options(-Werror) - elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") - add_compile_options(/WX) - endif() -endif() - # enable libstdc++ assertions for debug builds if (CMAKE_SYSTEM_NAME MATCHES "Linux") add_compile_definitions($<$:_GLIBCXX_ASSERTIONS>) @@ -747,15 +739,24 @@ function(get_flags CCID CCVER) set(GF_CXX_FLAGS ${CXX_FLAGS} PARENT_SCOPE) endfunction() +if (LLAMA_FATAL_WARNINGS) + if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + list(APPEND C_FLAGS -Werror) + list(APPEND CXX_FLAGS -Werror) + elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") + add_compile_options(/WX) + endif() +endif() + if (LLAMA_ALL_WARNINGS) if (NOT MSVC) - set(WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) - set(C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes - -Werror=implicit-int -Werror=implicit-function-declaration) - set(CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn) + list(APPEND WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) + list(APPEND C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes + -Werror=implicit-int -Werror=implicit-function-declaration) + list(APPEND CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn) - set(C_FLAGS ${WARNING_FLAGS} ${C_FLAGS}) - set(CXX_FLAGS ${WARNING_FLAGS} ${CXX_FLAGS}) + list(APPEND C_FLAGS ${WARNING_FLAGS}) + list(APPEND CXX_FLAGS ${WARNING_FLAGS}) get_flags(${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}) @@ -773,6 +774,10 @@ set(CUDA_CXX_FLAGS "") if (LLAMA_CUBLAS) set(CUDA_FLAGS -use_fast_math) + if (LLAMA_FATAL_WARNINGS) + list(APPEND CUDA_FLAGS -Werror all-warnings) + endif() + if (LLAMA_ALL_WARNINGS AND NOT MSVC) set(NVCC_CMD ${CMAKE_CUDA_COMPILER} .c) if (NOT CMAKE_CUDA_HOST_COMPILER STREQUAL "") diff --git a/Makefile b/Makefile index 29fd2ca9c..63b4af9ba 100644 --- a/Makefile +++ b/Makefile @@ -217,7 +217,7 @@ MK_CFLAGS += $(WARN_FLAGS) -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmis MK_CXXFLAGS += $(WARN_FLAGS) -Wmissing-declarations -Wmissing-noreturn ifeq ($(LLAMA_FATAL_WARNINGS),1) - MK_CFLAGS += -Werror + MK_CFLAGS += -Werror MK_CXXFLAGS += -Werror endif @@ -385,6 +385,9 @@ ifdef LLAMA_CUBLAS MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o MK_NVCCFLAGS += -use_fast_math +ifdef LLAMA_FATAL_WARNINGS + MK_NVCCFLAGS += -Werror all-warnings +endif # LLAMA_FATAL_WARNINGS ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif 
# JETSON_EOL_MODULE_DETECT diff --git a/ggml-cuda.cu b/ggml-cuda.cu index eef213509..e091dbdc1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -651,18 +651,18 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } -static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); - } - return a; -#else - (void) a; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -} +//static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { +//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +//#pragma unroll +// for (int mask = 16; mask > 0; mask >>= 1) { +// a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); +// } +// return a; +//#else +// (void) a; +// NO_DEVICE_CODE; +//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +//} static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll @@ -672,18 +672,18 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } -static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); - } - return x; -#else - (void) x; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -} +//static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { +//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +//#pragma unroll +// for (int mask = 16; mask > 0; mask >>= 1) { +// x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); +// } +// return x; +//#else +// (void) x; +// NO_DEVICE_CODE; +//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +//} static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; @@ -4641,10 +4641,12 @@ static __device__ __forceinline__ float vec_dot_iq2_xs_q8_1( const float d = (float)bq2->d * __low2float(bq8_1[ib32].ds) * 0.25f; return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); #else + (void) ksigns64; assert(false); return 0.f; #endif #else + (void) ksigns64; assert(false); return 0.f; #endif From 890559ab28e354052e16e770155ad007fd0856e8 Mon Sep 17 00:00:00 2001 From: Didzis Gosko Date: Sun, 11 Feb 2024 16:41:41 +0200 Subject: [PATCH 643/811] metal : option to embed MSL source into compiled binary (whisper/1842) * ggml : embed Metal library source (ggml-metal.metal) into binary enable by setting WHISPER_EMBED_METAL_LIBRARY * rename the build option * rename the preprocessor directive * generate Metal library embedding assembly on-fly during build process --- ggml-metal.m | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index f3c1fff8f..956e323a0 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -277,6 +277,14 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { return 
NULL; } } else { +#if GGML_METAL_EMBED_LIBRARY + GGML_METAL_LOG_INFO("%s: using embedded metal library\n", __func__); + + extern const char ggml_metallib_start[]; + extern const char ggml_metallib_end[]; + + NSString * src = [[NSString alloc] initWithBytes:ggml_metallib_start length:(ggml_metallib_end-ggml_metallib_start) encoding:NSUTF8StringEncoding]; +#else GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); NSString * sourcePath; @@ -299,6 +307,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_ERROR("%s: error: %s\n", __func__, [[error description] UTF8String]); return NULL; } +#endif @autoreleasepool { // dictionary of preprocessor macros From a3145bdc305422973e25f0b066da6f469ed5dc45 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:53:48 +0200 Subject: [PATCH 644/811] ggml-alloc : apply ggml/731 --- ggml-alloc.c | 116 ++++++++++++++++++++++++++++++++------------------- 1 file changed, 73 insertions(+), 43 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index d4123564f..e675306c8 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -377,6 +377,9 @@ struct ggml_gallocr { struct node_alloc * node_allocs; // [n_nodes] int n_nodes; + + struct tensor_alloc * leaf_allocs; // [n_leafs] + int n_leafs; }; ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { @@ -427,6 +430,7 @@ void ggml_gallocr_free(ggml_gallocr_t galloc) { free(galloc->buffers); free(galloc->buf_tallocs); free(galloc->node_allocs); + free(galloc->leaf_allocs); free(galloc); } @@ -464,7 +468,7 @@ static void ggml_gallocr_allocate_node(ggml_gallocr_t galloc, struct ggml_tensor for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * parent = node->src[i]; if (parent == NULL) { - break; + continue; } // if the node's data is external, then we cannot re-use it @@ -544,22 +548,8 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr memset(galloc->hash_set.keys, 0, galloc->hash_set.size * sizeof(struct ggml_tensor *)); memset(galloc->hash_values, 0, galloc->hash_set.size * sizeof(struct hash_node)); - // allocate all graph inputs first to avoid overwriting them - for (int i = 0; i < graph->n_nodes; i++) { - if (graph->nodes[i]->flags & GGML_TENSOR_FLAG_INPUT) { - ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); - } - for (int j = 0; j < GGML_MAX_SRC; j++) { - if (graph->nodes[i]->src[j] == NULL) { - continue; - } - if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { - ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); - } - } - } - // count number of children and views + // allocate all graph inputs and leafs first to avoid overwriting them for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -568,14 +558,37 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr ggml_gallocr_hash_get(galloc, view_src)->n_views += 1; } - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - ggml_gallocr_hash_get(galloc, parent)->n_children += 1; + if (node->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); } - } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + + ggml_gallocr_hash_get(galloc, 
src)->n_children += 1; + + // allocate explicit inputs and leafs + if (src->flags & GGML_TENSOR_FLAG_INPUT || src->op == GGML_OP_NONE) { + ggml_gallocr_allocate_node(galloc, src, get_node_buffer_id(node_buffer_ids, i)); + } + } + } + + // allocate the remaining leafs that are unused on the graph + // these are effectively static tensors that the application is not using in the graph, but may still want to allocate for other purposes + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, leaf); + + if (hn->n_children == 0) { + assert(!hn->allocated); + // since buffer ids are only given for nodes, these leafs are always allocated in the first buffer + ggml_gallocr_allocate_node(galloc, leaf, 0); + } + } // allocate tensors for (int i = 0; i < graph->n_nodes; i++) { @@ -586,7 +599,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } ggml_gallocr_allocate_node(galloc, parent, buffer_id); } @@ -598,7 +611,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } AT_PRINTF("%s", parent->name); if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { @@ -611,7 +624,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); p_hn->n_children -= 1; @@ -696,6 +709,18 @@ bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, c } } } + if (galloc->n_leafs < graph->n_leafs) { + free(galloc->leaf_allocs); + galloc->leaf_allocs = calloc(sizeof(struct tensor_alloc), graph->n_leafs); + GGML_ASSERT(galloc->leaf_allocs != NULL); + } + galloc->n_leafs = graph->n_leafs; + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, leaf); + galloc->leaf_allocs[i].offset = hn->offset; + galloc->leaf_allocs[i].size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], leaf); + } // reallocate buffers if needed for (int i = 0; i < galloc->n_buffers; i++) { @@ -722,8 +747,8 @@ bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph *graph) { return ggml_gallocr_reserve_n(galloc, graph, NULL); } -static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * node_alloc, struct tensor_alloc * tensor_alloc) { - assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); +static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id, struct tensor_alloc * tensor_alloc) { + assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], node) <= tensor_alloc->size_max); if (node->view_src != NULL) { if (node->buffer == NULL) { @@ -732,29 +757,20 @@ static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * // this tensor was allocated without ggml-backend return; } - ggml_backend_view_init(galloc->buffers[node_alloc->buffer_id], node); 
+ ggml_backend_view_init(galloc->buffers[buffer_id], node); } } else { if (node->data == NULL) { assert(tensor_alloc->offset != SIZE_MAX); - assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); - void * base = ggml_backend_buffer_get_base(galloc->buffers[node_alloc->buffer_id]); + assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], node) <= tensor_alloc->size_max); + void * base = ggml_backend_buffer_get_base(galloc->buffers[buffer_id]); void * addr = (char *)base + tensor_alloc->offset; - ggml_backend_tensor_alloc(galloc->buffers[node_alloc->buffer_id], node, addr); + ggml_backend_tensor_alloc(galloc->buffers[buffer_id], node, addr); } else { if (node->buffer == NULL) { // this tensor was allocated without ggml-backend return; } - -#ifndef NDEBUG - size_t offset = - (char *)node->data - - (char *)ggml_backend_buffer_get_base(node->buffer); - size_t size = ggml_backend_buffer_get_alloc_size(node->buffer, node); - assert(tensor_alloc->offset == SIZE_MAX || offset == tensor_alloc->offset); - assert(tensor_alloc->offset == SIZE_MAX || size <= tensor_alloc->size_max); -#endif } } } @@ -773,6 +789,13 @@ static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph return true; } + if (galloc->n_leafs != graph->n_leafs) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of leafs\n", __func__); +#endif + return true; + } + for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; struct node_alloc * node_alloc = &galloc->node_allocs[i]; @@ -827,6 +850,7 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) } // allocate the graph tensors from the previous assignments + // nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; struct node_alloc * node_alloc = &galloc->node_allocs[i]; @@ -835,9 +859,15 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) if (src == NULL) { continue; } - ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); + ggml_gallocr_init_tensor(galloc, src, node_alloc->buffer_id, &node_alloc->src[j]); } - ggml_gallocr_init_tensor(galloc, node, node_alloc, &node_alloc->dst); + ggml_gallocr_init_tensor(galloc, node, node_alloc->buffer_id, &node_alloc->dst); + } + // leafs + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct tensor_alloc * leaf_alloc = &galloc->leaf_allocs[i]; + ggml_gallocr_init_tensor(galloc, leaf, 0, leaf_alloc); } return true; From 337c9cbd52918ae5fb9a9d9e25d7fae4e238c9f1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:54:21 +0200 Subject: [PATCH 645/811] sync : ggml ggml-ci --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 7a23ab162..733d8f95b 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -5070f078a67c18c11736e78316ab715ca9afde16 +818eeb8a3be99125746a90ec63af8f51516a2ec6 From 6fd413791a754598a54a366145960f2e27eec015 Mon Sep 17 00:00:00 2001 From: slaren Date: Mon, 19 Feb 2024 14:02:36 +0100 Subject: [PATCH 646/811] llava : replace ggml_cpy with ggml_cont --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 98d512f67..1a02fde32 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -618,7 
+618,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 KQV = ggml_reshape_4d(ctx0, KQV, d_head, num_positions, n_head, batch_size); KQV = ggml_cont(ctx0, ggml_permute(ctx0, KQV, 0, 2, 1, 3)); - cur = ggml_cpy(ctx0, KQV, ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size)); + cur = ggml_cont_3d(ctx0, KQV, hidden_size, num_positions, batch_size); } // attention output From 1387cf60f758efb218fa06b670182c38ff149b7b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 15:23:17 +0200 Subject: [PATCH 647/811] llava : remove extra cont (#5587) --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 1a02fde32..ef9e4ba7a 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -616,7 +616,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 KQ = ggml_soft_max_inplace(ctx0, KQ); struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ); KQV = ggml_reshape_4d(ctx0, KQV, d_head, num_positions, n_head, batch_size); - KQV = ggml_cont(ctx0, ggml_permute(ctx0, KQV, 0, 2, 1, 3)); + KQV = ggml_permute(ctx0, KQV, 0, 2, 1, 3); cur = ggml_cont_3d(ctx0, KQV, hidden_size, num_positions, batch_size); } From 9d679f0fccd4030779ed3c7684a40122fe41806c Mon Sep 17 00:00:00 2001 From: nopperl <54780682+nopperl@users.noreply.github.com> Date: Mon, 19 Feb 2024 14:14:07 +0000 Subject: [PATCH 648/811] examples : support minItems/maxItems in JSON grammar converter (#5039) * support minLength and maxLength in JSON schema grammar converter * Update examples/json-schema-to-grammar.py --------- Co-authored-by: Georgi Gerganov --- examples/json-schema-to-grammar.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/examples/json-schema-to-grammar.py b/examples/json-schema-to-grammar.py index 2a4cb65bc..6a977f031 100755 --- a/examples/json-schema-to-grammar.py +++ b/examples/json-schema-to-grammar.py @@ -87,7 +87,21 @@ class SchemaConverter: elif schema_type == 'array' and 'items' in schema: # TODO `prefixItems` keyword item_rule_name = self.visit(schema['items'], f'{name}{"-" if name else ""}item') - rule = f'"[" space ({item_rule_name} ("," space {item_rule_name})*)? "]" space' + list_item_operator = f'("," space {item_rule_name})' + successive_items = "" + min_items = schema.get("minItems", 0) + if min_items > 0: + first_item = f"({item_rule_name})" + successive_items = list_item_operator * (min_items - 1) + min_items -= 1 + else: + first_item = f"({item_rule_name})?" + max_items = schema.get("maxItems") + if max_items is not None and max_items > min_items: + successive_items += (list_item_operator + "?") * (max_items - min_items - 1) + else: + successive_items += list_item_operator + "*" + rule = f'"[" space {first_item} {successive_items} "]" space' return self._add_rule(rule_name, rule) else: From f24ed14ee0ce28dfe98115c378b37da144912016 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Mon, 19 Feb 2024 15:54:12 -0500 Subject: [PATCH 649/811] make : pass CPPFLAGS directly to nvcc, not via -Xcompiler (#5598) --- Makefile | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 63b4af9ba..db5df1b32 100644 --- a/Makefile +++ b/Makefile @@ -446,9 +446,9 @@ ifdef LLAMA_CUDA_CCBIN endif ggml-cuda.o: ggml-cuda.cu ggml-cuda.h ifdef JETSON_EOL_MODULE_DETECT - $(NVCC) -I. 
-Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) $(CPPFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ else - $(NVCC) $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) $(NVCCFLAGS) $(CPPFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS @@ -549,9 +549,10 @@ GF_CC := $(CC) include scripts/get-flags.mk # combine build flags with cmdline overrides -override CFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CFLAGS) $(GF_CFLAGS) $(CFLAGS) -BASE_CXXFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CXXFLAGS) $(CXXFLAGS) -override CXXFLAGS := $(BASE_CXXFLAGS) $(HOST_CXXFLAGS) $(GF_CXXFLAGS) +override CPPFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) +override CFLAGS := $(CPPFLAGS) $(MK_CFLAGS) $(GF_CFLAGS) $(CFLAGS) +BASE_CXXFLAGS := $(MK_CXXFLAGS) $(CXXFLAGS) +override CXXFLAGS := $(BASE_CXXFLAGS) $(HOST_CXXFLAGS) $(GF_CXXFLAGS) $(CPPFLAGS) override NVCCFLAGS := $(MK_NVCCFLAGS) $(NVCCFLAGS) override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) From 40c3a6c1e11040088b4a1ce0abc4651cb3011dd4 Mon Sep 17 00:00:00 2001 From: slaren Date: Mon, 19 Feb 2024 23:40:26 +0100 Subject: [PATCH 650/811] cuda : ignore peer access already enabled errors (#5597) * cuda : ignore peer access already enabled errors * fix hip --- ggml-cuda.cu | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e091dbdc1..6caae56b0 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -54,6 +54,8 @@ #define cudaDeviceProp hipDeviceProp_t #define cudaDeviceSynchronize hipDeviceSynchronize #define cudaError_t hipError_t +#define cudaErrorPeerAccessAlreadyEnabled hipErrorPeerAccessAlreadyEnabled +#define cudaErrorPeerAccessNotEnabled hipErrorPeerAccessNotEnabled #define cudaEventCreateWithFlags hipEventCreateWithFlags #define cudaEventDisableTiming hipEventDisableTiming #define cudaEventRecord hipEventRecord @@ -9325,9 +9327,15 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { CUDA_CHECK(cudaDeviceCanAccessPeer(&can_access_peer, id, id_other)); if (can_access_peer) { if (enable_peer_access) { - CUDA_CHECK(cudaDeviceEnablePeerAccess(id_other, 0)); + cudaError_t err = cudaDeviceEnablePeerAccess(id_other, 0); + if (err != cudaErrorPeerAccessAlreadyEnabled) { + CUDA_CHECK(err); + } } else { - CUDA_CHECK(cudaDeviceDisablePeerAccess(id_other)); + cudaError_t err = cudaDeviceDisablePeerAccess(id_other); + if (err != cudaErrorPeerAccessNotEnabled) { + CUDA_CHECK(err); + } } } } @@ -10999,10 +11007,10 @@ GGML_CALL static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backe UNUSED(buffer); } -// unused at the moment -//static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) { -// return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; -//} +static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; + UNUSED(ggml_backend_buffer_is_cuda_split); // only used in debug builds currently, avoid unused function warning in release builds +} GGML_CALL static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t 
buffer) { ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; @@ -11390,7 +11398,7 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); - assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); + assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) || ggml_backend_buffer_is_cuda_split(node->src[j]->buffer)); assert(node->src[j]->extra != nullptr); } } From 5dde5408978eda22242b87e22e306d1c2d1a5834 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Sat, 3 Feb 2024 17:56:46 +0000 Subject: [PATCH 651/811] Allow for Vulkan build with Accelerate. Closes #5304 --- ggml.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.c b/ggml.c index 4ee2c5e11..d129df505 100644 --- a/ggml.c +++ b/ggml.c @@ -273,6 +273,8 @@ inline static void * ggml_calloc(size_t num, size_t size) { #include #if defined(GGML_USE_CLBLAST) // allow usage of CLBlast alongside Accelerate functions #include "ggml-opencl.h" +#elif defined(GGML_USE_VULKAN) +#include "ggml-vulkan.h" #endif #elif defined(GGML_USE_OPENBLAS) #if defined(GGML_BLAS_USE_MKL) From 42f664a3825dfde13a32c3577ab66d10c56f3aa6 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Sat, 3 Feb 2024 18:00:11 +0000 Subject: [PATCH 652/811] Resolve ErrorIncompatibleDriver with Vulkan on MacOS. Refs: - https://chat.openai.com/share/7020ce72-65fc-45ec-b7be-9d9d798a5f3f - https://github.com/SaschaWillems/Vulkan/issues/954 - https://github.com/haasn/libplacebo/issues/128 - https://github.com/KhronosGroup/Vulkan-Samples/issues/476 --- ggml-vulkan.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 4a30414df..e9e966dbf 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1109,8 +1109,10 @@ static void ggml_vk_instance_init() { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif + "VK_KHR_portability_enumeration", }; - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); + + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR), &app_info, layers, extensions); #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { From d8c054517dc24f1316f3be12a98fff383e1e93e3 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Tue, 6 Feb 2024 14:39:22 +0000 Subject: [PATCH 653/811] Add preprocessor checks for Apple devices. 
Based on work by @rbourgeat in https://github.com/ggerganov/llama.cpp/pull/5322/files --- ggml-vulkan.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index e9e966dbf..33b8a9061 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1109,10 +1109,15 @@ static void ggml_vk_instance_init() { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif +#ifdef __APPLE__ "VK_KHR_portability_enumeration", +#endif }; + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); +#ifdef __APPLE__ + instance_create_info.flags = vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; +#endif - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR), &app_info, layers, extensions); #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { From f50db6ae0bdcb5f8593ca6ca46dfa03b177faa2f Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 10 Feb 2024 22:14:52 +0100 Subject: [PATCH 654/811] Add check for VK_KHR_portability_enumeration for MoltenVK support --- ggml-vulkan.cpp | 41 +++++++++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 33b8a9061..37123ac8f 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1100,23 +1100,44 @@ static void ggml_vk_instance_init() { #endif vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; - const std::vector layers = { + + const std::vector instance_extensions = vk::enumerateInstanceExtensionProperties(); +#ifdef __APPLE__ + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + portability_enumeration_ext = true; + break; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." 
<< std::endl; + } +#endif + + std::vector layers = { #ifdef GGML_VULKAN_VALIDATE "VK_LAYER_KHRONOS_validation", #endif }; - const std::vector extensions = { + std::vector extensions = { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", -#endif -#ifdef __APPLE__ - "VK_KHR_portability_enumeration", #endif }; - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); #ifdef __APPLE__ - instance_create_info.flags = vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; + if (portability_enumeration_ext) { + extensions.push_back("VK_KHR_portability_enumeration"); + } #endif + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags{}, &app_info, layers, extensions); +#ifdef __APPLE__ + if (portability_enumeration_ext) { + instance_create_info.flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; + } +#endif + #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; @@ -1175,12 +1196,12 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { vk_instance.devices[idx] = std::make_shared(); ctx->device = vk_instance.devices[idx]; ctx->device.lock()->physical_device = devices[dev_num]; - std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); + const std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); bool maintenance4_support = false; // Check if maintenance4 is supported - for (auto properties : ext_props) { + for (const auto& properties : ext_props) { if (strcmp("VK_KHR_maintenance4", properties.extensionName) == 0) { maintenance4_support = true; } @@ -1211,7 +1232,7 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { bool fp16_storage = false; bool fp16_compute = false; - for (auto properties : ext_props) { + for (const auto& properties : ext_props) { if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { fp16_storage = true; } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { From bb9dcd560a7e81265398b0d463c40f3e467daf19 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Wed, 14 Feb 2024 20:57:17 +0100 Subject: [PATCH 655/811] Refactor validation and enumeration platform checks into functions to clean up ggml_vk_instance_init() --- ggml-vulkan.cpp | 101 ++++++++++++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 38 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 37123ac8f..4e5eaff15 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1091,7 +1091,10 @@ static void ggml_vk_print_gpu_info(size_t idx) { } } -static void ggml_vk_instance_init() { +static bool ggml_vk_instance_validation_ext_available(const std::vector& instance_extensions); +static bool ggml_vk_instance_portability_enumeration_ext_available(const std::vector& instance_extensions); + +void ggml_vk_instance_init() { if (vk_instance_initialized) { return; } @@ -1102,54 +1105,40 @@ static void ggml_vk_instance_init() { vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector instance_extensions = vk::enumerateInstanceExtensionProperties(); -#ifdef __APPLE__ - bool portability_enumeration_ext = false; - // Check for portability enumeration extension for MoltenVK support - for (const auto& properties : instance_extensions) { - if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { - portability_enumeration_ext = true; - break; - } - } - if 
(!portability_enumeration_ext) { - std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." << std::endl; - } -#endif + const bool validation_ext = ggml_vk_instance_validation_ext_available(instance_extensions); + const bool portability_enumeration_ext = ggml_vk_instance_portability_enumeration_ext_available(instance_extensions); - std::vector layers = { -#ifdef GGML_VULKAN_VALIDATE - "VK_LAYER_KHRONOS_validation", -#endif - }; - std::vector extensions = { -#ifdef GGML_VULKAN_VALIDATE - "VK_EXT_validation_features", -#endif - }; -#ifdef __APPLE__ + std::vector layers; + + if (validation_ext) { + layers.push_back("VK_LAYER_KHRONOS_validation"); + } + std::vector extensions; + if (validation_ext) { + extensions.push_back("VK_EXT_validation_features"); + } if (portability_enumeration_ext) { extensions.push_back("VK_KHR_portability_enumeration"); } -#endif vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags{}, &app_info, layers, extensions); -#ifdef __APPLE__ if (portability_enumeration_ext) { instance_create_info.flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; } -#endif + std::vector features_enable; + vk::ValidationFeaturesEXT validation_features; -#ifdef GGML_VULKAN_VALIDATE - const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; - vk::ValidationFeaturesEXT validation_features = { - features_enable, - {}, - }; - validation_features.setPNext(nullptr); - instance_create_info.setPNext(&validation_features); + if (validation_ext) { + features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; + validation_features = { + features_enable, + {}, + }; + validation_features.setPNext(nullptr); + instance_create_info.setPNext(&validation_features); - std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; -#endif + std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; + } vk_instance.instance = vk::createInstance(instance_create_info); memset(vk_instance.initialized, 0, sizeof(bool) * GGML_VK_MAX_DEVICES); @@ -5329,6 +5318,42 @@ GGML_CALL int ggml_backend_vk_reg_devices() { return vk_instance.device_indices.size(); } +// Extension availability +static bool ggml_vk_instance_validation_ext_available(const std::vector& instance_extensions) { +#ifdef GGML_VULKAN_VALIDATE + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + return true; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." << std::endl; + } +#endif + return false; + + UNUSED(instance_extensions); +} +static bool ggml_vk_instance_portability_enumeration_ext_available(const std::vector& instance_extensions) { +#ifdef __APPLE__ + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + return true; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." 
<< std::endl; + } +#endif + return false; + + UNUSED(instance_extensions); +} + // checks #ifdef GGML_VULKAN_CHECK_RESULTS From 22f83f0c383e12106692b8afc224d61b8993a52c Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 10 Feb 2024 22:18:33 +0100 Subject: [PATCH 656/811] Enable Vulkan MacOS CI --- .devops/nix/package.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index ad23f7dd7..815db6a2d 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -255,11 +255,11 @@ effectiveStdenv.mkDerivation ( # Configurations we don't want even the CI to evaluate. Results in the # "unsupported platform" messages. This is mostly a no-op, because # cudaPackages would've refused to evaluate anyway. - badPlatforms = optionals (useCuda || useOpenCL || useVulkan) lib.platforms.darwin; + badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; # Configurations that are known to result in build failures. Can be # overridden by importing Nixpkgs with `allowBroken = true`. - broken = (useMetalKit && !effectiveStdenv.isDarwin) || (useVulkan && effectiveStdenv.isDarwin); + broken = (useMetalKit && !effectiveStdenv.isDarwin); description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; homepage = "https://github.com/ggerganov/llama.cpp/"; From 633782b8d949f24b619e6c68ee37b5cc79167173 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Tue, 13 Feb 2024 20:28:02 +0000 Subject: [PATCH 657/811] nix: now that we can do so, allow MacOS to build Vulkan binaries Author: Philip Taron Date: Tue Feb 13 20:28:02 2024 +0000 --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index ad2f9b295..dc4e503c3 100644 --- a/flake.nix +++ b/flake.nix @@ -150,6 +150,7 @@ packages = { default = config.legacyPackages.llamaPackages.llama-cpp; + vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; @@ -157,7 +158,6 @@ mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; - vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; From b9111bd209c7b11b0592450a6ed2e0ca545b2c84 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Tue, 20 Feb 2024 07:01:25 +0000 Subject: [PATCH 658/811] Update ggml_sycl_op_mul_mat_vec_q (#5502) * Update ggml_sycl_op_mul_mat_vec_q * Apply suggestions from code review Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> * revert suggestion on macro * fix bug * Add quant type GGML_TYPE_IQ1_S to unsupported * fix format --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 258 ++++++++++++++------------------------------------ 1 file changed, 69 insertions(+), 189 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index cd4b3a1e1..df1826112 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -9188,174 +9188,22 @@ static void convert_mul_mat_vec_f16_sycl(const void *vx, const dfloat *y, } } -static void mul_mat_vec_q4_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK4_0 == 0); - const int block_num_y = (nrows + 
GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q4_1_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK4_1 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK5_0 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_1_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK5_1 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q8_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK8_0 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q2_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q3_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 
0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q4_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q6_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); +template +static void mul_mat_vec_q_sycl_submitter(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK4_0 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + stream->parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), [= + ](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1); + }); } int get_device_index_by_id(int id){ @@ -12095,37 +11943,63 @@ inline void ggml_sycl_op_mul_mat_vec_q( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + // TODO: support these quantization types + GGML_ASSERT(!(src0->type == GGML_TYPE_IQ2_XXS || + src0->type == GGML_TYPE_IQ2_XS || + src0->type == GGML_TYPE_IQ3_XXS || + src0->type == GGML_TYPE_IQ1_S)); + switch (src0->type) { case GGML_TYPE_Q4_0: - mul_mat_vec_q4_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_1: - mul_mat_vec_q4_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - 
break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_0: - mul_mat_vec_q5_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_1: - mul_mat_vec_q5_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q8_0: - mul_mat_vec_q8_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q2_K: - mul_mat_vec_q2_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q3_K: - mul_mat_vec_q3_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_K: - mul_mat_vec_q4_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_K: - mul_mat_vec_q5_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q6_K: - mul_mat_vec_q6_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; @@ -12145,7 +12019,7 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( const int64_t src1_ncols, const int64_t src1_padded_row_size, const dpct::queue_ptr &stream) { - GGML_TENSOR_BINARY_OP_LOCALS + GGML_TENSOR_BINARY_OP_LOCALS; const int64_t row_diff = row_high - row_low; @@ -15093,6 +14967,12 @@ static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_ten return false; } + if (a->type == GGML_TYPE_IQ1_S) { + return false; + } + if (a->type == GGML_TYPE_IQ3_XXS) { + return false; + } if (a->type == GGML_TYPE_IQ2_XXS) { return false; } From c0a8c6db371cb3e4379900867b948879f5842201 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Tue, 20 Feb 2024 08:48:19 +0100 Subject: [PATCH 659/811] server : health endpoint configurable failure on no slot (#5594) --- examples/server/README.md | 9 ++++--- examples/server/server.cpp | 52 +++++++++++++++++++------------------- 2 files changed, 31 insertions(+), 30 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 809e2d37c..f6b9c7402 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -134,10 +134,11 @@ node index.js ## API Endpoints - **GET** `/health`: Returns the current state of the server: - - `{"status": "loading model"}` if the model is still being loaded. - - `{"status": "error"}` if the model failed to load. - - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. - - `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available + - 503 -> `{"status": "loading model"}` if the model is still being loaded. 
+ - 500 -> `{"status": "error"}` if the model failed to load. + - 200 -> `{"status": "ok", "slots_idle": 1, "slots_processing": 2 }` if the model is successfully loaded and the server is ready for further requests mentioned below. + - 200 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available. + - 503 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if the query parameter `fail_on_no_slot` is provided and no slot are currently available. - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 22c344dd4..23482ed95 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2582,40 +2582,40 @@ int main(int argc, char **argv) res.set_header("Access-Control-Allow-Headers", "*"); }); - svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { + svr.Get("/health", [&](const httplib::Request& req, httplib::Response& res) { server_state current_state = state.load(); switch(current_state) { - case SERVER_STATE_READY: - if (llama.all_slots_are_idle) { - res.set_content(R"({"status": "ok"})", "application/json"); + case SERVER_STATE_READY: { + int available_slots = 0; + int processing_slots = 0; + for (llama_client_slot &slot: llama.slots) { + if (slot.available()) { + available_slots++; + } else { + processing_slots++; + } + } + if (available_slots > 0) { + json health = { + {"status", "ok"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); res.status = 200; // HTTP OK } else { - int available_slots = 0; - int processing_slots = 0; - for (llama_client_slot & slot : llama.slots) { - if (slot.available()) { - available_slots++; - } else { - processing_slots++; - } - } - if (available_slots > 0) { - json health = { - {"status", "ok"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); - res.status = 200; // HTTP OK - } else { - json health = { - {"status", "no slot available"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); + json health = { + {"status", "no slot available"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + if (req.has_param("fail_on_no_slot")) { res.status = 503; // HTTP Service Unavailable + } else { + res.status = 200; // HTTP OK } } break; + } case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); res.status = 503; // HTTP Service Unavailable From 8dbbd75754d43ec7b4bbe42fb287cc2553fdf0e9 Mon Sep 17 00:00:00 2001 From: Haoxiang Fei Date: Mon, 19 Feb 2024 22:58:36 -1100 Subject: [PATCH 660/811] metal : add build system support for embedded metal library (#5604) * add build support for embedded metal library * Update Makefile --------- Co-authored-by: Haoxiang Fei Co-authored-by: Georgi Gerganov --- CMakeLists.txt | 24 ++++++++++++++++++++++++ Makefile | 18 ++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 168b133f4..3c4629001 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -110,6 +110,7 @@ option(LLAMA_VULKAN_RUN_TESTS "llama: run Vulkan tests" option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG 
"llama: disable Metal debugging" OFF) option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) +option(LLAMA_METAL_EMBED_LIBRARY "llama: embed Metal library" OFF) option(LLAMA_KOMPUTE "llama: use Kompute" OFF) option(LLAMA_MPI "llama: use MPI" OFF) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) @@ -201,6 +202,29 @@ if (LLAMA_METAL) # copy ggml-metal.metal to bin directory configure_file(ggml-metal.metal ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal COPYONLY) + if (LLAMA_METAL_EMBED_LIBRARY) + enable_language(ASM) + add_compile_definitions(GGML_METAL_EMBED_LIBRARY) + + set(METALLIB_SOURCE "${CMAKE_SOURCE_DIR}/ggml-metal.metal") + file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/autogenerated") + set(EMBED_METALLIB_ASSEMBLY "${CMAKE_BINARY_DIR}/autogenerated/ggml-embed-metallib.s") + + add_custom_command( + OUTPUT ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".section __DATA,__ggml_metallib" > ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".globl _ggml_metallib_start" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo "_ggml_metallib_start:" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".incbin \\\"${METALLIB_SOURCE}\\\"" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".globl _ggml_metallib_end" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo "_ggml_metallib_end:" >> ${EMBED_METALLIB_ASSEMBLY} + DEPENDS ${METALLIB_SOURCE} + COMMENT "Generate assembly for embedded Metal library" + ) + + set(GGML_SOURCES_METAL ${GGML_SOURCES_METAL} ${EMBED_METALLIB_ASSEMBLY}) + endif() + if (LLAMA_METAL_SHADER_DEBUG) # custom command to do the following: # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air diff --git a/Makefile b/Makefile index db5df1b32..211a08d7f 100644 --- a/Makefile +++ b/Makefile @@ -533,11 +533,29 @@ ifdef LLAMA_METAL ifdef LLAMA_METAL_NDEBUG MK_CPPFLAGS += -DGGML_METAL_NDEBUG endif +ifdef LLAMA_METAL_EMBED_LIBRARY + MK_CPPFLAGS += -DGGML_METAL_EMBED_LIBRARY + OBJS += ggml-metal-embed.o +endif endif # LLAMA_METAL ifdef LLAMA_METAL ggml-metal.o: ggml-metal.m ggml-metal.h $(CC) $(CFLAGS) -c $< -o $@ + +ifdef LLAMA_METAL_EMBED_LIBRARY +ggml-metal-embed.o: ggml-metal.metal + @echo "Embedding Metal library" + $(eval TEMP_ASSEMBLY=$(shell mktemp)) + @echo ".section __DATA, __ggml_metallib" > $(TEMP_ASSEMBLY) + @echo ".globl _ggml_metallib_start" >> $(TEMP_ASSEMBLY) + @echo "_ggml_metallib_start:" >> $(TEMP_ASSEMBLY) + @echo ".incbin \"$<\"" >> $(TEMP_ASSEMBLY) + @echo ".globl _ggml_metallib_end" >> $(TEMP_ASSEMBLY) + @echo "_ggml_metallib_end:" >> $(TEMP_ASSEMBLY) + @$(AS) $(TEMP_ASSEMBLY) -o $@ + @rm -f ${TEMP_ASSEMBLY} +endif endif # LLAMA_METAL ifdef LLAMA_MPI From 5207b3fbc500f89dfe528693e96540956dbaed96 Mon Sep 17 00:00:00 2001 From: Dane Madsen Date: Tue, 20 Feb 2024 21:00:23 +1100 Subject: [PATCH 661/811] readme : update UI list (#5605) * Add maid to ui list * Specify licence --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 70866e249..747d2e98b 100644 --- a/README.md +++ b/README.md @@ -156,6 +156,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [pythops/tenere](https://github.com/pythops/tenere) (AGPL) - [semperai/amica](https://github.com/semperai/amica) - [withcatai/catai](https://github.com/withcatai/catai) +- [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) --- From 9c405c9f9a7cfd23511fd6b2de05dc72481119b4 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Tue, 20 Feb 2024 15:58:27 +0100 
Subject: [PATCH 662/811] Server: use llama_chat_apply_template (#5593) * server: use llama_chat_apply_template * server: remove trailing space * server: fix format_chat * server: fix help message Co-authored-by: Georgi Gerganov * server: fix formatted_chat --------- Co-authored-by: Georgi Gerganov --- examples/server/oai.hpp | 6 ++-- examples/server/server.cpp | 17 +++++----- examples/server/utils.hpp | 69 ++++++++++++++++++-------------------- llama.cpp | 2 +- 4 files changed, 45 insertions(+), 49 deletions(-) diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp index 2eca8a9fb..ff4ad6994 100644 --- a/examples/server/oai.hpp +++ b/examples/server/oai.hpp @@ -15,13 +15,11 @@ using json = nlohmann::json; inline static json oaicompat_completion_params_parse( + const struct llama_model * model, const json &body, /* openai api json semantics */ const std::string &chat_template) { json llama_params; - std::string formatted_prompt = chat_template == "chatml" - ? format_chatml(body["messages"]) // OpenAI 'messages' to chatml (with <|im_start|>,...) - : format_llama2(body["messages"]); // OpenAI 'messages' to llama2 (with [INST],...) llama_params["__oaicompat"] = true; @@ -34,7 +32,7 @@ inline static json oaicompat_completion_params_parse( // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; llama_params["model"] = json_value(body, "model", std::string("unknown")); - llama_params["prompt"] = formatted_prompt; + llama_params["prompt"] = format_chat(model, chat_template, body["messages"]); llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 23482ed95..c7821eca6 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -37,7 +37,7 @@ struct server_params std::string hostname = "127.0.0.1"; std::vector api_keys; std::string public_path = "examples/server/public"; - std::string chat_template = "chatml"; + std::string chat_template = ""; int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; @@ -1937,8 +1937,9 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); - printf(" --chat-template FORMAT_NAME"); - printf(" set chat template, possible value is: llama2, chatml (default %s)", sparams.chat_template.c_str()); + printf(" --chat-template JINJA_TEMPLATE\n"); + printf(" set custom jinja chat template (default: template taken from model's metadata)\n"); + printf(" Note: only commonly used templates are accepted, since we don't have jinja parser\n"); printf("\n"); } @@ -2389,13 +2390,13 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - std::string value(argv[i]); - if (value != "chatml" && value != "llama2") { - fprintf(stderr, "error: chat template can be \"llama2\" or \"chatml\", but got: %s\n", value.c_str()); + if (!verify_custom_template(argv[i])) { + fprintf(stderr, "error: the supplied chat template is not supported: %s\n", argv[i]); + fprintf(stderr, "note: llama.cpp does not use jinja parser, we only support commonly used templates\n"); invalid_param = true; break; } - sparams.chat_template = value; + sparams.chat_template = argv[i]; } else if (arg == "--override-kv") { @@ -2913,7 +2914,7 @@ int main(int argc, char **argv) if (!validate_api_key(req, res)) { return; } - json data = oaicompat_completion_params_parse(json::parse(req.body), sparams.chat_template); + json data = oaicompat_completion_params_parse(llama.model, json::parse(req.body), sparams.chat_template); const int task_id = llama.queue_tasks.get_new_id(); llama.queue_results.add_waiting_task_id(task_id); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 0ee670dba..e954fb0ef 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -167,50 +167,47 @@ static T json_value(const json &body, const std::string &key, const T &default_v : default_value; } -inline std::string format_llama2(std::vector messages) -{ - std::ostringstream output; - bool is_inside_turn = false; - - for (auto it = messages.begin(); it != messages.end(); ++it) { - if (!is_inside_turn) { - output << "[INST] "; - } - std::string role = json_value(*it, "role", std::string("user")); - std::string content = json_value(*it, "content", std::string("")); - if (role == "system") { - output << "<>\n" << content << "\n<>\n\n"; - is_inside_turn = true; - } else if (role == "user") { - output << content << " [/INST]"; - is_inside_turn = true; - } else { - output << " " << content << " "; - is_inside_turn = false; - } - } - - LOG_VERBOSE("format_llama2", {{"text", output.str()}}); - - return output.str(); +// Check if the template supplied via "--chat-template" is supported or not. Returns true if it's valid +inline bool verify_custom_template(const std::string & tmpl) { + llama_chat_message chat[] = {{"user", "test"}}; + std::vector buf(1); + int res = llama_chat_apply_template(nullptr, tmpl.c_str(), chat, 1, true, buf.data(), buf.size()); + return res >= 0; } -inline std::string format_chatml(std::vector messages) +// Format given chat. 
If tmpl is empty, we take the template from model metadata +inline std::string format_chat(const struct llama_model * model, const std::string & tmpl, const std::vector & messages) { - std::ostringstream chatml_msgs; + size_t alloc_size = 0; + // vector holding all allocated string to be passed to llama_chat_apply_template + std::vector str(messages.size() * 2); + std::vector chat(messages.size()); - for (auto it = messages.begin(); it != messages.end(); ++it) { - chatml_msgs << "<|im_start|>" - << json_value(*it, "role", std::string("user")) << '\n'; - chatml_msgs << json_value(*it, "content", std::string("")) - << "<|im_end|>\n"; + for (size_t i = 0; i < messages.size(); ++i) { + auto &curr_msg = messages[i]; + str[i*2 + 0] = json_value(curr_msg, "role", std::string("")); + str[i*2 + 1] = json_value(curr_msg, "content", std::string("")); + alloc_size += str[i*2 + 1].length(); + chat[i].role = str[i*2 + 0].c_str(); + chat[i].content = str[i*2 + 1].c_str(); } - chatml_msgs << "<|im_start|>assistant" << '\n'; + const char * ptr_tmpl = tmpl.empty() ? nullptr : tmpl.c_str(); + std::vector buf(alloc_size * 2); - LOG_VERBOSE("format_chatml", {{"text", chatml_msgs.str()}}); + // run the first time to get the total output length + int32_t res = llama_chat_apply_template(model, ptr_tmpl, chat.data(), chat.size(), true, buf.data(), buf.size()); - return chatml_msgs.str(); + // if it turns out that our buffer is too small, we resize it + if ((size_t) res > buf.size()) { + buf.resize(res); + res = llama_chat_apply_template(model, ptr_tmpl, chat.data(), chat.size(), true, buf.data(), buf.size()); + } + + std::string formatted_chat(buf.data(), res); + LOG_VERBOSE("formatted_chat", {{"text", formatted_chat.c_str()}}); + + return formatted_chat; } // diff --git a/llama.cpp b/llama.cpp index 5de07dfa9..4296eca32 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12602,7 +12602,7 @@ LLAMA_API int32_t llama_chat_apply_template( // load template from model std::vector model_template(2048, 0); // longest known template is about 1200 bytes std::string template_key = "tokenizer.chat_template"; - int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), curr_tmpl.size()); + int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), model_template.size()); if (res < 0) { // worst case: there is no information about template, we will use chatml by default curr_tmpl = "<|im_start|>"; // see llama_chat_apply_template_internal From 4ed8e4fbef6a15afd993bfcd9ffa279841e18ef1 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 20 Feb 2024 18:30:27 +0100 Subject: [PATCH 663/811] llava : add explicit instructions for llava-1.6 (#5611) This commit contains a suggestion for the README.md in the llava example. The suggestion adds explicit instructions for how to convert a llava-1.6 model and run it using llava-cli. The motivation for this is that having explicit instructions similar to the 1.5 instructions will make it easier for users to try this out. Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 38 ++++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index e42db6e5a..25ea96715 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -59,14 +59,40 @@ python ./convert.py ../llava-v1.5-7b --skip-unknown Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. 
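At this point the converted 1.5 model can already be tried with `llava-cli`, analogous to the 1.6 invocation shown further below. The file names here are an illustration only, assuming the default outputs of the conversion steps above:
```console
./llava-cli -m ../llava-v1.5-7b/ggml-model-f16.gguf --mmproj ../llava-v1.5-7b/mmproj-model-f16.gguf --image some-image.jpg
```
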
## LLaVA 1.6 gguf conversion - -1) Backup your pth/safetensor model files as llava-surgery modifies them -2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: +1) First clone a LLaVA 1.6 model: +```console +git clone https://huggingface.co/liuhaotian/llava-v1.6-vicuna-7b +``` +2) Backup your pth/safetensor model files as llava-surgery modifies them +3) Use `llava-surgery-v2.py` which also supports llava-1.5 variants pytorch as well as safetensor models: +```console +python examples/llava/llava-surgery-v2.py -C -m ../llava-v1.6-vicuna-7b/ +``` - you will find a llava.projector and a llava.clip file in your model directory -3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config_vit.json) and rename it to config.json. -4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip-model-is-vision` +4) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory: +```console +mkdir vit +cp ../llava-v1.6-vicuna-7b/llava.clip vit/pytorch_model.bin +cp ../llava-v1.6-vicuna-7b/llava.projector vit/ +curl -s -q https://huggingface.co/cmp-nct/llava-1.6-gguf/raw/main/config_vit.json -o vit/config.json +``` + +5) Create the visual gguf model: +```console +python ./examples/llava/convert-image-encoder-to-gguf.py -m vit --llava-projector vit/llava.projector --output-dir vit --clip-model-is-vision +``` - This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP -5) Everything else as usual: convert.py the hf model, quantize as needed + +6) Then convert the model to gguf format: +```console +python ./convert.py ../llava-v1.6-vicuna-7b/ +``` + +7) And finally we can run the llava-cli using the 1.6 model version: +```console +./llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096 +``` + **note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) **note** llava-1.6 greatly benefits from batched prompt processing (defaults work) From 06bf2cf8c406e6b70dbf9b431a02fa0ad845b9df Mon Sep 17 00:00:00 2001 From: slaren Date: Tue, 20 Feb 2024 20:06:17 +0100 Subject: [PATCH 664/811] make : fix debug build with CUDA (#5616) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 211a08d7f..41c79c135 100644 --- a/Makefile +++ b/Makefile @@ -173,7 +173,7 @@ ifdef LLAMA_DEBUG MK_LDFLAGS += -g ifeq ($(UNAME_S),Linux) - MK_CXXFLAGS += -Wp,-D_GLIBCXX_ASSERTIONS + MK_CPPFLAGS += -D_GLIBCXX_ASSERTIONS endif else MK_CPPFLAGS += -DNDEBUG From 6560bed3f066c876682464762cad90f1e28e3f1b Mon Sep 17 00:00:00 2001 From: CJ Pais Date: Tue, 20 Feb 2024 11:07:22 -0800 Subject: [PATCH 665/811] server : support llava 1.6 (#5553) * server: init working 1.6 * move clip_image to header * remove commented code * remove c++ style from header * remove todo * expose llava_image_embed_make_with_clip_img * fix zig build --- Makefile | 2 +- build.zig | 3 ++- examples/llava/llava.cpp | 2 +- examples/llava/llava.h | 2 ++ examples/server/server.cpp | 36 
+++--------------------------------- 5 files changed, 9 insertions(+), 36 deletions(-) diff --git a/Makefile b/Makefile index 41c79c135..f03faf6ed 100644 --- a/Makefile +++ b/Makefile @@ -719,7 +719,7 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h examples/llava/llava.h examples/llava/llava.cpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2) diff --git a/build.zig b/build.zig index 699738f3d..c0af454dc 100644 --- a/build.zig +++ b/build.zig @@ -123,6 +123,7 @@ pub fn build(b: *std.build.Builder) !void { const grammar_parser = make.obj("grammar-parser", "common/grammar-parser.cpp"); const train = make.obj("train", "common/train.cpp"); const clip = make.obj("clip", "examples/llava/clip.cpp"); + const llava = make.obj("llava", "examples/llava/llava.cpp"); _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, console, grammar_parser }); _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo }); @@ -131,7 +132,7 @@ pub fn build(b: *std.build.Builder) !void { _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); - const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, grammar_parser, clip }); + const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, grammar_parser, clip, llava }); if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 4cb65a07b..1a1cf7c78 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -311,7 +311,7 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * return true; } -static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { +bool 
llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); diff --git a/examples/llava/llava.h b/examples/llava/llava.h index 9e9466a5d..2d40f3f1d 100644 --- a/examples/llava/llava.h +++ b/examples/llava/llava.h @@ -31,6 +31,8 @@ struct llava_image_embed { /** sanity check for clip <-> llava embed size match */ LLAVA_API bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * ctx_clip); +LLAVA_API bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out); + /** build an image embed from image file bytes */ LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length); /** build an image embed from a path to an image filename */ diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c7821eca6..eb01729fa 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -5,6 +5,7 @@ #include "oai.hpp" #include "../llava/clip.h" +#include "../llava/llava.h" #include "stb_image.h" @@ -997,43 +998,12 @@ struct llama_server_context { continue; } - clip_image_f32_batch img_res_v; - img_res_v.size = 0; - img_res_v.data = nullptr; - if (!clip_image_preprocess(clp_ctx, img.img_data, img_res_v)) - { - LOG_TEE("Error processing the given image"); - clip_free(clp_ctx); - clip_image_f32_batch_free(img_res_v); - return false; - } - if (img_res_v.size == 0) - { + + if (!llava_image_embed_make_with_clip_img(clp_ctx, params.n_threads, img.img_data, &img.image_embedding, &img.image_tokens)) { LOG_TEE("Error processing the given image"); return false; } - // note: assumes only one image was returned by clip_image_preprocess - clip_image_f32 * img_res = img_res_v.data; - - img.image_tokens = clip_n_patches(clp_ctx); - img.image_embedding = (float *)malloc(clip_embd_nbytes(clp_ctx)); - if (!img.image_embedding) - { - LOG_TEE("Unable to allocate memory for image embeddings\n"); - clip_image_f32_batch_free(img_res_v); - clip_free(clp_ctx); - return false; - } - LOG_TEE("slot %i - encoding image [id: %i]\n", slot.id, img.id); - if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) - { - LOG_TEE("Unable to encode image\n"); - clip_image_f32_batch_free(img_res_v); - return false; - } - - clip_image_f32_batch_free(img_res_v); img.request_encode_image = false; } From a14679cc30c785e75d38028bae6ec39c6209ddef Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Wed, 21 Feb 2024 11:39:52 +0200 Subject: [PATCH 666/811] IQ4_NL: 4-bit non-linear quants with blocks of 32 (#5590) * iq4_nl: squash commits for easier rebase * Basics (quantize, dequantize) * CUDA dequantize and dot product * Slightly faster CUDA dot product (120 t/s) * Switch to 6-bit scales * Scalar dot product * AVX2 dot product * ARM_NEON dot product * Works on metal, but still slow * Slightly better Metal dot product * Another small Metal improvement * Metal dot product is getting there * Faster CUDA dot product * Add 1/8 ffn_down layers as Q5_K when no imatrix has been provided * Report the actual bpw * Add _xs mix that is 4.05 bpw for non-MoE models * Remove 
IQ4_XS for now, slightly adjust kvalues_iq4nl * AVX2 dot product uses Q8_0 instead of Q8_K * Add to test-backend-ops * Minor fix * Also use use Q5_K for attn_output in MoE models * Fixes after merging latest master * Switching to blocks of 32 * AVX2 for blocks of 32 * Scaler dot product for blocks of 32 * ARM_NEON dot product for blocks of 32 * Metal kernels for blocks of 32 * Slightly faster Metal kernels * iq4_nl: Fix after merging with master * iq4_nl: another fix after merging with master * Use IQ4_NL instead of Q4_K when using k-quants is not possible * Fix typo that makes several tests fail * It was the ggml_vdotq thing missed inside the brackets --------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 1 + ggml-cuda.cu | 98 +++++++++++++- ggml-metal.m | 35 +++++ ggml-metal.metal | 215 +++++++++++++++++++++++++++++- ggml-quants.c | 234 ++++++++++++++++++++++++++++++++- ggml-quants.h | 13 ++ ggml.c | 30 +++++ ggml.h | 2 + llama.cpp | 17 ++- llama.h | 1 + tests/test-backend-ops.cpp | 1 + 11 files changed, 640 insertions(+), 7 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index ea7ba50c9..37520857f 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -32,6 +32,7 @@ static const std::vector QUANT_OPTIONS = { { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1764 ppl @ LLaMA-v1-7B", }, + { "IQ4_NL", LLAMA_FTYPE_MOSTLY_IQ4_NL, " 4.25 bpw non-linear quantization", }, { "Q4_K", LLAMA_FTYPE_MOSTLY_Q4_K_M, "alias for Q4_K_M", }, { "Q4_K_S", LLAMA_FTYPE_MOSTLY_Q4_K_S, " 3.59G, +0.0992 ppl @ LLaMA-v1-7B", }, { "Q4_K_M", LLAMA_FTYPE_MOSTLY_Q4_K_M, " 3.80G, +0.0532 ppl @ LLaMA-v1-7B", }, diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 6caae56b0..e7c211d7d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -528,6 +528,15 @@ typedef struct { } block_iq1_s; static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); +#define QK4_NL 32 +#define QR4_NL 2 +#define QI4_NL (QK4_NL / (4*QR4_NL)) +typedef struct { + half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1987,6 +1996,26 @@ static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_ } +static const __device__ int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; + +template +static __global__ void dequantize_block_iq4_nl(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq4_nl * x = (const block_iq4_nl *) vx + i*(QK_K/QK4_NL); + + const int tid = threadIdx.x; + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 4*il; + const uint8_t * q4 = x[ib].qs + 4*il; + const float d = (float)x[ib].d; + for (int j = 0; j < 4; ++j) { + y[j+ 0] = d * kvalues_iq4nl[q4[j] & 0xf]; + y[j+16] = d * kvalues_iq4nl[q4[j] >> 4]; + } + +} static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { @@ -4732,6 +4761,56 @@ static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( #endif } +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +static __device__ __forceinline__ void get_int_from_table_16(const uint32_t & q4, const uint8_t * values, + int & val1, int & val2) { + + uint32_t aux32; const uint8_t * q8 = (const uint8_t *)&aux32; + aux32 = q4 & 0x0f0f0f0f; + uint16_t v1 = values[q8[0]] | (values[q8[1]] << 8); + uint16_t v2 = values[q8[2]] | (values[q8[3]] << 8); + val1 = v1 | (v2 << 16); + aux32 = (q4 >> 4) & 0x0f0f0f0f; + v1 = values[q8[0]] | (values[q8[1]] << 8); + v2 = values[q8[2]] | (values[q8[3]] << 8); + val2 = v1 | (v2 << 16); +} +#endif + +static __device__ __forceinline__ float vec_dot_iq4_nl_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { + + const block_iq4_nl * bq = (const block_iq4_nl *) vbq; + +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics + const uint16_t * q4 = (const uint16_t *)bq->qs + 2*iqs; + const int32_t * q8 = (const int32_t *)bq8_1->qs + iqs; + + const uint8_t * values = (const uint8_t *)kvalues_iq4nl; + + int v1, v2; + int sumi1 = 0, sumi2 = 0; + for (int l = 0; l < VDR_Q4_0_Q8_1_MMVQ; ++l) { + const uint32_t aux = q4[2*l] | (q4[2*l+1] << 16); + get_int_from_table_16(aux, values, v1, v2); + sumi1 = __dp4a(v1, q8[l+0], sumi1); + sumi2 = __dp4a(v2, q8[l+4], sumi2); + } + +#else + const uint8_t * q4 = bq->qs + 4*iqs; + const int8_t * q8 = bq8_1->qs + 4*iqs; + + int sumi1 = 0, sumi2 = 0; + for (int l = 0; l < 4*VDR_Q4_0_Q8_1_MMVQ; ++l) { + sumi1 += q8[l+ 0] * kvalues_iq4nl[q4[l] & 0xf]; + sumi2 += q8[l+16] * kvalues_iq4nl[q4[l] >> 4]; + } +#endif + const float d = (float)bq->d * __low2float(bq8_1->ds); + return d * (sumi1 + sumi2); +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6777,6 +6856,12 @@ static void dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, c dequantize_block_iq1_s<<>>(vx, y); } +template +static void dequantize_row_iq4_nl_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = (k + QK_K - 1) / QK_K; + dequantize_block_iq4_nl<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6818,6 +6903,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) 
{ return dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: return dequantize_row_iq1_s_cuda; + case GGML_TYPE_IQ4_NL: + return dequantize_row_iq4_nl_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6855,6 +6942,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: return dequantize_row_iq1_s_cuda; + case GGML_TYPE_IQ4_NL: + return dequantize_row_iq4_nl_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8599,6 +8688,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8623,6 +8713,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8724,6 +8815,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ4_NL: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11446,7 +11541,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons return false; } ggml_type a_type = a->type; - if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || a_type == GGML_TYPE_IQ1_S) { + if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 956e323a0..0d4aa4309 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -62,6 +62,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, GGML_METAL_KERNEL_TYPE_RMS_NORM, GGML_METAL_KERNEL_TYPE_GROUP_NORM, @@ -85,6 +86,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, @@ -104,6 +106,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, @@ -120,6 +123,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, @@ -136,6 +140,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, @@ -448,6 +453,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); @@ -471,6 +477,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); @@ -490,6 +497,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); @@ -506,6 +514,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); @@ -522,6 +531,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); @@ -1338,6 +1348,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } @@ -1478,6 +1489,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32].pipeline; } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1525,6 +1542,11 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src0t == GGML_TYPE_IQ4_NL) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1619,6 +1641,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } @@ -1762,6 +1785,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32].pipeline; } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1825,6 +1854,11 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src2t == GGML_TYPE_IQ4_NL) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 
3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1867,6 +1901,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index f0d77d446..c223a981c 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2531,6 +2531,12 @@ typedef struct { uint8_t scales[QK_K/16]; } block_iq1_s; +// Non-linear quants +#define QK4_NL 32 +typedef struct { + half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; //====================================== dot products ========================= @@ -4384,7 +4390,6 @@ void kernel_mul_mv_iq1_s_f32_impl( const uint i13 = im/ne12; const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); - device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0; device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; @@ -4447,6 +4452,103 @@ void kernel_mul_mv_iq1_s_f32_impl( } } +constexpr constant static float kvalues_iq4nl_f[16] = { + -127.f, -104.f, -83.f, -65.f, -49.f, -35.f, -22.f, -10.f, 1.f, 13.f, 25.f, 38.f, 53.f, 69.f, 89.f, 113.f +}; + +void kernel_mul_mv_iq4_nl_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK4_NL; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + const int first_row = (r0 * 2 + sgitg) * 2; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq4_nl * x = (device const block_iq4_nl *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + const int ix = tiisg/2; // 0...15 + const int it = tiisg%2; // 0 or 1 + + shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16]; + threadgroup_barrier(mem_flags::mem_threadgroup); + + float4 yl[4]; + float sumf[2]={0.f}, all_sum; + + device const float * yb = y + ix * QK4_NL + it * 8; + + uint32_t aux32[2]; + thread const uint8_t * q8 = (thread const uint8_t *)aux32; + + float4 qf1, qf2; + + for (int ib = ix; ib < nb; ib += 16) { + + device const float4 * y4 = (device const float4 *)yb; + yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5]; + + for (int row = 0; row < 2; ++row) { + + device const block_iq4_nl & xb = x[row*nb + ib]; + device const uint16_t * q4 = (device const 
uint16_t *)(xb.qs + 8*it); + + float4 acc1 = {0.f}, acc2 = {0.f}; + + aux32[0] = q4[0] | (q4[1] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[0] * qf1; + acc2 += yl[1] * qf2; + + aux32[0] = q4[2] | (q4[3] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[2] * qf1; + acc2 += yl[3] * qf2; + + acc1 += acc2; + + sumf[row] += (float)xb.d * (acc1[0] + acc1[1] + acc1[2] + acc1[3]); + + } + + yb += 16 * QK4_NL; + } + + for (int row = 0; row < 2; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + [[host_name("kernel_mul_mv_iq1_s_f32")]] kernel void kernel_mul_mv_iq1_s_f32( device const void * src0, @@ -4475,6 +4577,34 @@ kernel void kernel_mul_mv_iq1_s_f32( kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); } +[[host_name("kernel_mul_mv_iq4_nl_f32")]] +kernel void kernel_mul_mv_iq4_nl_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} //============================= templates and their specializations ============================= @@ -4838,6 +4968,21 @@ void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & } } +template +void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 & reg) { + device const uint16_t * q4 = (device const uint16_t *)xb->qs; + const float d = xb->d; + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = ((q4[2*i] | (q4[2*i+1] << 16)) >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -5381,6 +5526,7 @@ template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_r template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -5421,6 
+5567,7 @@ template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_m template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -5473,6 +5620,7 @@ template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -6503,3 +6651,68 @@ kernel void kernel_mul_mv_id_iq1_s_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq4_nl_f32")]] +kernel void kernel_mul_mv_id_iq4_nl_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq4_nl_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index 3319d2ccf..6336538f0 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3754,6 +3754,26 @@ void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, in } } +static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; + +void dequantize_row_iq4_nl(const block_iq4_nl * restrict x, float * restrict y, int k) { + assert(k % QK4_NL == 0); + const int nb = k / QK4_NL; + + for (int i = 0; i < nb; i++) { + + const uint8_t * qs = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + for (int j = 0; j < QK4_NL/2; ++j) { + y[j+ 0] = d * kvalues_iq4nl[qs[j] & 0xf]; + y[j+QK4_NL/2] = d * kvalues_iq4nl[qs[j] >> 4]; + } + y += QK4_NL; + qs += QK4_NL/2; + } +} + //===================================== Q8_K ============================================== 
void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -9148,7 +9168,6 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * #endif } -// TODO void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); assert(nrc == 1); @@ -9452,7 +9471,100 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const *s = sumf; #endif +} +void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK4_NL == 0); + static_assert(QK4_NL == QK8_0, "QK4_NL and QK8_0 must be the same"); + + const block_iq4_nl * restrict x = vx; + const block_q8_0 * restrict y = vy; + + const int nb = n / QK4_NL; + +#if defined __ARM_NEON + const int8x16_t values = vld1q_s8(kvalues_iq4nl); + const uint8x16_t m4b = vdupq_n_u8(0x0f); + uint8x16x2_t q4bits; + int8x16x4_t q4b; + int8x16x4_t q8b; + int32x4_t prod_1, prod_2; + + float sumf = 0; + + for (int ib = 0; ib < nb; ib += 2) { + + q4bits.val[0] = vld1q_u8(x[ib+0].qs); + q4bits.val[1] = vld1q_u8(x[ib+1].qs); + q8b.val[0] = vld1q_s8(y[ib+0].qs); + q8b.val[1] = vld1q_s8(y[ib+0].qs + 16); + q8b.val[2] = vld1q_s8(y[ib+1].qs); + q8b.val[3] = vld1q_s8(y[ib+1].qs + 16); + + q4b.val[0] = vqtbl1q_s8(values, vandq_u8(q4bits.val[0], m4b)); + q4b.val[1] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = vqtbl1q_s8(values, vandq_u8(q4bits.val[1], m4b)); + q4b.val[3] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + + prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); + prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); + + sumf += (float)x[ib+0].d * (float)y[ib+0].d * vaddvq_s32(prod_1) + (float)x[ib+1].d * (float)y[ib+1].d * vaddvq_s32(prod_2); + + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + const __m256i mone = _mm256_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (int ib = 0; ib < nb; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i*)x[0].qs); + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)x[1].qs); + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)y[0].qs); + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)y[1].qs); + const __m256i q4b_1 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const __m256i p_1 = _mm256_madd_epi16(p16_1, mone); + const __m256i p_2 = _mm256_madd_epi16(p16_2, mone); + accum1 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[0].d)*GGML_FP16_TO_FP32(x[0].d)), + _mm256_cvtepi32_ps(p_1), accum1); + accum2 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[1].d)*GGML_FP16_TO_FP32(x[1].d)), + _mm256_cvtepi32_ps(p_2), accum2); + + y += 2; + x 
+= 2; + } + + *s = hsum_float_8(_mm256_add_ps(accum1, accum2)); + +#else + float sumf = 0; + for (int ib = 0; ib < nb; ++ib) { + const float d = GGML_FP16_TO_FP32(y[ib].d)*GGML_FP16_TO_FP32(x[ib].d); + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < QK4_NL/2; ++j) { + sumi1 += y[ib].qs[j+ 0] * kvalues_iq4nl[x[ib].qs[j] & 0xf]; + sumi2 += y[ib].qs[j+QK4_NL/2] * kvalues_iq4nl[x[ib].qs[j] >> 4]; + } + sumf += d * (sumi1 + sumi2); + } + *s = sumf; +#endif } // ================================ IQ2 quantization ============================================= @@ -10729,3 +10841,123 @@ size_t quantize_iq1_s(const float * src, void * dst, int nrow, int n_per_row, in } return nrow * nblock * sizeof(block_iq1_s); } + +// ============================ 4-bit non-linear quants + +static inline int best_index_int8(int n, const int8_t * val, float x) { + if (x <= val[0]) return 0; + if (x >= val[n-1]) return n-1; + int ml = 0, mu = n-1; + while (mu-ml > 1) { + int mav = (ml+mu)/2; + if (x < val[mav]) mu = mav; else ml = mav; + } + return x - val[mu-1] < val[mu] - x ? mu-1 : mu; +} + +static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RESTRICT x, + ggml_fp16_t * dh, uint8_t * q4, + float * weight, uint8_t * L, + const int8_t * values, + const float * quant_weights) { + + const int ntry = 7; + + float sigma2 = 0; + for (int j = 0; j < QK4_NL; ++j) sigma2 += x[j]*x[j]; + sigma2 *= 2.f/QK4_NL; + + const int nb = QK4_NL/block_size; + + memset(q4, 0, QK4_NL/2); + for (int ib = 0; ib < nb; ++ib) { + dh[ib] = GGML_FP32_TO_FP16(0.f); + const float * xb = x + ib*block_size; + if (quant_weights) { + const float * qw = quant_weights + ib*block_size; + for (int j = 0; j < block_size; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]); + } else { + for (int j = 0; j < block_size; ++j) weight[j] = xb[j]*xb[j]; + } + float amax = 0, max = 0; + for (int j = 0; j < block_size; ++j) { + float ax = fabsf(xb[j]); + if (ax > amax) { + amax = ax; max = xb[j]; + } + } + if (!amax) { + continue; + } + float d = -max/values[0]; + float id = 1/d; + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + float best_id = id; + d = sumqx/sumq2; + float best = d*sumqx; + for (int itry = -ntry; itry <= ntry; ++itry) { + id = (itry + values[0])/max; + sumqx = sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + d = sumqx/sumq2; best = d * sumqx; + best_id = id; + } + } + dh[ib] = GGML_FP32_TO_FP16(d); + for (int j = 0; j < block_size; ++j) { + L[ib*block_size + j] = best_index_int8(16, values, best_id*xb[j]); + } + } + for (int i = 0; i < QK4_NL/32; ++i) { + for (int j = 0; j < 16; ++j) { + q4[16*i + j] = L[32*i + j] | (L[32*i + 16 + j] << 4); + } + } +} + +size_t quantize_iq4_nl(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK4_NL == 0); + int nblock = n_per_row/QK4_NL; + char * qrow = (char *)dst; + uint8_t L[QK4_NL]; + float weight[32]; + for (int row = 0; row < nrow; ++row) { + block_iq4_nl * iq4 = (block_iq4_nl *)qrow; + for (int ibl = 0; ibl < nblock; ++ibl) { + const float * qw = quant_weights ? 
quant_weights + QK4_NL*ibl : NULL; + quantize_row_iq4_nl_impl(32, src + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, weight, L, kvalues_iq4nl, qw); + } + src += n_per_row; + qrow += nblock*sizeof(block_iq4_nl); + } + return nrow * nblock * sizeof(block_iq4_nl); +} + +void quantize_row_iq4_nl(const float * restrict x, void * restrict vy, int k) { + assert(k % QK4_NL == 0); + block_iq4_nl * restrict y = vy; + quantize_row_iq4_nl_reference(x, y, k); +} + +void quantize_row_iq4_nl_reference(const float * restrict x, block_iq4_nl * restrict y, int k) { + assert(k % QK4_NL == 0); + quantize_iq4_nl(x, y, 1, k, NULL, NULL); +} + diff --git a/ggml-quants.h b/ggml-quants.h index ad381cfab..113623b62 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -198,6 +198,14 @@ typedef struct { } block_iq1_s; static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); +// Non-linear quants +#define QK4_NL 32 +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); + #ifdef __cplusplus extern "C" { #endif @@ -217,6 +225,7 @@ void quantize_row_q5_K_reference(const float * GGML_RESTRICT x, block_q5_K * GGM void quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGML_RESTRICT y, int k); void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); +void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -232,6 +241,7 @@ void quantize_row_q5_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -251,6 +261,7 @@ void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -268,6 +279,7 @@ void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void 
ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. "Activation aWare Quantization") @@ -276,6 +288,7 @@ size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index d129df505..91adbb0ae 100644 --- a/ggml.c +++ b/ggml.c @@ -690,6 +690,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ4_NL] = { + .type_name = "iq4_nl", + .blck_size = QK4_NL, + .type_size = sizeof(block_iq4_nl), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq4_nl, + .from_float = quantize_row_iq4_nl, + .from_float_reference = (ggml_from_float_t)quantize_row_iq4_nl_reference, + .vec_dot = ggml_vec_dot_iq4_nl_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2291,6 +2303,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; + case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7702,6 +7715,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7970,6 +7984,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -8091,6 +8106,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: default: { GGML_ASSERT(false); @@ -10858,6 +10874,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -11039,6 +11056,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: default: { 
GGML_ASSERT(false); @@ -11237,6 +11255,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11911,6 +11930,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11989,6 +12009,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19455,6 +19476,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq1_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ4_NL: + { + GGML_ASSERT(start % QK4_NL == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq4_nl(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 004d09c70..bed7a36a0 100644 --- a/ggml.h +++ b/ggml.h @@ -355,6 +355,7 @@ extern "C" { GGML_TYPE_IQ2_XS = 17, GGML_TYPE_IQ3_XXS = 18, GGML_TYPE_IQ1_S = 19, + GGML_TYPE_IQ4_NL = 20, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -393,6 +394,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 4296eca32..3748d5eac 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2527,6 +2527,7 @@ struct llama_model_loader { case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; + case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2877,6 +2878,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; default: return "unknown, may not work"; } @@ -10354,6 +10356,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = qs.i_attention_wv < 2 ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && qs.model.hparams.n_gqa() >= 4) { + new_type = GGML_TYPE_Q5_K; + } else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && use_more_bits(qs.i_attention_wv, qs.n_attention_wv)) new_type = GGML_TYPE_Q6_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && qs.i_attention_wv < 4) new_type = GGML_TYPE_Q5_K; @@ -10406,6 +10411,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; } } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && !qs.has_imatrix) { + if (i_layer < n_layer/8) new_type = GGML_TYPE_Q5_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && i_layer < n_layer/8) { new_type = GGML_TYPE_Q5_K; @@ -10422,7 +10430,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch != LLM_ARCH_FALCON) { if (qs.model.hparams.n_expert == 8) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || - ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || + ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { new_type = GGML_TYPE_Q5_K; } @@ -10489,8 +10497,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: - case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; - case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: new_type = GGML_TYPE_IQ4_NL; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; @@ -10531,7 +10539,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; - case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S ; break; + case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; + case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index 77a84c18a..8ba20696f 100644 --- a/llama.h +++ b/llama.h @@ -101,6 +101,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q3_K_XS = 22, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ4_NL = 25, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index ef37c5af2..55db42bf6 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1918,6 +1918,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, + 
GGML_TYPE_IQ4_NL, }; // unary ops From 88c46cbdac05cebd936511b1d3c74112e721615f Mon Sep 17 00:00:00 2001 From: "Meng, Hengyu" Date: Wed, 21 Feb 2024 17:52:06 +0800 Subject: [PATCH 667/811] [SYCL] conext add name (#5624) * [SYCL] conext add name * name should start with SYCL* --- ggml-sycl.cpp | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index df1826112..b897828f9 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -14642,7 +14642,8 @@ GGML_CALL static const char * ggml_backend_sycl_buffer_type_name(ggml_backend_bu static ggml_backend_buffer_t ggml_backend_sycl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) try { - int device = (int) (intptr_t) buft->context; + ggml_backend_sycl_buffer_type_context * buft_ctx = (ggml_backend_sycl_buffer_type_context *)buft->context; + int device = (int) buft_ctx->device; ggml_sycl_set_device(device); int device_index = get_device_index_by_id(device); @@ -14720,7 +14721,7 @@ ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device) { for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) { ggml_backend_sycl_buffer_types[i] = { /* .iface = */ ggml_backend_sycl_buffer_type_interface, - /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i, + /* .context = */ new ggml_backend_sycl_buffer_type_context{i, GGML_SYCL_NAME + std::to_string(i)}, }; } ggml_backend_sycl_buffer_type_initialized = true; @@ -14782,10 +14783,6 @@ ggml_backend_buffer_type_t ggml_backend_sycl_host_buffer_type() { // backend -struct ggml_backend_context_sycl { - int device; -}; - static const char * ggml_backend_sycl_name(ggml_backend_t backend) { return GGML_SYCL_NAME; @@ -14793,14 +14790,14 @@ static const char * ggml_backend_sycl_name(ggml_backend_t backend) { } static void ggml_backend_sycl_free(ggml_backend_t backend) { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; delete sycl_ctx; delete backend; } static ggml_backend_buffer_type_t ggml_backend_sycl_get_default_buffer_type(ggml_backend_t backend) { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; return ggml_backend_sycl_buffer_type(sycl_ctx->device); } @@ -14809,7 +14806,7 @@ static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, ggml_tensor *tensor, const void *data, size_t offset, size_t size) try { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -14827,7 +14824,7 @@ static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, const ggml_tensor *tensor, void *data, size_t offset, size_t size) try { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -14842,7 +14839,7 @@ catch (sycl::exception const &exc) { } static void 
ggml_backend_sycl_synchronize(ggml_backend_t backend) try { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->wait())); @@ -14878,7 +14875,7 @@ static void ggml_backend_sycl_graph_plan_compute(ggml_backend_t backend, ggml_ba } static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; ggml_sycl_set_main_device(sycl_ctx->device); @@ -15092,8 +15089,9 @@ ggml_backend_t ggml_backend_sycl_init(int device) { // not strictly necessary, but it may reduce the overhead of the first graph_compute ggml_sycl_set_main_device(device); - ggml_backend_context_sycl * ctx = new ggml_backend_context_sycl { - /* .device = */ device + ggml_backend_sycl_context * ctx = new ggml_backend_sycl_context { + /* .device = */ device, + /* .name = */ GGML_SYCL_NAME + std::to_string(device), }; ggml_backend_t sycl_backend = new ggml_backend { From 580111d42b3b6ad0a390bfb267d6e3077506eb31 Mon Sep 17 00:00:00 2001 From: postmasters Date: Wed, 21 Feb 2024 05:08:22 -0800 Subject: [PATCH 668/811] llama : add `gemma` model (#5631) There are couple things in this architecture: 1. Shared input and output embedding parameters. 2. Key length and value length are not derived from `n_embd`. More information about the models can be found at https://ai.google.dev/gemma. GGUFs can be downloaded from https://huggingface.co/google. --- README.md | 1 + gguf-py/gguf/constants.py | 15 ++++ llama.cpp | 170 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 186 insertions(+) diff --git a/README.md b/README.md index 747d2e98b..225db8e49 100644 --- a/README.md +++ b/README.md @@ -107,6 +107,7 @@ Typically finetunes of the base models below are supported as well. 
- [x] [Orion 14B](https://github.com/ggerganov/llama.cpp/pull/5118) - [x] [InternLM2](https://huggingface.co/models?search=internlm2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) +- [x] [Gemma](https://ai.google.dev/gemma) **Multimodal models:** diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 114a9a974..8f9139d1b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -111,6 +111,7 @@ class MODEL_ARCH(IntEnum): ORION = auto() INTERNLM2 = auto() MINICPM = auto() + GEMMA = auto() class MODEL_TENSOR(IntEnum): @@ -167,6 +168,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.ORION: "orion", MODEL_ARCH.INTERNLM2: "internlm2", MODEL_ARCH.MINICPM: "minicpm", + MODEL_ARCH.GEMMA: "gemma", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -511,6 +513,19 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN_EXP, MODEL_TENSOR.FFN_UP_EXP, ], + MODEL_ARCH.GEMMA: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_NORM, + ], # TODO } diff --git a/llama.cpp b/llama.cpp index 3748d5eac..3a226c426 100644 --- a/llama.cpp +++ b/llama.cpp @@ -208,6 +208,7 @@ enum llm_arch { LLM_ARCH_ORION, LLM_ARCH_INTERNLM2, LLM_ARCH_MINICPM, + LLM_ARCH_GEMMA, LLM_ARCH_UNKNOWN, }; @@ -234,6 +235,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_ORION, "orion" }, { LLM_ARCH_INTERNLM2, "internlm2" }, { LLM_ARCH_MINICPM, "minicpm" }, + { LLM_ARCH_GEMMA, "gemma" }, }; enum llm_kv { @@ -760,6 +762,22 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" }, }, }, + { + LLM_ARCH_GEMMA, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -3243,6 +3261,16 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_GEMMA: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + + switch (hparams.n_layer) { + case 18: model.type = e_model::MODEL_2B; break; + case 28: model.type = e_model::MODEL_7B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -4360,6 +4388,37 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; + case LLM_ARCH_GEMMA: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + + const int64_t n_ff = hparams.n_ff; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + + for (uint32_t i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + 
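// NOTE: as the commit message says, Gemma's key/value lengths are not derived from n_embd:
// wq/wo are sized with n_embd_head_k * n_head, while wk/wv use the GQA widths
// n_embd_k_gqa / n_embd_v_gqa. There is also no separate output matrix - the token
// embedding is reused as the LM head in build_gemma() below.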
layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd_head_k * hparams.n_head}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_k_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_v_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd_head_k * hparams.n_head, n_embd}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -7366,6 +7425,113 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_gemma() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head_k = hparams.n_embd_head_k; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + inpL = ggml_scale(ctx0, inpL, sqrtf(n_embd)); + cb(inpL, "inp_scaled", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head_k, n_head, n_tokens), inp_pos, + n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Qcur, "Qcur", il); + Qcur = ggml_scale(ctx0, Qcur, 1.0f / sqrtf(float(n_embd_head_k))); + cb(Qcur, "Qcur_scaled", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head_k, n_head_kv, n_tokens), inp_pos, + n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + cb(cur, "kqv_out", il); + } + struct ggml_tensor * sa_out = ggml_add(ctx0, cur, inpL); + cb(sa_out, "sa_out", il); + + cur = 
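// NOTE: each Gemma block RMS-norms its input before both the attention and the FFN,
// uses a gated GELU FFN (LLM_FFN_GELU + LLM_FFN_PAR), scales the input embeddings by
// sqrt(n_embd), and reuses model.tok_embd for the final projection, i.e. the shared
// input/output embeddings mentioned in the commit message.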
llm_build_norm(ctx0, sa_out, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + // feed-forward network + { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_GELU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, sa_out); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.tok_embd, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph( @@ -7474,6 +7640,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_minicpm(); } break; + case LLM_ARCH_GEMMA: + { + result = llm.build_gemma(); + } break; default: GGML_ASSERT(false); } From cc6cac08e38e32bf40bbe07e9e8f8f0130b5fd94 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 21 Feb 2024 14:36:57 +0100 Subject: [PATCH 669/811] llava : add --skip-unknown to 1.6 convert.py (#5632) This commit adds the `--skip-unknown` option to the convert.py script and removes the saving of the updated checkpoints to avoid updating possibly checked out files. The motivation for this change is that this was done for 1.5 in Commit fc0c8d286a533363a9a663510b62af85ffad58b3 ("llava : update surgery script to not remove tensors") and makes the examples more consistent. Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 13 ++++++------- examples/llava/llava-surgery-v2.py | 12 ------------ 2 files changed, 6 insertions(+), 19 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 25ea96715..35e6d9e5d 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -63,13 +63,12 @@ Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` director ```console git clone https://huggingface.co/liuhaotian/llava-v1.6-vicuna-7b ``` -2) Backup your pth/safetensor model files as llava-surgery modifies them -3) Use `llava-surgery-v2.py` which also supports llava-1.5 variants pytorch as well as safetensor models: +2) Use `llava-surgery-v2.py` which also supports llava-1.5 variants pytorch as well as safetensor models: ```console python examples/llava/llava-surgery-v2.py -C -m ../llava-v1.6-vicuna-7b/ ``` - you will find a llava.projector and a llava.clip file in your model directory -4) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory: +3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory: ```console mkdir vit cp ../llava-v1.6-vicuna-7b/llava.clip vit/pytorch_model.bin @@ -77,18 +76,18 @@ cp ../llava-v1.6-vicuna-7b/llava.projector vit/ curl -s -q https://huggingface.co/cmp-nct/llava-1.6-gguf/raw/main/config_vit.json -o vit/config.json ``` -5) Create the visual gguf model: +4) Create the visual gguf model: ```console python ./examples/llava/convert-image-encoder-to-gguf.py -m vit --llava-projector vit/llava.projector --output-dir vit --clip-model-is-vision ``` - This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP -6) Then convert the model to gguf format: +5) 
Then convert the model to gguf format: ```console -python ./convert.py ../llava-v1.6-vicuna-7b/ +python ./convert.py ../llava-v1.6-vicuna-7b/ --skip-unknown ``` -7) And finally we can run the llava-cli using the 1.6 model version: +6) And finally we can run the llava-cli using the 1.6 model version: ```console ./llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096 ``` diff --git a/examples/llava/llava-surgery-v2.py b/examples/llava/llava-surgery-v2.py index 5bc5bc513..eb56d6988 100644 --- a/examples/llava/llava-surgery-v2.py +++ b/examples/llava/llava-surgery-v2.py @@ -65,9 +65,7 @@ def clean_vision_tower_from_checkpoint(checkpoint_path): for name in clip_tensors: del checkpoint[name] - # Save the updated checkpoint checkpoint_path = checkpoint_path - save_model(checkpoint, checkpoint_path, file_type) return True return False @@ -152,16 +150,6 @@ for name in first_mm_tensors: if len(projector) > 0: save_model(projector, f"{args.model}/llava.projector", 'pytorch') -for name in mm_tensors: - del last_checkpoint[name] -for name in first_mm_tensors: - del first_checkpoint[name] - -if len(mm_tensors) > 0: - save_model(last_checkpoint, projector_checkpoint_path, file_type) -if len(first_mm_tensors) > 0: - save_model(first_checkpoint, newline_checkpoint_path, file_type) - print("Done!") print(f"Now you can convert {args.model} to a a regular LLaMA GGUF file.") print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.") From c14f72db9c62d71d35eb1c141745c0bd0cb27b49 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 21 Feb 2024 15:39:54 +0200 Subject: [PATCH 670/811] readme : update hot topics --- README.md | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 225db8e49..ce5dec7ca 100644 --- a/README.md +++ b/README.md @@ -10,13 +10,8 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Hot topics -- Remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD: https://github.com/ggerganov/llama.cpp/pull/5240 -- Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 - - [SYCL backend](README-sycl.md) is ready (1/28/2024), support Linux/Windows in Intel GPUs (iGPU, Arc/Flex/Max series) -- New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow -- Collecting Apple Silicon performance stats: - - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167 - - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508 +- Support for Gemma models: https://github.com/ggerganov/llama.cpp/pull/5631 +- Non-linear quantization IQ4_NL: https://github.com/ggerganov/llama.cpp/pull/5590 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 ---- From eccd7a26ddbff19e4b8805648f5f14c501957859 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 21 Feb 2024 16:17:10 +0200 Subject: [PATCH 671/811] sync : ggml (#5633) * ggml : fix conv_2d batch mode (ggml/737) Co-authored-by: bssrdf * ggml : compute forward no longer pass src tensors (ggml/729) * sync : ggml ggml-ci --------- Co-authored-by: bssrdf Co-authored-by: bssrdf --- ggml.c | 1150 +++++++++++++++++++++++++--------------- scripts/sync-ggml.last | 2 +- 2 files changed, 711 insertions(+), 441 deletions(-) diff --git a/ggml.c b/ggml.c index 91adbb0ae..5b9fa741a 100644 --- a/ggml.c +++ b/ggml.c @@ -5644,7 +5644,9 @@ struct ggml_tensor * ggml_conv_2d( 
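// NOTE (conv_2d batch fix): the im2col mul_mat result is laid out as [OC, N, OH, OW],
// so it is now reshaped to that shape and then permuted (plus a cont) to [N, OC, OH, OW].
// The previous single reshape only yielded the expected layout for batch size N == 1,
// hence "fix conv_2d batch mode".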
ggml_reshape_2d(ctx, im2col, im2col->ne[0], im2col->ne[3] * im2col->ne[2] * im2col->ne[1]), // [N, OH, OW, IC * KH * KW] => [N*OH*OW, IC * KH * KW] ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1] * a->ne[2]), a->ne[3])); // [OC,IC, KH, KW] => [OC, IC * KH * KW] - result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], a->ne[3], im2col->ne[3]); // [N, OC, OH, OW] + result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], im2col->ne[3], a->ne[3]); // [OC, N, OH, OW] + result = ggml_cont(ctx, ggml_permute(ctx, result, 0, 1, 3, 2)); // [N, OC, OH, OW] + return result; } @@ -6650,8 +6652,10 @@ void ggml_set_param( static void ggml_compute_forward_dup_same_cont( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); GGML_ASSERT(src0->type == dst->type); @@ -6682,8 +6686,10 @@ static void ggml_compute_forward_dup_same_cont( } static void ggml_compute_forward_dup_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -6696,7 +6702,7 @@ static void ggml_compute_forward_dup_f16( const int nth = params->nth; // number of threads if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -6953,8 +6959,10 @@ static void ggml_compute_forward_dup_f16( static void ggml_compute_forward_dup_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -6967,7 +6975,7 @@ static void ggml_compute_forward_dup_f32( const int nth = params->nth; // number of threads if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -7203,8 +7211,10 @@ static void ggml_compute_forward_dup_f32( // A simplified version of ggml_compute_forward_dup that doesn't do float upcasting, and just plain old memcpy. 
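// NOTE (ggml/729): the compute-forward functions no longer receive the source tensors as
// separate arguments; each one now reads them from the destination, e.g.
//
//   before: ggml_compute_forward_X(params, src0, src1, dst);
//   after:  ggml_compute_forward_X(params, dst);   // with src0 = dst->src[0], src1 = dst->src[1]
//
// The same mechanical change is applied to every op in the rest of this diff.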
static void ggml_compute_forward_dup_bytes( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(src0->type == dst->type); @@ -7213,7 +7223,7 @@ static void ggml_compute_forward_dup_bytes( } if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -7352,21 +7362,23 @@ static void ggml_compute_forward_dup_bytes( static void ggml_compute_forward_dup( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (src0->type == dst->type) { - ggml_compute_forward_dup_bytes(params, src0, dst); + ggml_compute_forward_dup_bytes(params, dst); return; } switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_dup_f16(params, src0, dst); + ggml_compute_forward_dup_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_dup_f32(params, src0, dst); + ggml_compute_forward_dup_f32(params, dst); } break; default: { @@ -7379,9 +7391,11 @@ static void ggml_compute_forward_dup( static void ggml_compute_forward_add_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7467,9 +7481,11 @@ static void ggml_compute_forward_add_f32( static void ggml_compute_forward_add_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7544,9 +7560,11 @@ static void ggml_compute_forward_add_f16_f32( static void ggml_compute_forward_add_f16_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7598,9 +7616,11 @@ static void ggml_compute_forward_add_f16_f16( static void ggml_compute_forward_add_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7676,14 +7696,16 @@ static void ggml_compute_forward_add_q_f32( static void ggml_compute_forward_add( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 
= dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + switch (src0->type) { case GGML_TYPE_F32: { if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add_f32(params, src0, src1, dst); + ggml_compute_forward_add_f32(params, dst); } else { GGML_ASSERT(false); @@ -7692,10 +7714,10 @@ static void ggml_compute_forward_add( case GGML_TYPE_F16: { if (src1->type == GGML_TYPE_F16) { - ggml_compute_forward_add_f16_f16(params, src0, src1, dst); + ggml_compute_forward_add_f16_f16(params, dst); } else if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add_f16_f32(params, src0, src1, dst); + ggml_compute_forward_add_f16_f32(params, dst); } else { GGML_ASSERT(false); @@ -7717,7 +7739,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_add_q_f32(params, src0, src1, dst); + ggml_compute_forward_add_q_f32(params, dst); } break; default: { @@ -7730,9 +7752,11 @@ static void ggml_compute_forward_add( static void ggml_compute_forward_add1_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7782,9 +7806,11 @@ static void ggml_compute_forward_add1_f32( static void ggml_compute_forward_add1_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7832,9 +7858,11 @@ static void ggml_compute_forward_add1_f16_f32( static void ggml_compute_forward_add1_f16_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7882,9 +7910,11 @@ static void ggml_compute_forward_add1_f16_f16( static void ggml_compute_forward_add1_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7949,21 +7979,23 @@ static void ggml_compute_forward_add1_q_f32( static void ggml_compute_forward_add1( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_add1_f32(params, src0, src1, dst); + ggml_compute_forward_add1_f32(params, dst); } break; case GGML_TYPE_F16: { if (src1->type == GGML_TYPE_F16) { - ggml_compute_forward_add1_f16_f16(params, src0, src1, dst); + ggml_compute_forward_add1_f16_f16(params, dst); } else if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add1_f16_f32(params, src0, src1, dst); + ggml_compute_forward_add1_f16_f32(params, dst); } else { GGML_ASSERT(false); @@ -7986,7 +8018,7 @@ static void 
ggml_compute_forward_add1( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_add1_q_f32(params, src0, src1, dst); + ggml_compute_forward_add1_q_f32(params, dst); } break; default: { @@ -7999,9 +8031,11 @@ static void ggml_compute_forward_add1( static void ggml_compute_forward_acc_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); @@ -8081,14 +8115,14 @@ static void ggml_compute_forward_acc_f32( static void ggml_compute_forward_acc( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_acc_f32(params, src0, src1, dst); + ggml_compute_forward_acc_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -8118,9 +8152,11 @@ static void ggml_compute_forward_acc( static void ggml_compute_forward_sub_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); @@ -8178,13 +8214,14 @@ static void ggml_compute_forward_sub_f32( static void ggml_compute_forward_sub( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sub_f32(params, src0, src1, dst); + ggml_compute_forward_sub_f32(params, dst); } break; default: { @@ -8197,9 +8234,11 @@ static void ggml_compute_forward_sub( static void ggml_compute_forward_mul_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8280,15 +8319,17 @@ static void ggml_compute_forward_mul_f32( static void ggml_compute_forward_mul( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src1->type == GGML_TYPE_F32 && "only f32 src1 supported for now"); switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_mul_f32(params, src0, src1, dst); + ggml_compute_forward_mul_f32(params, dst); } break; default: { @@ -8301,9 +8342,11 @@ static void ggml_compute_forward_mul( static void ggml_compute_forward_div_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && 
ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8374,13 +8417,14 @@ static void ggml_compute_forward_div_f32( static void ggml_compute_forward_div( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_div_f32(params, src0, src1, dst); + ggml_compute_forward_div_f32(params, dst); } break; default: { @@ -8393,8 +8437,10 @@ static void ggml_compute_forward_div( static void ggml_compute_forward_sqr_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -8417,12 +8463,14 @@ static void ggml_compute_forward_sqr_f32( static void ggml_compute_forward_sqr( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sqr_f32(params, src0, dst); + ggml_compute_forward_sqr_f32(params, dst); } break; default: { @@ -8435,8 +8483,10 @@ static void ggml_compute_forward_sqr( static void ggml_compute_forward_sqrt_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -8459,12 +8509,14 @@ static void ggml_compute_forward_sqrt_f32( static void ggml_compute_forward_sqrt( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sqrt_f32(params, src0, dst); + ggml_compute_forward_sqrt_f32(params, dst); } break; default: { @@ -8477,8 +8529,10 @@ static void ggml_compute_forward_sqrt( static void ggml_compute_forward_log_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -8501,12 +8555,14 @@ static void ggml_compute_forward_log_f32( static void ggml_compute_forward_log( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_log_f32(params, src0, dst); + ggml_compute_forward_log_f32(params, dst); } break; default: { @@ -8519,8 +8575,10 @@ static void ggml_compute_forward_log( static void ggml_compute_forward_sum_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_is_scalar(dst)); @@ -8552,8 +8610,10 @@ static void ggml_compute_forward_sum_f32( static void ggml_compute_forward_sum_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_is_scalar(dst)); @@ -8584,16 +8644,18 @@ static void ggml_compute_forward_sum_f16( static 
void ggml_compute_forward_sum( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sum_f32(params, src0, dst); + ggml_compute_forward_sum_f32(params, dst); } break; case GGML_TYPE_F16: { - ggml_compute_forward_sum_f16(params, src0, dst); + ggml_compute_forward_sum_f16(params, dst); } break; default: { @@ -8606,8 +8668,10 @@ static void ggml_compute_forward_sum( static void ggml_compute_forward_sum_rows_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8639,12 +8703,14 @@ static void ggml_compute_forward_sum_rows_f32( static void ggml_compute_forward_sum_rows( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sum_rows_f32(params, src0, dst); + ggml_compute_forward_sum_rows_f32(params, dst); } break; default: { @@ -8657,8 +8723,10 @@ static void ggml_compute_forward_sum_rows( static void ggml_compute_forward_mean_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8694,12 +8762,14 @@ static void ggml_compute_forward_mean_f32( static void ggml_compute_forward_mean( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_mean_f32(params, src0, dst); + ggml_compute_forward_mean_f32(params, dst); } break; default: { @@ -8712,8 +8782,10 @@ static void ggml_compute_forward_mean( static void ggml_compute_forward_argmax_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8740,12 +8812,14 @@ static void ggml_compute_forward_argmax_f32( static void ggml_compute_forward_argmax( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_argmax_f32(params, src0, dst); + ggml_compute_forward_argmax_f32(params, dst); } break; default: { @@ -8758,8 +8832,10 @@ static void ggml_compute_forward_argmax( static void ggml_compute_forward_repeat_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); @@ -8801,8 +8877,10 @@ static void ggml_compute_forward_repeat_f32( static void ggml_compute_forward_repeat_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); 
GGML_ASSERT(ggml_can_repeat(src0, dst)); @@ -8847,18 +8925,20 @@ static void ggml_compute_forward_repeat_f16( static void ggml_compute_forward_repeat( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: case GGML_TYPE_I16: { - ggml_compute_forward_repeat_f16(params, src0, dst); + ggml_compute_forward_repeat_f16(params, dst); } break; case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_repeat_f32(params, src0, dst); + ggml_compute_forward_repeat_f32(params, dst); } break; default: { @@ -8871,8 +8951,10 @@ static void ggml_compute_forward_repeat( static void ggml_compute_forward_repeat_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(dst, src0)); @@ -8928,12 +9010,14 @@ static void ggml_compute_forward_repeat_back_f32( static void ggml_compute_forward_repeat_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_repeat_back_f32(params, src0, dst); + ggml_compute_forward_repeat_back_f32(params, dst); } break; default: { @@ -8946,10 +9030,11 @@ static void ggml_compute_forward_repeat_back( static void ggml_compute_forward_concat_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -8994,14 +9079,15 @@ static void ggml_compute_forward_concat_f32( static void ggml_compute_forward_concat( const struct ggml_compute_params* params, - const struct ggml_tensor* src0, - const struct ggml_tensor* src1, struct ggml_tensor* dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_concat_f32(params, src0, src1, dst); + ggml_compute_forward_concat_f32(params, dst); } break; default: { @@ -9014,8 +9100,10 @@ static void ggml_compute_forward_concat( static void ggml_compute_forward_abs_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9038,12 +9126,14 @@ static void ggml_compute_forward_abs_f32( static void ggml_compute_forward_abs( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_abs_f32(params, src0, dst); + ggml_compute_forward_abs_f32(params, dst); } break; default: { @@ -9056,8 +9146,10 @@ static void ggml_compute_forward_abs( static void ggml_compute_forward_sgn_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9080,12 +9172,14 @@ static void ggml_compute_forward_sgn_f32( static void 
ggml_compute_forward_sgn( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sgn_f32(params, src0, dst); + ggml_compute_forward_sgn_f32(params, dst); } break; default: { @@ -9098,8 +9192,10 @@ static void ggml_compute_forward_sgn( static void ggml_compute_forward_neg_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9122,12 +9218,14 @@ static void ggml_compute_forward_neg_f32( static void ggml_compute_forward_neg( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_neg_f32(params, src0, dst); + ggml_compute_forward_neg_f32(params, dst); } break; default: { @@ -9140,8 +9238,10 @@ static void ggml_compute_forward_neg( static void ggml_compute_forward_step_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9164,12 +9264,14 @@ static void ggml_compute_forward_step_f32( static void ggml_compute_forward_step( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_step_f32(params, src0, dst); + ggml_compute_forward_step_f32(params, dst); } break; default: { @@ -9182,8 +9284,10 @@ static void ggml_compute_forward_step( static void ggml_compute_forward_tanh_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9206,12 +9310,14 @@ static void ggml_compute_forward_tanh_f32( static void ggml_compute_forward_tanh( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_tanh_f32(params, src0, dst); + ggml_compute_forward_tanh_f32(params, dst); } break; default: { @@ -9224,8 +9330,10 @@ static void ggml_compute_forward_tanh( static void ggml_compute_forward_elu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9248,12 +9356,14 @@ static void ggml_compute_forward_elu_f32( static void ggml_compute_forward_elu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_elu_f32(params, src0, dst); + ggml_compute_forward_elu_f32(params, dst); } break; default: { @@ -9266,8 +9376,10 @@ static void ggml_compute_forward_elu( static void ggml_compute_forward_relu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct 
ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9290,12 +9402,14 @@ static void ggml_compute_forward_relu_f32( static void ggml_compute_forward_relu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_relu_f32(params, src0, dst); + ggml_compute_forward_relu_f32(params, dst); } break; default: { @@ -9308,8 +9422,10 @@ static void ggml_compute_forward_relu( static void ggml_compute_forward_gelu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9349,12 +9465,14 @@ static void ggml_compute_forward_gelu_f32( static void ggml_compute_forward_gelu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_gelu_f32(params, src0, dst); + ggml_compute_forward_gelu_f32(params, dst); } break; default: { @@ -9367,8 +9485,10 @@ static void ggml_compute_forward_gelu( static void ggml_compute_forward_gelu_quick_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9408,12 +9528,14 @@ static void ggml_compute_forward_gelu_quick_f32( static void ggml_compute_forward_gelu_quick( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_gelu_quick_f32(params, src0, dst); + ggml_compute_forward_gelu_quick_f32(params, dst); } break; default: { @@ -9426,8 +9548,10 @@ static void ggml_compute_forward_gelu_quick( static void ggml_compute_forward_silu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9467,12 +9591,14 @@ static void ggml_compute_forward_silu_f32( static void ggml_compute_forward_silu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_silu_f32(params, src0, dst); + ggml_compute_forward_silu_f32(params, dst); } break; default: { @@ -9484,8 +9610,10 @@ static void ggml_compute_forward_silu( static void ggml_compute_forward_leaky_relu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9511,12 +9639,14 @@ static void ggml_compute_forward_leaky_relu_f32( static void 
ggml_compute_forward_leaky_relu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_leaky_relu_f32(params, src0, dst); + ggml_compute_forward_leaky_relu_f32(params, dst); } break; default: { @@ -9529,9 +9659,11 @@ static void ggml_compute_forward_leaky_relu( static void ggml_compute_forward_silu_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * grad, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * grad = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(grad)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); @@ -9574,13 +9706,14 @@ static void ggml_compute_forward_silu_back_f32( static void ggml_compute_forward_silu_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * grad, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_silu_back_f32(params, src0, grad, dst); + ggml_compute_forward_silu_back_f32(params, dst); } break; default: { @@ -9592,8 +9725,10 @@ static void ggml_compute_forward_silu_back( static void ggml_compute_forward_hardswish_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9615,12 +9750,14 @@ static void ggml_compute_forward_hardswish_f32( } static void ggml_compute_forward_hardswish( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_hardswish_f32(params, src0, dst); + ggml_compute_forward_hardswish_f32(params, dst); } break; default: { @@ -9631,8 +9768,10 @@ static void ggml_compute_forward_hardswish( static void ggml_compute_forward_hardsigmoid_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9655,12 +9794,14 @@ static void ggml_compute_forward_hardsigmoid_f32( static void ggml_compute_forward_hardsigmoid( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_hardsigmoid_f32(params, src0, dst); + ggml_compute_forward_hardsigmoid_f32(params, dst); } break; default: { @@ -9674,8 +9815,10 @@ static void ggml_compute_forward_hardsigmoid( static void ggml_compute_forward_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9727,12 +9870,14 @@ static void ggml_compute_forward_norm_f32( static void ggml_compute_forward_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * 
dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_norm_f32(params, src0, dst); + ggml_compute_forward_norm_f32(params, dst); } break; default: { @@ -9745,8 +9890,10 @@ static void ggml_compute_forward_norm( static void ggml_compute_forward_rms_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9795,12 +9942,14 @@ static void ggml_compute_forward_rms_norm_f32( static void ggml_compute_forward_rms_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_rms_norm_f32(params, src0, dst); + ggml_compute_forward_rms_norm_f32(params, dst); } break; default: { @@ -9811,9 +9960,11 @@ static void ggml_compute_forward_rms_norm( static void ggml_compute_forward_rms_norm_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst) && ggml_are_same_shape(src0, src1)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9968,13 +10119,14 @@ static void ggml_compute_forward_rms_norm_back_f32( static void ggml_compute_forward_rms_norm_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_rms_norm_back_f32(params, src0, src1, dst); + ggml_compute_forward_rms_norm_back_f32(params, dst); } break; default: { @@ -9987,8 +10139,10 @@ static void ggml_compute_forward_rms_norm_back( static void ggml_compute_forward_group_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -10059,12 +10213,14 @@ static void ggml_compute_forward_group_norm_f32( static void ggml_compute_forward_group_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_group_norm_f32(params, src0, dst); + ggml_compute_forward_group_norm_f32(params, dst); } break; default: { @@ -10110,9 +10266,11 @@ static bool ggml_compute_forward_mul_mat_use_blas(struct ggml_tensor * dst) { static void ggml_compute_forward_mul_mat( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -10357,10 +10515,11 @@ static void ggml_compute_forward_mul_mat( static void ggml_compute_forward_mul_mat_id( const struct ggml_compute_params * params, - const struct ggml_tensor * ids, 
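// NOTE: mul_mat_id maps its operands differently - the expert-selection ids tensor is
// dst->src[0], src1 is dst->src[1], and the weight tensor src0 comes from dst->src[2]
// (used only for the GGML_TENSOR_BINARY_OP_LOCALS macros).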
- const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * ids = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src0 = dst->src[2]; // only for GGML_TENSOR_BINARY_OP_LOCALS GGML_TENSOR_BINARY_OP_LOCALS @@ -10551,9 +10710,11 @@ static void ggml_compute_forward_mul_mat_id( static void ggml_compute_forward_out_prod_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + // int64_t t0 = ggml_perf_time_us(); // UNUSED(t0); @@ -10743,9 +10904,11 @@ static void ggml_compute_forward_out_prod_f32( static void ggml_compute_forward_out_prod_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + // int64_t t0 = ggml_perf_time_us(); // UNUSED(t0); @@ -10856,9 +11019,10 @@ static void ggml_compute_forward_out_prod_q_f32( static void ggml_compute_forward_out_prod( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -10876,16 +11040,16 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); + ggml_compute_forward_out_prod_q_f32(params, dst); } break; case GGML_TYPE_F16: { GGML_ASSERT(false); // todo - // ggml_compute_forward_out_prod_f16_f32(params, src0, src1, dst); + // ggml_compute_forward_out_prod_f16_f32(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_out_prod_f32(params, src0, src1, dst); + ggml_compute_forward_out_prod_f32(params, dst); } break; default: { @@ -10898,8 +11062,10 @@ static void ggml_compute_forward_out_prod( static void ggml_compute_forward_scale_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -10940,12 +11106,14 @@ static void ggml_compute_forward_scale_f32( static void ggml_compute_forward_scale( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_scale_f32(params, src0, dst); + ggml_compute_forward_scale_f32(params, dst); } break; default: { @@ -10958,9 +11126,11 @@ static void ggml_compute_forward_scale( static void ggml_compute_forward_set_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); @@ -11031,14 +11201,14 @@ static void ggml_compute_forward_set_f32( static void ggml_compute_forward_set( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct 
ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_set_f32(params, src0, src1, dst); + ggml_compute_forward_set_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -11068,29 +11238,25 @@ static void ggml_compute_forward_set( static void ggml_compute_forward_cpy( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { - ggml_compute_forward_dup(params, src0, dst); + ggml_compute_forward_dup(params, dst); } // ggml_compute_forward_cont static void ggml_compute_forward_cont( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { - ggml_compute_forward_dup(params, src0, dst); + ggml_compute_forward_dup(params, dst); } // ggml_compute_forward_reshape static void ggml_compute_forward_reshape( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); UNUSED(dst); } @@ -11098,39 +11264,41 @@ static void ggml_compute_forward_reshape( static void ggml_compute_forward_view( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_permute static void ggml_compute_forward_permute( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_transpose static void ggml_compute_forward_transpose( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_get_rows static void ggml_compute_forward_get_rows_q( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11166,9 +11334,11 @@ static void ggml_compute_forward_get_rows_q( static void ggml_compute_forward_get_rows_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11201,9 +11371,11 @@ static void ggml_compute_forward_get_rows_f16( static void ggml_compute_forward_get_rows_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11236,9 +11408,10 @@ static void ggml_compute_forward_get_rows_f32( static void ggml_compute_forward_get_rows( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case 
GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -11257,16 +11430,16 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_get_rows_q(params, src0, src1, dst); + ggml_compute_forward_get_rows_q(params, dst); } break; case GGML_TYPE_F16: { - ggml_compute_forward_get_rows_f16(params, src0, src1, dst); + ggml_compute_forward_get_rows_f16(params, dst); } break; case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_get_rows_f32(params, src0, src1, dst); + ggml_compute_forward_get_rows_f32(params, dst); } break; default: { @@ -11297,9 +11470,11 @@ static void ggml_compute_forward_get_rows( static void ggml_compute_forward_get_rows_back_f32_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11334,9 +11509,11 @@ static void ggml_compute_forward_get_rows_back_f32_f16( static void ggml_compute_forward_get_rows_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11371,17 +11548,18 @@ static void ggml_compute_forward_get_rows_back_f32( static void ggml_compute_forward_get_rows_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_get_rows_back_f32_f16(params, src0, src1, dst); + ggml_compute_forward_get_rows_back_f32_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_get_rows_back_f32(params, src0, src1, dst); + ggml_compute_forward_get_rows_back_f32(params, dst); } break; default: { @@ -11412,8 +11590,10 @@ static void ggml_compute_forward_get_rows_back( static void ggml_compute_forward_diag_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11452,12 +11632,14 @@ static void ggml_compute_forward_diag_f32( static void ggml_compute_forward_diag( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_f32(params, src0, dst); + ggml_compute_forward_diag_f32(params, dst); } break; default: { @@ -11470,10 +11652,11 @@ static void ggml_compute_forward_diag( static void ggml_compute_forward_diag_mask_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const float value) { + const struct ggml_tensor * src0 = dst->src[0]; + const int ith = params->ith; const int nth = params->nth; @@ -11523,12 +11706,14 @@ static void ggml_compute_forward_diag_mask_f32( static void ggml_compute_forward_diag_mask_inf( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * 
src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_mask_f32(params, src0, dst, -INFINITY); + ggml_compute_forward_diag_mask_f32(params, dst, -INFINITY); } break; default: { @@ -11539,12 +11724,14 @@ static void ggml_compute_forward_diag_mask_inf( static void ggml_compute_forward_diag_mask_zero( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_mask_f32(params, src0, dst, 0); + ggml_compute_forward_diag_mask_f32(params, dst, 0); } break; default: { @@ -11557,10 +11744,12 @@ static void ggml_compute_forward_diag_mask_zero( static void ggml_compute_forward_soft_max_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src2 = dst->src[2]; + assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); @@ -11671,14 +11860,14 @@ static void ggml_compute_forward_soft_max_f32( static void ggml_compute_forward_soft_max( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_f32(params, src0, src1, src2, dst); + ggml_compute_forward_soft_max_f32(params, dst); } break; default: { @@ -11691,9 +11880,11 @@ static void ggml_compute_forward_soft_max( static void ggml_compute_forward_soft_max_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11768,13 +11959,14 @@ static void ggml_compute_forward_soft_max_back_f32( static void ggml_compute_forward_soft_max_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_back_f32(params, src0, src1, dst); + ggml_compute_forward_soft_max_back_f32(params, dst); } break; default: { @@ -11787,8 +11979,10 @@ static void ggml_compute_forward_soft_max_back( static void ggml_compute_forward_alibi_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11844,8 +12038,10 @@ static void ggml_compute_forward_alibi_f32( static void ggml_compute_forward_alibi_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11904,16 +12100,18 @@ static void ggml_compute_forward_alibi_f16( static void 
ggml_compute_forward_alibi( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_alibi_f16(params, src0, dst); + ggml_compute_forward_alibi_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_alibi_f32(params, src0, dst); + ggml_compute_forward_alibi_f32(params, dst); } break; case GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -11946,8 +12144,10 @@ static void ggml_compute_forward_alibi( static void ggml_compute_forward_clamp_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11986,12 +12186,14 @@ static void ggml_compute_forward_clamp_f32( static void ggml_compute_forward_clamp( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_clamp_f32(params, src0, dst); + ggml_compute_forward_clamp_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -12081,10 +12283,12 @@ GGML_CALL void ggml_rope_yarn_corr_dims( static void ggml_compute_forward_rope_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const bool forward) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -12257,10 +12461,12 @@ static void ggml_compute_forward_rope_f32( static void ggml_compute_forward_rope_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const bool forward) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -12422,17 +12628,18 @@ static void ggml_compute_forward_rope_f16( static void ggml_compute_forward_rope( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_f16(params, src0, src1, dst, true); + ggml_compute_forward_rope_f16(params, dst, true); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_f32(params, src0, src1, dst, true); + ggml_compute_forward_rope_f32(params, dst, true); } break; default: { @@ -12445,17 +12652,18 @@ static void ggml_compute_forward_rope( static void ggml_compute_forward_rope_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_f16(params, src0, src1, dst, false); + ggml_compute_forward_rope_f16(params, dst, false); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_f32(params, src0, src1, dst, false); + ggml_compute_forward_rope_f32(params, dst, false); } break; default: { @@ -12468,9 +12676,11 @@ 
static void ggml_compute_forward_rope_back( static void ggml_compute_forward_conv_transpose_1d_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12565,9 +12775,11 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( static void ggml_compute_forward_conv_transpose_1d_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12662,17 +12874,18 @@ static void ggml_compute_forward_conv_transpose_1d_f32( static void ggml_compute_forward_conv_transpose_1d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_conv_transpose_1d_f16_f32(params, src0, src1, dst); + ggml_compute_forward_conv_transpose_1d_f16_f32(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_conv_transpose_1d_f32(params, src0, src1, dst); + ggml_compute_forward_conv_transpose_1d_f32(params, dst); } break; default: { @@ -12686,9 +12899,11 @@ static void ggml_compute_forward_conv_transpose_1d( // dst: result [N, OH, OW, IC*KH*KW] static void ggml_compute_forward_im2col_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12772,9 +12987,11 @@ static void ggml_compute_forward_im2col_f32( // dst: result [N, OH, OW, IC*KH*KW] static void ggml_compute_forward_im2col_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F16); @@ -12854,17 +13071,15 @@ static void ggml_compute_forward_im2col_f16( static void ggml_compute_forward_im2col( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { switch (dst->type) { case GGML_TYPE_F16: { - ggml_compute_forward_im2col_f16(params, src0, src1, dst); + ggml_compute_forward_im2col_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_im2col_f32(params, src0, src1, dst); + ggml_compute_forward_im2col_f32(params, dst); } break; default: { @@ -12878,9 +13093,11 @@ static void ggml_compute_forward_im2col( static void ggml_compute_forward_conv_transpose_2d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const 
struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12984,9 +13201,11 @@ static void ggml_compute_forward_conv_transpose_2d( static void ggml_compute_forward_pool_1d_sk_p0( const struct ggml_compute_params * params, const enum ggml_op_pool op, - const struct ggml_tensor * src, const int k, struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + assert(src->type == GGML_TYPE_F32); assert(params->ith == 0); @@ -13035,7 +13254,6 @@ static void ggml_compute_forward_pool_1d_sk_p0( static void ggml_compute_forward_pool_1d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { const int32_t * opts = (const int32_t *)dst->op_params; @@ -13046,15 +13264,17 @@ static void ggml_compute_forward_pool_1d( GGML_ASSERT(p0 == 0); // padding not supported GGML_ASSERT(k0 == s0); // only s = k supported - ggml_compute_forward_pool_1d_sk_p0(params, op, src0, k0, dst); + ggml_compute_forward_pool_1d_sk_p0(params, op, k0, dst); } // ggml_compute_forward_pool_2d static void ggml_compute_forward_pool_2d( const struct ggml_compute_params * params, - const struct ggml_tensor * src, struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + GGML_ASSERT(src->type == GGML_TYPE_F32); GGML_ASSERT(params->ith == 0); @@ -13127,9 +13347,10 @@ static void ggml_compute_forward_pool_2d( static void ggml_compute_forward_upscale_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13166,12 +13387,14 @@ static void ggml_compute_forward_upscale_f32( static void ggml_compute_forward_upscale( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_upscale_f32(params, src0, dst); + ggml_compute_forward_upscale_f32(params, dst); } break; default: { @@ -13184,9 +13407,10 @@ static void ggml_compute_forward_upscale( static void ggml_compute_forward_pad_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13224,12 +13448,14 @@ static void ggml_compute_forward_pad_f32( static void ggml_compute_forward_pad( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_pad_f32(params, src0, dst); + ggml_compute_forward_pad_f32(params, dst); } break; default: { @@ -13242,9 +13468,10 @@ static void ggml_compute_forward_pad( static void ggml_compute_forward_argsort_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13284,13 +13511,14 @@ static void ggml_compute_forward_argsort_f32( static void ggml_compute_forward_argsort( const struct ggml_compute_params * params, - 
const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_argsort_f32(params, src0, dst); + ggml_compute_forward_argsort_f32(params, dst); } break; default: { @@ -13303,11 +13531,13 @@ static void ggml_compute_forward_argsort( static void ggml_compute_forward_flash_attn_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13493,11 +13723,13 @@ static void ggml_compute_forward_flash_attn_f32( static void ggml_compute_forward_flash_attn_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13719,19 +13951,19 @@ static void ggml_compute_forward_flash_attn_f16( static void ggml_compute_forward_flash_attn( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + switch (q->type) { case GGML_TYPE_F16: { - ggml_compute_forward_flash_attn_f16(params, q, k, v, masked, dst); + ggml_compute_forward_flash_attn_f16(params, masked, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_flash_attn_f32(params, q, k, v, masked, dst); + ggml_compute_forward_flash_attn_f32(params, masked, dst); } break; default: { @@ -13744,12 +13976,14 @@ static void ggml_compute_forward_flash_attn( static void ggml_compute_forward_flash_ff_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * a, // F16 - const struct ggml_tensor * b0, // F16 fc_w - const struct ggml_tensor * b1, // F32 fc_b - const struct ggml_tensor * c0, // F16 proj_w - const struct ggml_tensor * c1, // F32 proj_b struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; // F16 + const struct ggml_tensor * b0 = dst->src[1]; // F16 fc_w + const struct ggml_tensor * b1 = dst->src[2]; // F32 fc_b + const struct ggml_tensor * c0 = dst->src[3]; // F16 proj_w + const struct ggml_tensor * c1 = dst->src[4]; // F32 proj_b + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13877,16 +14111,14 @@ static void ggml_compute_forward_flash_ff_f16( static void ggml_compute_forward_flash_ff( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b0, - const struct ggml_tensor * b1, - const struct ggml_tensor * c0, - const struct ggml_tensor * c1, struct ggml_tensor * dst) { + + const struct ggml_tensor * b0 = dst->src[1]; + switch (b0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_flash_ff_f16(params, a, b0, b1, c0, c1, dst); + ggml_compute_forward_flash_ff_f16(params, dst); } break; case GGML_TYPE_F32: { @@ -13903,12 +14135,14 @@ static void ggml_compute_forward_flash_ff( static void ggml_compute_forward_flash_attn_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, 
- const struct ggml_tensor * v, - const struct ggml_tensor * d, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + const struct ggml_tensor * d = dst->src[3]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -14256,16 +14490,15 @@ static void ggml_compute_forward_flash_attn_back_f32( static void ggml_compute_forward_flash_attn_back( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, - const struct ggml_tensor * d, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + switch (q->type) { case GGML_TYPE_F32: { - ggml_compute_forward_flash_attn_back_f32(params, q, k, v, d, masked, dst); + ggml_compute_forward_flash_attn_back_f32(params, masked, dst); } break; default: { @@ -14278,8 +14511,10 @@ static void ggml_compute_forward_flash_attn_back( static void ggml_compute_forward_win_part_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14322,12 +14557,14 @@ static void ggml_compute_forward_win_part_f32( static void ggml_compute_forward_win_part( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_win_part_f32(params, src0, dst); + ggml_compute_forward_win_part_f32(params, dst); } break; default: { @@ -14340,8 +14577,10 @@ static void ggml_compute_forward_win_part( static void ggml_compute_forward_win_unpart_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14382,12 +14621,14 @@ static void ggml_compute_forward_win_unpart_f32( static void ggml_compute_forward_win_unpart( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_win_unpart_f32(params, src0, dst); + ggml_compute_forward_win_unpart_f32(params, dst); } break; default: { @@ -14400,58 +14641,58 @@ static void ggml_compute_forward_win_unpart( static void ggml_compute_forward_unary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const enum ggml_unary_op op = ggml_get_unary_op(dst); switch (op) { case GGML_UNARY_OP_ABS: { - ggml_compute_forward_abs(params, src0, dst); + ggml_compute_forward_abs(params, dst); } break; case GGML_UNARY_OP_SGN: { - ggml_compute_forward_sgn(params, src0, dst); + ggml_compute_forward_sgn(params, dst); } break; case GGML_UNARY_OP_NEG: { - ggml_compute_forward_neg(params, src0, dst); + ggml_compute_forward_neg(params, dst); } break; case GGML_UNARY_OP_STEP: { - ggml_compute_forward_step(params, src0, dst); + ggml_compute_forward_step(params, dst); } break; case GGML_UNARY_OP_TANH: { - ggml_compute_forward_tanh(params, src0, dst); + ggml_compute_forward_tanh(params, dst); } break; case GGML_UNARY_OP_ELU: { - ggml_compute_forward_elu(params, src0, dst); 
+ ggml_compute_forward_elu(params, dst); } break; case GGML_UNARY_OP_RELU: { - ggml_compute_forward_relu(params, src0, dst); + ggml_compute_forward_relu(params, dst); } break; case GGML_UNARY_OP_GELU: { - ggml_compute_forward_gelu(params, src0, dst); + ggml_compute_forward_gelu(params, dst); } break; case GGML_UNARY_OP_GELU_QUICK: { - ggml_compute_forward_gelu_quick(params, src0, dst); + ggml_compute_forward_gelu_quick(params, dst); } break; case GGML_UNARY_OP_SILU: { - ggml_compute_forward_silu(params, src0, dst); + ggml_compute_forward_silu(params, dst); } break; case GGML_UNARY_OP_HARDSWISH: { - ggml_compute_forward_hardswish(params, src0, dst); + ggml_compute_forward_hardswish(params, dst); } break; case GGML_UNARY_OP_HARDSIGMOID: { - ggml_compute_forward_hardsigmoid(params, src0, dst); + ggml_compute_forward_hardsigmoid(params, dst); } break; default: { @@ -14464,8 +14705,10 @@ static void ggml_compute_forward_unary( static void ggml_compute_forward_get_rel_pos_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14491,12 +14734,14 @@ static void ggml_compute_forward_get_rel_pos_f16( static void ggml_compute_forward_get_rel_pos( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_get_rel_pos_f16(params, src0, dst); + ggml_compute_forward_get_rel_pos_f16(params, dst); } break; default: { @@ -14509,11 +14754,12 @@ static void ggml_compute_forward_get_rel_pos( static void ggml_compute_forward_add_rel_pos_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src2 = dst->src[2]; + const bool inplace = (bool) ((int32_t *) dst->op_params)[0]; if (!inplace && params->type == GGML_TASK_INIT) { if (params->ith != 0) { @@ -14577,14 +14823,14 @@ static void ggml_compute_forward_add_rel_pos_f32( static void ggml_compute_forward_add_rel_pos( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_add_rel_pos_f32(params, src0, src1, src2, dst); + ggml_compute_forward_add_rel_pos_f32(params, dst); } break; default: { @@ -14597,9 +14843,11 @@ static void ggml_compute_forward_add_rel_pos( static void ggml_compute_forward_map_unary_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14621,13 +14869,15 @@ static void ggml_compute_forward_map_unary_f32( static void ggml_compute_forward_map_unary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch 
(src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_map_unary_f32(params, src0, dst, fun); + ggml_compute_forward_map_unary_f32(params, dst, fun); } break; default: { @@ -14640,10 +14890,12 @@ static void ggml_compute_forward_map_unary( static void ggml_compute_forward_map_binary_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); @@ -14668,14 +14920,15 @@ static void ggml_compute_forward_map_binary_f32( static void ggml_compute_forward_map_binary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_map_binary_f32(params, src0, src1, dst, fun); + ggml_compute_forward_map_binary_f32(params, dst, fun); } break; default: { @@ -14688,9 +14941,11 @@ static void ggml_compute_forward_map_binary( static void ggml_compute_forward_map_custom1_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, struct ggml_tensor * dst, const ggml_custom1_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14704,10 +14959,12 @@ static void ggml_compute_forward_map_custom1_f32( static void ggml_compute_forward_map_custom2_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, struct ggml_tensor * dst, const ggml_custom2_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14721,11 +14978,13 @@ static void ggml_compute_forward_map_custom2_f32( static void ggml_compute_forward_map_custom3_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, - const struct ggml_tensor * c, struct ggml_tensor * dst, const ggml_custom3_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14739,8 +14998,10 @@ static void ggml_compute_forward_map_custom3_f32( static void ggml_compute_forward_map_custom1( const struct ggml_compute_params * params, - const struct ggml_tensor * a, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14754,9 +15015,11 @@ static void ggml_compute_forward_map_custom1( static void ggml_compute_forward_map_custom2( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14770,10 +15033,12 @@ static void ggml_compute_forward_map_custom2( static void 
ggml_compute_forward_map_custom3( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, - const struct ggml_tensor * c, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[2]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14787,9 +15052,11 @@ static void ggml_compute_forward_map_custom3( static void ggml_compute_forward_cross_entropy_loss_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); GGML_ASSERT(ggml_is_scalar(dst)); @@ -14893,13 +15160,14 @@ static void ggml_compute_forward_cross_entropy_loss_f32( static void ggml_compute_forward_cross_entropy_loss( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_cross_entropy_loss_f32(params, src0, src1, dst); + ggml_compute_forward_cross_entropy_loss_f32(params, dst); } break; default: { @@ -14912,10 +15180,12 @@ static void ggml_compute_forward_cross_entropy_loss( static void ggml_compute_forward_cross_entropy_loss_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * opt0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * opt0 = dst->src[2]; + GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); @@ -15002,14 +15272,14 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( static void ggml_compute_forward_cross_entropy_loss_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * opt0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_cross_entropy_loss_back_f32(params, src0, src1, opt0, dst); + ggml_compute_forward_cross_entropy_loss_back_f32(params, dst); } break; default: { @@ -15057,312 +15327,312 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm switch (tensor->op) { case GGML_OP_DUP: { - ggml_compute_forward_dup(params, tensor->src[0], tensor); + ggml_compute_forward_dup(params, tensor); } break; case GGML_OP_ADD: { - ggml_compute_forward_add(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_add(params, tensor); } break; case GGML_OP_ADD1: { - ggml_compute_forward_add1(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_add1(params, tensor); } break; case GGML_OP_ACC: { - ggml_compute_forward_acc(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_acc(params, tensor); } break; case GGML_OP_SUB: { - ggml_compute_forward_sub(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_sub(params, tensor); } break; case GGML_OP_MUL: { - ggml_compute_forward_mul(params, tensor->src[0], 
tensor->src[1], tensor); + ggml_compute_forward_mul(params, tensor); } break; case GGML_OP_DIV: { - ggml_compute_forward_div(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_div(params, tensor); } break; case GGML_OP_SQR: { - ggml_compute_forward_sqr(params, tensor->src[0], tensor); + ggml_compute_forward_sqr(params, tensor); } break; case GGML_OP_SQRT: { - ggml_compute_forward_sqrt(params, tensor->src[0], tensor); + ggml_compute_forward_sqrt(params, tensor); } break; case GGML_OP_LOG: { - ggml_compute_forward_log(params, tensor->src[0], tensor); + ggml_compute_forward_log(params, tensor); } break; case GGML_OP_SUM: { - ggml_compute_forward_sum(params, tensor->src[0], tensor); + ggml_compute_forward_sum(params, tensor); } break; case GGML_OP_SUM_ROWS: { - ggml_compute_forward_sum_rows(params, tensor->src[0], tensor); + ggml_compute_forward_sum_rows(params, tensor); } break; case GGML_OP_MEAN: { - ggml_compute_forward_mean(params, tensor->src[0], tensor); + ggml_compute_forward_mean(params, tensor); } break; case GGML_OP_ARGMAX: { - ggml_compute_forward_argmax(params, tensor->src[0], tensor); + ggml_compute_forward_argmax(params, tensor); } break; case GGML_OP_REPEAT: { - ggml_compute_forward_repeat(params, tensor->src[0], tensor); + ggml_compute_forward_repeat(params, tensor); } break; case GGML_OP_REPEAT_BACK: { - ggml_compute_forward_repeat_back(params, tensor->src[0], tensor); + ggml_compute_forward_repeat_back(params, tensor); } break; case GGML_OP_CONCAT: { - ggml_compute_forward_concat(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_concat(params, tensor); } break; case GGML_OP_SILU_BACK: { - ggml_compute_forward_silu_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_silu_back(params, tensor); } break; case GGML_OP_NORM: { - ggml_compute_forward_norm(params, tensor->src[0], tensor); + ggml_compute_forward_norm(params, tensor); } break; case GGML_OP_RMS_NORM: { - ggml_compute_forward_rms_norm(params, tensor->src[0], tensor); + ggml_compute_forward_rms_norm(params, tensor); } break; case GGML_OP_RMS_NORM_BACK: { - ggml_compute_forward_rms_norm_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rms_norm_back(params, tensor); } break; case GGML_OP_GROUP_NORM: { - ggml_compute_forward_group_norm(params, tensor->src[0], tensor); + ggml_compute_forward_group_norm(params, tensor); } break; case GGML_OP_MUL_MAT: { - ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_mul_mat(params, tensor); } break; case GGML_OP_MUL_MAT_ID: { - ggml_compute_forward_mul_mat_id(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_mul_mat_id(params, tensor); } break; case GGML_OP_OUT_PROD: { - ggml_compute_forward_out_prod(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_out_prod(params, tensor); } break; case GGML_OP_SCALE: { - ggml_compute_forward_scale(params, tensor->src[0], tensor); + ggml_compute_forward_scale(params, tensor); } break; case GGML_OP_SET: { - ggml_compute_forward_set(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_set(params, tensor); } break; case GGML_OP_CPY: { - ggml_compute_forward_cpy(params, tensor->src[0], tensor); + ggml_compute_forward_cpy(params, tensor); } break; case GGML_OP_CONT: { - ggml_compute_forward_cont(params, tensor->src[0], tensor); + ggml_compute_forward_cont(params, tensor); } break; case GGML_OP_RESHAPE: { - ggml_compute_forward_reshape(params, 
tensor->src[0], tensor); + ggml_compute_forward_reshape(params, tensor); } break; case GGML_OP_VIEW: { - ggml_compute_forward_view(params, tensor->src[0]); + ggml_compute_forward_view(params, tensor); } break; case GGML_OP_PERMUTE: { - ggml_compute_forward_permute(params, tensor->src[0]); + ggml_compute_forward_permute(params, tensor); } break; case GGML_OP_TRANSPOSE: { - ggml_compute_forward_transpose(params, tensor->src[0]); + ggml_compute_forward_transpose(params, tensor); } break; case GGML_OP_GET_ROWS: { - ggml_compute_forward_get_rows(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_get_rows(params, tensor); } break; case GGML_OP_GET_ROWS_BACK: { - ggml_compute_forward_get_rows_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_get_rows_back(params, tensor); } break; case GGML_OP_DIAG: { - ggml_compute_forward_diag(params, tensor->src[0], tensor); + ggml_compute_forward_diag(params, tensor); } break; case GGML_OP_DIAG_MASK_INF: { - ggml_compute_forward_diag_mask_inf(params, tensor->src[0], tensor); + ggml_compute_forward_diag_mask_inf(params, tensor); } break; case GGML_OP_DIAG_MASK_ZERO: { - ggml_compute_forward_diag_mask_zero(params, tensor->src[0], tensor); + ggml_compute_forward_diag_mask_zero(params, tensor); } break; case GGML_OP_SOFT_MAX: { - ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_soft_max(params, tensor); } break; case GGML_OP_SOFT_MAX_BACK: { - ggml_compute_forward_soft_max_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_soft_max_back(params, tensor); } break; case GGML_OP_ROPE: { - ggml_compute_forward_rope(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rope(params, tensor); } break; case GGML_OP_ROPE_BACK: { - ggml_compute_forward_rope_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rope_back(params, tensor); } break; case GGML_OP_ALIBI: { - ggml_compute_forward_alibi(params, tensor->src[0], tensor); + ggml_compute_forward_alibi(params, tensor); } break; case GGML_OP_CLAMP: { - ggml_compute_forward_clamp(params, tensor->src[0], tensor); + ggml_compute_forward_clamp(params, tensor); } break; case GGML_OP_CONV_TRANSPOSE_1D: { - ggml_compute_forward_conv_transpose_1d(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_conv_transpose_1d(params, tensor); } break; case GGML_OP_IM2COL: { - ggml_compute_forward_im2col(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_im2col(params, tensor); } break; case GGML_OP_CONV_TRANSPOSE_2D: { - ggml_compute_forward_conv_transpose_2d(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_conv_transpose_2d(params, tensor); } break; case GGML_OP_POOL_1D: { - ggml_compute_forward_pool_1d(params, tensor->src[0], tensor); + ggml_compute_forward_pool_1d(params, tensor); } break; case GGML_OP_POOL_2D: { - ggml_compute_forward_pool_2d(params, tensor->src[0], tensor); + ggml_compute_forward_pool_2d(params, tensor); } break; case GGML_OP_UPSCALE: { - ggml_compute_forward_upscale(params, tensor->src[0], tensor); + ggml_compute_forward_upscale(params, tensor); } break; case GGML_OP_PAD: { - ggml_compute_forward_pad(params, tensor->src[0], tensor); + ggml_compute_forward_pad(params, tensor); } break; case GGML_OP_ARGSORT: { - ggml_compute_forward_argsort(params, tensor->src[0], tensor); + ggml_compute_forward_argsort(params, tensor); } break; case GGML_OP_LEAKY_RELU: { - 
ggml_compute_forward_leaky_relu(params, tensor->src[0], tensor); + ggml_compute_forward_leaky_relu(params, tensor); } break; case GGML_OP_FLASH_ATTN: { const int32_t t = ggml_get_op_params_i32(tensor, 0); GGML_ASSERT(t == 0 || t == 1); const bool masked = t != 0; - ggml_compute_forward_flash_attn(params, tensor->src[0], tensor->src[1], tensor->src[2], masked, tensor); + ggml_compute_forward_flash_attn(params, masked, tensor); } break; case GGML_OP_FLASH_FF: { - ggml_compute_forward_flash_ff(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], tensor->src[4], tensor); + ggml_compute_forward_flash_ff(params, tensor); } break; case GGML_OP_FLASH_ATTN_BACK: { int32_t t = ggml_get_op_params_i32(tensor, 0); GGML_ASSERT(t == 0 || t == 1); bool masked = t != 0; - ggml_compute_forward_flash_attn_back(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], masked, tensor); + ggml_compute_forward_flash_attn_back(params, masked, tensor); } break; case GGML_OP_WIN_PART: { - ggml_compute_forward_win_part(params, tensor->src[0], tensor); + ggml_compute_forward_win_part(params, tensor); } break; case GGML_OP_WIN_UNPART: { - ggml_compute_forward_win_unpart(params, tensor->src[0], tensor); + ggml_compute_forward_win_unpart(params, tensor); } break; case GGML_OP_UNARY: { - ggml_compute_forward_unary(params, tensor->src[0], tensor); + ggml_compute_forward_unary(params, tensor); } break; case GGML_OP_GET_REL_POS: { - ggml_compute_forward_get_rel_pos(params, tensor->src[0], tensor); + ggml_compute_forward_get_rel_pos(params, tensor); } break; case GGML_OP_ADD_REL_POS: { - ggml_compute_forward_add_rel_pos(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_add_rel_pos(params, tensor); } break; case GGML_OP_MAP_UNARY: { ggml_unary_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_unary(params, tensor->src[0], tensor, fun); + ggml_compute_forward_map_unary(params, tensor, fun); } break; case GGML_OP_MAP_BINARY: { ggml_binary_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_binary(params, tensor->src[0], tensor->src[1], tensor, fun); + ggml_compute_forward_map_binary(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM1_F32: { ggml_custom1_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom1_f32(params, tensor->src[0], tensor, fun); + ggml_compute_forward_map_custom1_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM2_F32: { ggml_custom2_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom2_f32(params, tensor->src[0], tensor->src[1], tensor, fun); + ggml_compute_forward_map_custom2_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM3_F32: { ggml_custom3_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom3_f32(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor, fun); + ggml_compute_forward_map_custom3_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM1: { - ggml_compute_forward_map_custom1(params, tensor->src[0], tensor); + ggml_compute_forward_map_custom1(params, tensor); } break; case GGML_OP_MAP_CUSTOM2: { - ggml_compute_forward_map_custom2(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_map_custom2(params, tensor); } break; case GGML_OP_MAP_CUSTOM3: { - ggml_compute_forward_map_custom3(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + 
ggml_compute_forward_map_custom3(params, tensor); } break; case GGML_OP_CROSS_ENTROPY_LOSS: { - ggml_compute_forward_cross_entropy_loss(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_cross_entropy_loss(params, tensor); } break; case GGML_OP_CROSS_ENTROPY_LOSS_BACK: { - ggml_compute_forward_cross_entropy_loss_back(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_cross_entropy_loss_back(params, tensor); } break; case GGML_OP_NONE: diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 733d8f95b..97f34ac85 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -818eeb8a3be99125746a90ec63af8f51516a2ec6 +4712fd12d7acb9971f850b1b98588f934cb39444 From a00a35cef93e057eace8351a667d14d152a91ebc Mon Sep 17 00:00:00 2001 From: Ettore Di Giacinto Date: Wed, 21 Feb 2024 15:39:10 +0100 Subject: [PATCH 672/811] readme : add LocalAI to the availables UI (#5629) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ce5dec7ca..c1624b9f9 100644 --- a/README.md +++ b/README.md @@ -141,6 +141,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [nat/openplayground](https://github.com/nat/openplayground) - [Faraday](https://faraday.dev/) (proprietary) - [LMStudio](https://lmstudio.ai/) (proprietary) +- [LocalAI](https://github.com/mudler/LocalAI) (MIT) - [LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) - [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) - [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) From 1ecea255ebb70750b52688393f37a63606b90e3f Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Wed, 21 Feb 2024 15:47:48 +0100 Subject: [PATCH 673/811] server: health: fix race condition on slots data using tasks queue (#5634) * server: health: fix race condition on slots data using tasks queue * server: health: * include_slots only if slots_endpoint * fix compile warning task.target_id not initialized. --- examples/server/README.md | 2 + examples/server/server.cpp | 122 ++++++++++++++++++++++++------------- examples/server/utils.hpp | 3 +- 3 files changed, 84 insertions(+), 43 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index f6b9c7402..6d9f96cd4 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -140,6 +140,8 @@ node index.js - 200 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available. - 503 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if the query parameter `fail_on_no_slot` is provided and no slot are currently available. + If the query parameter `include_slots` is passed, `slots` field will contain internal slots data except if `--slots-endpoint-disable` is set. + - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. 
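The server.cpp hunk that follows replaces the direct reads of `llama.slots` in the `/health` and `/slots` handlers with a round trip through the task queue, so slot state is only ever touched by the main loop. A condensed sketch of that request/response pattern, assembled from the lines in the diff below (not a drop-in snippet — error handling and the surrounding handler code are omitted):

```cpp
// queue-based slot query used by /health and /slots (condensed from the diff below)
task_server task;
task.id        = llama.queue_tasks.get_new_id();
task.type      = TASK_TYPE_SLOTS_DATA;
task.target_id = -1; // explicitly initialized, per the "fix compile warning" note in this patch

llama.queue_results.add_waiting_task_id(task.id);
llama.queue_tasks.post(task);                            // the main loop fills in the slots data

task_result result = llama.queue_results.recv(task.id);  // blocks until that task completes
llama.queue_results.remove_waiting_task_id(task.id);

int n_idle_slots       = result.result_json["idle"];
int n_processing_slots = result.result_json["processing"];
// result.result_json["slots"] carries the per-slot details returned when ?include_slots is passed
```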
*Options:* diff --git a/examples/server/server.cpp b/examples/server/server.cpp index eb01729fa..1c4479512 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1394,6 +1394,46 @@ struct llama_server_context case TASK_TYPE_NEXT_RESPONSE: { // do nothing } break; + case TASK_TYPE_SLOTS_DATA: { + json slots_data = json::array(); + int n_idle_slots = 0; + int n_processing_slots = 0; + + for (llama_client_slot &slot: slots) { + if (slot.available()) { + n_idle_slots++; + } else { + n_processing_slots++; + } + json slot_data = get_formated_generation(slot); + slot_data["id"] = slot.id; + slot_data["task_id"] = slot.task_id; + slot_data["state"] = slot.state; + slot_data["prompt"] = slot.prompt; + slot_data["next_token"] = { + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"num_tokens_predicted", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }; + slots_data.push_back(slot_data); + } + LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); + task_result res; + res.id = task.id; + res.multitask_id = task.multitask_id; + res.stop = true; + res.error = false; + res.result_json = { + { "idle", n_idle_slots }, + { "processing", n_processing_slots }, + { "slots", slots_data } + }; + queue_results.send(res); + } break; } } @@ -2557,34 +2597,38 @@ int main(int argc, char **argv) server_state current_state = state.load(); switch(current_state) { case SERVER_STATE_READY: { - int available_slots = 0; - int processing_slots = 0; - for (llama_client_slot &slot: llama.slots) { - if (slot.available()) { - available_slots++; - } else { - processing_slots++; - } + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_SLOTS_DATA; + task.target_id = -1; + + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + int n_idle_slots = result.result_json["idle"]; + int n_processing_slots = result.result_json["processing"]; + + json health = { + {"status", "ok"}, + {"slots_idle", n_idle_slots}, + {"slots_processing", n_processing_slots}}; + res.status = 200; // HTTP OK + if (sparams.slots_endpoint && req.has_param("include_slots")) { + health["slots"] = result.result_json["slots"]; } - if (available_slots > 0) { - json health = { - {"status", "ok"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); - res.status = 200; // HTTP OK - } else { - json health = { - {"status", "no slot available"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); + + if (n_idle_slots == 0) { + health["status"] = "no slot available"; if (req.has_param("fail_on_no_slot")) { res.status = 503; // HTTP Service Unavailable - } else { - res.status = 200; // HTTP OK } } + res.set_content(health.dump(), "application/json"); break; } case SERVER_STATE_LOADING_MODEL: @@ -2600,26 +2644,20 @@ int main(int argc, char **argv) if (sparams.slots_endpoint) { svr.Get("/slots", [&](const httplib::Request&, httplib::Response& res) { - json slots; - for (llama_client_slot & slot : llama.slots) { - json slot_data = 
llama.get_formated_generation(slot); - slot_data["id"] = slot.id; - slot_data["task_id"] = slot.task_id; - slot_data["state"] = slot.state; - slot_data["prompt"] = slot.prompt; - slot_data["next_token"] = { - {"has_next_token", slot.has_next_token}, - {"n_remain", slot.n_remaining}, - {"num_tokens_predicted", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - }; + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_SLOTS_DATA; + task.target_id = -1; - slots.push_back(slot_data); - } - res.set_content(slots.dump(), "application/json"); + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + res.set_content(result.result_json["slots"].dump(), "application/json"); res.status = 200; // HTTP OK }); } diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index e954fb0ef..88545eb69 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -49,7 +49,8 @@ enum server_state { enum task_type { TASK_TYPE_COMPLETION, TASK_TYPE_CANCEL, - TASK_TYPE_NEXT_RESPONSE + TASK_TYPE_NEXT_RESPONSE, + TASK_TYPE_SLOTS_DATA }; struct task_server { From 5022cf242d689e15defd133f96c4345ad30c5d19 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 21 Feb 2024 16:52:39 +0200 Subject: [PATCH 674/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 97f34ac85..bbbf88d9d 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -4712fd12d7acb9971f850b1b98588f934cb39444 +30805514e1bf389a59d30a54a0525cbdc30d5bd1 From 89febfed9322c8849520dc63c93ee4f5fd72556e Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Wed, 21 Feb 2024 10:33:54 -0500 Subject: [PATCH 675/811] examples : do not assume BOS when shifting context (#5622) --- examples/main/main.cpp | 12 +++++++----- examples/server/server.cpp | 13 +++++++------ 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index f5d2f4893..7555dffe4 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -334,6 +334,8 @@ int main(int argc, char ** argv) { // number of tokens to keep when resetting context if (params.n_keep < 0 || params.n_keep > (int) embd_inp.size() || params.instruct || params.chatml) { params.n_keep = (int)embd_inp.size(); + } else { + params.n_keep += add_bos; // always keep the BOS token } // prefix & suffix for instruct mode @@ -383,8 +385,8 @@ int main(int argc, char ** argv) { } } - if (params.n_keep > 0) { - LOG_TEE("%s: static prompt based on n_keep: '", __func__); + if (params.n_keep > add_bos) { + LOG_TEE("%s: static prompt based on n_keep: '", __func__); for (int i = 0; i < params.n_keep; i++) { LOG_TEE("%s", llama_token_to_piece(ctx, embd_inp[i]).c_str()); } @@ -540,14 +542,14 @@ int main(int argc, char ** argv) { break; } - const int n_left = n_past - params.n_keep - 1; + const int n_left = n_past - params.n_keep; const int n_discard = n_left/2; LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", n_past, n_left, n_ctx, params.n_keep, n_discard); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + 
n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, params.n_keep , params.n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, 0, params.n_keep + n_discard, n_past, -n_discard); n_past -= n_discard; diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1c4479512..c84719a0d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1487,14 +1487,15 @@ struct llama_server_context if (slot.is_processing() && system_tokens.size() + slot.cache_tokens.size() >= (size_t) slot.n_ctx) { // Shift context - const int n_left = system_tokens.size() + slot.n_past - slot.params.n_keep - 1; + const int n_keep = slot.params.n_keep + add_bos_token; + const int n_left = system_tokens.size() + slot.n_past - n_keep; const int n_discard = n_left / 2; - LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, slot.params.n_keep, n_left, n_discard); - llama_kv_cache_seq_rm (ctx, slot.id, slot.params.n_keep + 1 , slot.params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, system_tokens.size() + slot.n_past, -n_discard); + LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, n_keep, n_left, n_discard); + llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); - for (size_t i = slot.params.n_keep + 1 + n_discard; i < slot.cache_tokens.size(); i++) + for (size_t i = n_keep + n_discard; i < slot.cache_tokens.size(); i++) { slot.cache_tokens[i - n_discard] = slot.cache_tokens[i]; } @@ -1507,7 +1508,7 @@ struct llama_server_context LOG_VERBOSE("context shift", { { "n_ctx", n_ctx }, - { "n_keep", params.n_keep }, + { "n_keep", n_keep }, { "n_left", n_left }, }); } From ba2135ccae7462470b3865c6e41d2e1d734eac05 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 21 Feb 2024 22:18:23 +0100 Subject: [PATCH 676/811] gemma : allow offloading the output tensor (#5646) --- llama.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 3a226c426..4054d5da6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4394,6 +4394,8 @@ static bool llm_load_tensors( // output model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // same as tok_embd, duplicated to allow offloading + ml.n_created--; // artificial tensor const int64_t n_ff = hparams.n_ff; const int64_t n_embd_head_k = hparams.n_embd_head_k; @@ -7525,7 +7527,7 @@ struct llm_build_context { cb(cur, "result_norm", -1); // lm_head - cur = ggml_mul_mat(ctx0, model.tok_embd, cur); + cur = ggml_mul_mat(ctx0, model.output, cur); cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); From 7fe4678b0244ba7b03eae66ebeaa947e2770bb1a Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 21 Feb 2024 22:52:39 +0100 Subject: [PATCH 677/811] llama : fix session save/load with quantized KV (#5649) --- llama.cpp | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/llama.cpp b/llama.cpp index 4054d5da6..d763cc80c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12176,18 +12176,19 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat data_ctx->write(&kv_used, sizeof(kv_used)); if (kv_buf_size) { - 
const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - std::vector tmp_buf; for (int il = 0; il < (int) n_layer; ++il) { - tmp_buf.resize(elt_size*n_embd_k_gqa*kv_head); + size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + tmp_buf.resize(k_size); ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); // v is not contiguous, copy row by row - tmp_buf.resize(elt_size*kv_head); + size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + tmp_buf.resize(v_row_size); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { - ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*elt_size*n_ctx, tmp_buf.size()); + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*v_row_stride, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); } } @@ -12289,17 +12290,16 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { if (kv_buf_size) { GGML_ASSERT(kv_self.total_size() == kv_buf_size); - const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - for (int il = 0; il < (int) n_layer; ++il) { - size_t k_size = elt_size*n_embd_k_gqa*kv_head; + size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); inp += k_size; // v is not contiguous, copy row by row - size_t v_row_size = elt_size*kv_head; + size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { - ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*elt_size*n_ctx, v_row_size); + ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*v_row_stride, v_row_size); inp += v_row_size; } } From 7c8bcc11dc61cf5930b70cd0168b84afcebe12a9 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 2024 00:31:00 +0100 Subject: [PATCH 678/811] Add docs for llama_chat_apply_template (#5645) * add docs for llama_chat_apply_template * fix typo --- examples/server/README.md | 1 + llama.h | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index 6d9f96cd4..4b24ee5dc 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -41,6 +41,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. +- `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) ## Build diff --git a/llama.h b/llama.h index 8ba20696f..84f196b3b 100644 --- a/llama.h +++ b/llama.h @@ -708,7 +708,7 @@ extern "C" { /// Apply chat template. Inspired by hf apply_chat_template() on python. /// Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model" - /// NOTE: This function only support some known jinja templates. 
It is not a jinja parser. + /// NOTE: This function does not use a jinja parser. It only support a pre-defined list of template. See more: https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template /// @param tmpl A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead. /// @param chat Pointer to a list of multiple llama_chat_message /// @param n_msg Number of llama_chat_message in this chat From 973053d8b0d04809836b3339a50f68d9c842de90 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 22 Feb 2024 00:42:09 +0100 Subject: [PATCH 679/811] llama : fix loading models with shared tok_embd and output (#5651) ggml-ci --- llama.cpp | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index d763cc80c..259f2a3a3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2791,13 +2791,7 @@ struct llama_model_loader { std::vector> read_buf; - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - if (!cur) { - // some tensors may be allocated in a different context - continue; - } - + for (struct ggml_tensor * cur = ggml_get_first_tensor(ctx); cur != NULL; cur = ggml_get_next_tensor(ctx, cur)) { if (progress_callback) { if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { return false; @@ -3722,7 +3716,7 @@ static bool llm_load_tensors( } // create one context per buffer type - size_t ctx_size = ggml_tensor_overhead()*ml.n_tensors; + size_t ctx_size = ggml_tensor_overhead()*(ml.n_tensors + 1); // +1 for models where tok_embd is duplicated as output std::map ctx_map; for (auto & it : buft_layer_count) { struct ggml_init_params params = { @@ -3860,6 +3854,7 @@ static bool llm_load_tensors( } else { model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // needs to be on GPU ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); } } @@ -4396,6 +4391,7 @@ static bool llm_load_tensors( model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // same as tok_embd, duplicated to allow offloading ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); const int64_t n_ff = hparams.n_ff; const int64_t n_embd_head_k = hparams.n_embd_head_k; From 4ef245a92a968ba0f18a5adfd41e51980ce4fdf5 Mon Sep 17 00:00:00 2001 From: Dat Quoc Nguyen <2412555+datquocnguyen@users.noreply.github.com> Date: Thu, 22 Feb 2024 18:15:13 +1000 Subject: [PATCH 680/811] mpt : add optional bias tensors (#5638) Update for MPT with optional bias parameters: to work with PhoGPT and SEA-LION models that were pre-trained with 'bias'. 
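The mechanism, in a minimal self-contained sketch (this is not the real llama_model_loader, whose optional-tensor lookup — the trailing `false` argument to `ml.create_tensor` — appears in the diff below): look each bias tensor up as optional, accept a null handle when the checkpoint omits it, and only add the bias into the graph when the handle is non-null.

```cpp
// Sketch only: illustrates the optional-tensor pattern this patch relies on.
// Names below (tensor_stub, create_tensor) are stand-ins, not llama.cpp APIs.
#include <cstdio>
#include <map>
#include <string>

struct tensor_stub { std::string data; };   // stand-in for a loaded tensor

static tensor_stub * create_tensor(std::map<std::string, tensor_stub> & model_file,
                                   const std::string & name, bool required = true) {
    auto it = model_file.find(name);
    if (it == model_file.end()) {
        if (required) {
            fprintf(stderr, "error: missing required tensor '%s'\n", name.c_str());
        }
        return nullptr;                     // optional tensor absent -> null handle
    }
    return &it->second;
}

int main() {
    // hypothetical checkpoint: classic MPT ships no bias tensors, PhoGPT/SEA-LION do
    std::map<std::string, tensor_stub> model_file = {
        { "blk.0.attn_qkv.weight", { "wqkv data" } },
    };

    tensor_stub * wqkv = create_tensor(model_file, "blk.0.attn_qkv.weight");        // required
    tensor_stub * bqkv = create_tensor(model_file, "blk.0.attn_qkv.bias", false);   // optional

    printf("wqkv present: %s\n", wqkv ? "yes" : "no");
    if (bqkv) {
        printf("adding qkv bias to the graph\n");   // mirrors `if (model.layers[il].bqkv)` below
    } else {
        printf("no qkv bias in this checkpoint, skipping\n");
    }
    return 0;
}
```

PhoGPT and SEA-LION checkpoints simply take the non-null branch everywhere, while existing MPT checkpoints keep the old bias-free path unchanged.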
--- llama.cpp | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/llama.cpp b/llama.cpp index 259f2a3a3..9cae8c761 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4054,6 +4054,8 @@ static bool llm_load_tensors( // output { model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, false); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } @@ -4063,14 +4065,23 @@ static bool llm_load_tensors( auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, false); layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, false); - layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); - layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, false); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, false); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, false); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, false); // AWQ ScaleActivation layer layer.ffn_act = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, false); @@ -6171,7 +6182,7 @@ struct llm_build_context { attn_norm = llm_build_norm(ctx0, inpL, hparams, model.layers[il].attn_norm, - NULL, + model.layers[il].attn_norm_b, LLM_NORM, cb, il); cb(attn_norm, "attn_norm", il); @@ -6181,6 +6192,11 @@ struct llm_build_context { cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); cb(cur, "wqkv", il); + + if (model.layers[il].bqkv){ + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + } if (hparams.f_clamp_kqv > 0.0f) { cur = ggml_clamp(ctx0, cur, -hparams.f_clamp_kqv, hparams.f_clamp_kqv); @@ -6198,7 +6214,7 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, + model.layers[il].wo, model.layers[il].bo, Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6211,13 +6227,13 @@ struct llm_build_context { { cur = llm_build_norm(ctx0, ffn_inp, hparams, 
model.layers[il].ffn_norm, - NULL, + model.layers[il].ffn_norm_b, LLM_NORM, cb, il); cb(cur, "ffn_norm", il); cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, - model.layers[il].ffn_down, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, model.layers[il].ffn_act, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); @@ -6234,7 +6250,7 @@ struct llm_build_context { cur = llm_build_norm(ctx0, cur, hparams, model.output_norm, - NULL, + model.output_norm_b, LLM_NORM, cb, -1); cb(cur, "result_norm", -1); From c5688c6250430d2b8e0259efcf26c16dfa4c1f46 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Thu, 22 Feb 2024 08:27:32 +0000 Subject: [PATCH 681/811] server : clarify some params in the docs (#5640) --- examples/server/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 4b24ee5dc..4b6cd8326 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -151,7 +151,7 @@ node index.js `temperature`: Adjust the randomness of the generated text (default: 0.8). - `dynatemp_range`: Dynamic temperature range (default: 0.0, 0.0 = disabled). + `dynatemp_range`: Dynamic temperature range. The final temperature will be in the range of `[temperature - dynatemp_range; temperature + dynatemp_range]` (default: 0.0, 0.0 = disabled). `dynatemp_exponent`: Dynamic temperature exponent (default: 1.0). @@ -209,7 +209,7 @@ node index.js `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) - `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) + `cache_prompt`: Re-use previously cached prompt from the last request if possible. This may prevent re-caching the prompt from scratch. (default: false) `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) @@ -242,7 +242,7 @@ Notice that each `probs` is an array of length `n_probs`. - `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string. - `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options) -- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model` +- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model`. These options may differ from the original ones in some way (e.g. bad values filtered out, strings converted to tokens, etc.). 
- `model`: The path to the model loaded with `-m` - `prompt`: The provided `prompt` - `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token From a46f50747b2028f7f9c9883b26bfba12bf92556e Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 2024 09:33:24 +0100 Subject: [PATCH 682/811] server : fallback to chatml, add AlphaMonarch chat template (#5628) * server: fallback to chatml * add new chat template * server: add AlphaMonarch to test chat template * server: only check model template if there is no custom tmpl * remove TODO --- examples/server/server.cpp | 15 +++++++++++++++ llama.cpp | 9 +++++++++ tests/test-chat-template.cpp | 23 +++++++++++++++-------- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c84719a0d..369121e88 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -400,6 +400,16 @@ struct llama_server_context return true; } + void validate_model_chat_template(server_params & sparams) { + llama_chat_message chat[] = {{"user", "test"}}; + std::vector buf(1); + int res = llama_chat_apply_template(model, nullptr, chat, 1, true, buf.data(), buf.size()); + if (res < 0) { + LOG_ERROR("The chat template comes with this model is not yet supported, falling back to chatml. This may cause the model to output suboptimal responses", {}); + sparams.chat_template = "<|im_start|>"; // llama_chat_apply_template only checks if <|im_start|> exist in the template + } + } + void initialize() { // create slots all_slots_are_idle = true; @@ -2752,6 +2762,11 @@ int main(int argc, char **argv) LOG_INFO("model loaded", {}); } + if (sparams.chat_template.empty()) { // custom chat template is not supplied + // check if the template comes with the model is supported by us + llama.validate_model_chat_template(sparams); + } + // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { // If API key is not set, skip validation diff --git a/llama.cpp b/llama.cpp index 9cae8c761..055b57e31 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12773,6 +12773,15 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>\n"; } + } else if (tmpl.find("bos_token + message['role']") != std::string::npos) { + // mlabonne/AlphaMonarch-7B template (the is included inside history) + for (auto message : chat) { + std::string bos = (message == chat.front()) ? 
"" : ""; // skip BOS for first message + ss << bos << message->role << "\n" << message->content << "\n"; + } + if (add_ass) { + ss << "assistant\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index 9830650d4..d02b39e14 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -27,12 +27,20 @@ int main(void) { "{%- for idx in range(0, messages|length) -%}\\n{%- if messages[idx]['role'] == 'user' -%}\\n{%- if idx > 1 -%}\\n{{- bos_token + '[INST] ' + messages[idx]['content'] + ' [/INST]' -}}\\n{%- else -%}\\n{{- messages[idx]['content'] + ' [/INST]' -}}\\n{%- endif -%}\\n{% elif messages[idx]['role'] == 'system' %}\\n{{- '[INST] <>\\\\n' + messages[idx]['content'] + '\\\\n<>\\\\n\\\\n' -}}\\n{%- elif messages[idx]['role'] == 'assistant' -%}\\n{{- ' ' + messages[idx]['content'] + ' ' + eos_token -}}\\n{% endif %}\\n{% endfor %}", // bofenghuang/vigogne-2-70b-chat "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<>\\\\n' + system_message + '\\\\n<>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<>\\\\n' + content.strip() + '\\\\n<>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}", + // mlabonne/AlphaMonarch-7B + "{% for message in messages %}{{bos_token + message['role'] + '\\n' + message['content'] + eos_token + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ bos_token + 'assistant\\n' }}{% endif %}", }; - std::vector expected_substr = { - "<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant", - "[/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", - "[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", - "[/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + std::vector expected_output = { + // teknium/OpenHermes-2.5-Mistral-7B + "<|im_start|>system\nYou are a helpful assistant<|im_end|>\n<|im_start|>user\nHello<|im_end|>\n<|im_start|>assistant\nHi there<|im_end|>\n<|im_start|>user\nWho are you<|im_end|>\n<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant\n", + // mistralai/Mistral-7B-Instruct-v0.2 + "[INST] You are a helpful assistant\nHello [/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + // TheBloke/FusionNet_34Bx2_MoE-AWQ + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another 
question [/INST]", + // bofenghuang/vigogne-2-70b-chat + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + // mlabonne/AlphaMonarch-7B + "system\nYou are a helpful assistant\nuser\nHello\nassistant\nHi there\nuser\nWho are you\nassistant\n I am an assistant \nuser\nAnother question\nassistant\n", }; std::vector formatted_chat(1024); int32_t res; @@ -43,7 +51,7 @@ int main(void) { for (size_t i = 0; i < templates.size(); i++) { std::string custom_template = templates[i]; - std::string substr = expected_substr[i]; + std::string expected = expected_output[i]; formatted_chat.resize(1024); res = llama_chat_apply_template( nullptr, @@ -57,8 +65,7 @@ int main(void) { formatted_chat.resize(res); std::string output(formatted_chat.data(), formatted_chat.size()); std::cout << output << "\n-------------------------\n"; - // expect the "formatted_chat" to contain pre-defined strings - assert(output.find(substr) != std::string::npos); + assert(output == expected); } return 0; } From 56d03d92be57f5880b9ed94542d87bb6effae31f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 10:35:54 +0200 Subject: [PATCH 683/811] readme : update hot topics --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index c1624b9f9..3bc512af0 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Hot topics +- Support for chat templates: [Wiki (contributions welcome)](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) - Support for Gemma models: https://github.com/ggerganov/llama.cpp/pull/5631 - Non-linear quantization IQ4_NL: https://github.com/ggerganov/llama.cpp/pull/5590 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 From 3a03541cedea474fa9d41214484cc3fbcf468a9e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 13:54:03 +0200 Subject: [PATCH 684/811] minor : fix trailing whitespace (#5638) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 055b57e31..6ab5e1bf4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6192,7 +6192,7 @@ struct llm_build_context { cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); cb(cur, "wqkv", il); - + if (model.layers[il].bqkv){ cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); From 4cb4d8b22d4fda971621a68c570ce84d66897c37 Mon Sep 17 00:00:00 2001 From: Someone Date: Thu, 22 Feb 2024 16:32:09 +0000 Subject: [PATCH 685/811] workflows: nix: hardcode cachix ids, build unconditionally (#5663) GitHub does not expose environment and repository variables to PRs coming from forks implies that we've been disabling the Nix CI actions for most PRs. The `if:` also didn't make much sense, because we can always pull from cachix, and there's no point (albeit no risk either) in pushing cache for the untrusted code. 
--- .github/workflows/nix-ci-aarch64.yml | 7 +++---- .github/workflows/nix-ci.yml | 11 +++++------ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/nix-ci-aarch64.yml b/.github/workflows/nix-ci-aarch64.yml index 0c6cf5f09..8d0a3fd7f 100644 --- a/.github/workflows/nix-ci-aarch64.yml +++ b/.github/workflows/nix-ci-aarch64.yml @@ -19,7 +19,6 @@ on: jobs: nix-build-aarch64: - if: ${{ vars.CACHIX_NAME != '' }} runs-on: ubuntu-latest steps: - name: Checkout repository @@ -37,8 +36,8 @@ jobs: extra-conf: | extra-platforms = aarch64-linux extra-system-features = nixos-test kvm - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -46,7 +45,7 @@ jobs: uses: cachix/cachix-action@v13 with: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} + name: llama-cpp - name: Show all output paths run: > nix run github:nix-community/nix-eval-jobs diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index d19c7a576..01c5a9d5a 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -23,8 +23,8 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} extra-conf: | - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -37,7 +37,6 @@ jobs: --flake ".#packages.$(nix eval --raw --impure --expr builtins.currentSystem)" nix-build: - if: ${{ vars.CACHIX_NAME != '' }} strategy: fail-fast: false matrix: @@ -51,8 +50,8 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} extra-conf: | - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -60,7 +59,7 @@ jobs: uses: cachix/cachix-action@v13 with: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} + name: llama-cpp - name: Build run: > nix run github:Mic92/nix-fast-build From 373ee3fbbabc4c1508eed4f5c3795b23a20939a3 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 
2024 19:10:21 +0100 Subject: [PATCH 686/811] Add Gemma chat template (#5665) * add gemma chat template * gemma: only apply system_prompt on non-model message --- llama.cpp | 22 ++++++++++++++++++++++ tests/test-chat-template.cpp | 4 ++++ 2 files changed, 26 insertions(+) diff --git a/llama.cpp b/llama.cpp index 6ab5e1bf4..40dda265c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12782,6 +12782,28 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "assistant\n"; } + } else if (tmpl.find("") != std::string::npos) { + // google/gemma-7b-it + std::string system_prompt = ""; + for (auto message : chat) { + std::string role(message->role); + if (role == "system") { + // there is no system message for gemma, but we will merge it with user prompt, so nothing is broken + system_prompt = trim(message->content); + continue; + } + // in gemma, "assistant" is "model" + role = role == "assistant" ? "model" : message->role; + ss << "" << role << "\n"; + if (!system_prompt.empty() && role != "model") { + ss << system_prompt << "\n\n"; + system_prompt = ""; + } + ss << trim(message->content) << "\n"; + } + if (add_ass) { + ss << "model\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index d02b39e14..fa2eb577b 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -29,6 +29,8 @@ int main(void) { "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' 
%}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<>\\\\n' + system_message + '\\\\n<>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<>\\\\n' + content.strip() + '\\\\n<>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}", // mlabonne/AlphaMonarch-7B "{% for message in messages %}{{bos_token + message['role'] + '\\n' + message['content'] + eos_token + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ bos_token + 'assistant\\n' }}{% endif %}", + // google/gemma-7b-it + "{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '' + role + '\\n' + message['content'] | trim + '\\n' }}{% endfor %}{% if add_generation_prompt %}{{'model\\n'}}{% endif %}", }; std::vector expected_output = { // teknium/OpenHermes-2.5-Mistral-7B @@ -41,6 +43,8 @@ int main(void) { "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", // mlabonne/AlphaMonarch-7B "system\nYou are a helpful assistant\nuser\nHello\nassistant\nHi there\nuser\nWho are you\nassistant\n I am an assistant \nuser\nAnother question\nassistant\n", + // google/gemma-7b-it + "user\nYou are a helpful assistant\n\nHello\nmodel\nHi there\nuser\nWho are you\nmodel\nI am an assistant\nuser\nAnother question\nmodel\n", }; std::vector formatted_chat(1024); int32_t res; From 5a9e2f60ba3d8362ba17c77ac3092906d49b813f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 20:13:25 +0200 Subject: [PATCH 687/811] py : minor fixes (#5668) --- convert-hf-to-gguf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 9771fccf9..8630bbf29 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -655,6 +655,8 @@ class OrionModel(Model): self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) self.gguf_writer.add_head_count(head_count) self.gguf_writer.add_head_count_kv(head_count_kv) + # note: config provides rms norm but it is actually layer norm + # ref: https://huggingface.co/OrionStarAI/Orion-14B-Chat/blob/276a17221ce42beb45f66fac657a41540e71f4f5/modeling_orion.py#L570-L571 self.gguf_writer.add_layer_norm_eps(self.hparams["rms_norm_eps"]) def write_tensors(self): @@ -1031,7 +1033,6 @@ class PersimmonModel(Model): self.gguf_writer.add_head_count_kv(head_count_kv) self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) - self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) def set_vocab(self): self._set_vocab_sentencepiece() From 201294ae177b308fb3a99dc504dd6d27e8afa907 Mon 
Sep 17 00:00:00 2001 From: Someone Date: Thu, 22 Feb 2024 19:44:10 +0000 Subject: [PATCH 688/811] nix: init singularity and docker images (#5056) Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images re-using llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}` and it's fast and effective. --- .devops/nix/docker.nix | 37 +++++++++++++++++++++++++++++++++++++ .devops/nix/scope.nix | 3 +++ .devops/nix/sif.nix | 27 +++++++++++++++++++++++++++ 3 files changed, 67 insertions(+) create mode 100644 .devops/nix/docker.nix create mode 100644 .devops/nix/sif.nix diff --git a/.devops/nix/docker.nix b/.devops/nix/docker.nix new file mode 100644 index 000000000..d607b4575 --- /dev/null +++ b/.devops/nix/docker.nix @@ -0,0 +1,37 @@ +{ + lib, + dockerTools, + buildEnv, + llama-cpp, + interactive ? true, + coreutils, +}: + +# A tar that can be fed into `docker load`: +# +# $ nix build .#llamaPackages.docker +# $ docker load < result + +# For details and variations cf. +# - https://nixos.org/manual/nixpkgs/unstable/#ssec-pkgs-dockerTools-buildLayeredImage +# - https://discourse.nixos.org/t/a-faster-dockertools-buildimage-prototype/16922 +# - https://nixery.dev/ + +# Approximate (compressed) sizes, at the time of writing, are: +# +# .#llamaPackages.docker: 125M; +# .#llamaPackagesCuda.docker: 537M; +# .#legacyPackages.aarch64-linux.llamaPackagesXavier.docker: 415M. + +dockerTools.buildLayeredImage { + name = llama-cpp.pname; + tag = "latest"; + + contents = + [ llama-cpp ] + ++ lib.optionals interactive [ + coreutils + dockerTools.binSh + dockerTools.caCertificates + ]; +} diff --git a/.devops/nix/scope.nix b/.devops/nix/scope.nix index d295995a4..78530c9e8 100644 --- a/.devops/nix/scope.nix +++ b/.devops/nix/scope.nix @@ -12,5 +12,8 @@ lib.makeScope newScope ( self: { inherit llamaVersion; llama-cpp = self.callPackage ./package.nix { }; + docker = self.callPackage ./docker.nix { }; + docker-min = self.callPackage ./docker.nix { interactive = false; }; + sif = self.callPackage ./sif.nix { }; } ) diff --git a/.devops/nix/sif.nix b/.devops/nix/sif.nix new file mode 100644 index 000000000..7535ca0f3 --- /dev/null +++ b/.devops/nix/sif.nix @@ -0,0 +1,27 @@ +{ + lib, + singularity-tools, + llama-cpp, + bashInteractive, + interactive ? false, +}: + +let + optionalInt = cond: x: if cond then x else 0; +in +singularity-tools.buildImage rec { + inherit (llama-cpp) name; + contents = [ llama-cpp ] ++ lib.optionals interactive [ bashInteractive ]; + + # These are excessive (but safe) for most variants. Building singularity + # images requires superuser privileges, so we build them inside a VM in a + # writable image of pre-determined size. 
+ # + # ROCm is currently affected by https://github.com/NixOS/nixpkgs/issues/276846 + # + # Expected image sizes: + # - cpu/blas: 150M, + # - cuda, all gencodes: 560M, + diskSize = 4096 + optionalInt llama-cpp.useRocm 16384; + memSize = diskSize; +} From efd56b1c2139d50b9b4381a212feb75d69598fda Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 18:31:40 +0200 Subject: [PATCH 689/811] ggml : 32-bit arm compat (whisper/1891) * ggml : 32-bit arm compat * ggml : add ggml_vqtbl1q_s8 impl * ggml : cont --- ggml-quants.c | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 6336538f0..8917c8af1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -438,6 +438,30 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { return res; } +// NOTE: not tested +inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { + int8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + #else #define ggml_int16x8x2_t int16x8x2_t @@ -451,6 +475,7 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #define ggml_vld1q_u8_x4 vld1q_u8_x4 #define ggml_vld1q_s8_x2 vld1q_s8_x2 #define ggml_vld1q_s8_x4 vld1q_s8_x4 +#define ggml_vqtbl1q_s8 vqtbl1q_s8 #endif @@ -9333,7 +9358,7 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const uint16_t gindex[8]; uint16x8x2_t vindex; int8x16x4_t q1b; - int8x16x4_t q8b; + ggml_int8x16x4_t q8b; uint16x8x4_t scales; int32x4x2_t sumi; int32x4x2_t dotq; @@ -9506,10 +9531,10 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * q8b.val[2] = vld1q_s8(y[ib+1].qs); q8b.val[3] = vld1q_s8(y[ib+1].qs + 16); - q4b.val[0] = vqtbl1q_s8(values, vandq_u8(q4bits.val[0], m4b)); - q4b.val[1] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); - q4b.val[2] = vqtbl1q_s8(values, vandq_u8(q4bits.val[1], m4b)); - q4b.val[3] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + q4b.val[0] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[0], m4b)); + q4b.val[1] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[1], m4b)); + q4b.val[3] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); From 334f76fa385ed81095165e5ae068756214893901 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:21:05 +0200 Subject: [PATCH 690/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index bbbf88d9d..59de34370 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -30805514e1bf389a59d30a54a0525cbdc30d5bd1 +8cdf783f288a98eddf521b0ab1b4d405be9e18ba From 7e4f339c404dbe029d4a117c03b37a9bf646cf0e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:21:39 +0200 Subject: [PATCH 691/811] ggml : always define ggml_fp16_t as uint16_t (#5666) * ggml : always define ggml_fp16_t 
as uint16_t ggml-ci * ggml : cont ggml-ci * ggml : cont * ggml : cont ggml-ci * ggml : cont ggml-ci * cuda : no longer ggml headers last ggml-ci * ggml : fix q6_K FP16 -> FP32 conversion ggml-ci * ggml : more FP16 -> FP32 conversion fixes ggml-ci --- ggml-cuda.cu | 9 ++++----- ggml-impl.h | 27 ++++++++++++++++++++------- ggml-quants.c | 30 +++++++++++++++--------------- ggml.c | 6 +++--- ggml.h | 6 ------ 5 files changed, 42 insertions(+), 36 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e7c211d7d..b0e454e02 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1,3 +1,7 @@ +#include "ggml-cuda.h" +#include "ggml.h" +#include "ggml-backend-impl.h" + #include #include #include @@ -121,11 +125,6 @@ #endif // defined(GGML_USE_HIPBLAS) -// ggml-cuda need half type so keep ggml headers include at last -#include "ggml-cuda.h" -#include "ggml.h" -#include "ggml-backend-impl.h" - #define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed) #define CC_PASCAL 600 diff --git a/ggml-impl.h b/ggml-impl.h index 19df66bce..c5637e4d4 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -53,11 +53,23 @@ extern "C" { // #include -#define GGML_COMPUTE_FP16_TO_FP32(x) ((float) (x)) -#define GGML_COMPUTE_FP32_TO_FP16(x) (x) +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) -#define GGML_FP16_TO_FP32(x) ((float) (x)) -#define GGML_FP32_TO_FP16(x) (x) +#define GGML_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + __fp16 tmp; + memcpy(&tmp, &h, sizeof(ggml_fp16_t)); + return (float)tmp; +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { + ggml_fp16_t res; + __fp16 tmp = f; + memcpy(&res, &tmp, sizeof(ggml_fp16_t)); + return res; +} #else @@ -214,8 +226,7 @@ extern float ggml_table_f32_f16[1 << 16]; // On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, // so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. // This is also true for POWER9. 
-#if !defined(GGML_FP16_TO_FP32) || !defined(GGML_FP32_TO_FP16) - +#if !defined(GGML_FP16_TO_FP32) inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { uint16_t s; memcpy(&s, &f, sizeof(uint16_t)); @@ -223,8 +234,10 @@ inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { } #define GGML_FP16_TO_FP32(x) ggml_lookup_fp16_to_fp32(x) -#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) +#endif +#if !defined(GGML_FP32_TO_FP16) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) #endif #define GGML_HASHTABLE_FULL ((size_t)-1) diff --git a/ggml-quants.c b/ggml-quants.c index 8917c8af1..b15977f53 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -5654,8 +5654,8 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; - const float dmin = -y[i].d * (float)x[i].dmin; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -5804,8 +5804,8 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; - const float dmin = -y[i].d * (float)x[i].dmin; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -6458,7 +6458,7 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * r int32_t isum = -4*(scales[0] * y[i].bsums[0] + scales[2] * y[i].bsums[1] + scales[1] * y[i].bsums[2] + scales[3] * y[i].bsums[3]); - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8x16_t htmp = vcombine_u8(hbits, vshr_n_u8(hbits, 1)); q3h.val[0] = vandq_u8(mh, vshlq_n_u8(htmp, 2)); @@ -6660,7 +6660,7 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * r int32_t isum = -4*(scales[0] * y[i].bsums[0] + scales[2] * y[i].bsums[1] + scales[1] * y[i].bsums[2] + scales[3] * y[i].bsums[3]); - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1); @@ -7163,9 +7163,9 @@ void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * r aux16[1] = (a[0] >> 4) & 0x0f0f; const int32_t summi = scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3]); - sum_mins += y[i].d * (float)x[i].d[1] * summi; + sum_mins += y[i].d * GGML_FP16_TO_FP32(x[i].d[1]) * summi; - const float d = y[i].d * (float)x[i].d[0]; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d[0]); const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); @@ -7823,7 +7823,7 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const int8_t * sc = x[i].scales; const uint8_t * restrict q5 = x[i].qs; @@ -7965,7 +7965,7 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const int8_t * sc = x[i].scales; const uint8_t * restrict q5 = x[i].qs; @@ -8533,7 +8533,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r for 
(int i = 0; i < nb; ++i) { - const float d_all = (float)x[i].d; + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -8704,7 +8704,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d_all = (float)x[i].d; + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -9523,7 +9523,6 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * float sumf = 0; for (int ib = 0; ib < nb; ib += 2) { - q4bits.val[0] = vld1q_u8(x[ib+0].qs); q4bits.val[1] = vld1q_u8(x[ib+1].qs); q8b.val[0] = vld1q_s8(y[ib+0].qs); @@ -9539,8 +9538,9 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); - sumf += (float)x[ib+0].d * (float)y[ib+0].d * vaddvq_s32(prod_1) + (float)x[ib+1].d * (float)y[ib+1].d * vaddvq_s32(prod_2); - + sumf += + GGML_FP16_TO_FP32(x[ib+0].d) * GGML_FP16_TO_FP32(y[ib+0].d) * vaddvq_s32(prod_1) + + GGML_FP16_TO_FP32(x[ib+1].d) * GGML_FP16_TO_FP32(y[ib+1].d) * vaddvq_s32(prod_2); } *s = sumf; diff --git a/ggml.c b/ggml.c index 5b9fa741a..d710fe702 100644 --- a/ggml.c +++ b/ggml.c @@ -323,7 +323,7 @@ float ggml_table_f32_f16[1 << 16]; // note: do not use these inside ggml.c // these are meant to be used via the ggml.h API float ggml_fp16_to_fp32(ggml_fp16_t x) { - return (float) GGML_FP16_TO_FP32(x); + return GGML_FP16_TO_FP32(x); } ggml_fp16_t ggml_fp32_to_fp16(float x) { @@ -798,7 +798,7 @@ inline static float vaddvq_f32(float32x4_t v) { #define GGML_F16x8 float16x8_t #define GGML_F16x8_ZERO vdupq_n_f16(0.0f) #define GGML_F16x8_SET1(x) vdupq_n_f16(x) - #define GGML_F16x8_LOAD vld1q_f16 + #define GGML_F16x8_LOAD(x) vld1q_f16((const __fp16 *)(x)) #define GGML_F16x8_STORE vst1q_f16 #define GGML_F16x8_FMA(a, b, c) vfmaq_f16(a, b, c) #define GGML_F16x8_ADD vaddq_f16 @@ -841,7 +841,7 @@ inline static float vaddvq_f32(float32x4_t v) { #define GGML_F32Cx4 float32x4_t #define GGML_F32Cx4_ZERO vdupq_n_f32(0.0f) #define GGML_F32Cx4_SET1(x) vdupq_n_f32(x) - #define GGML_F32Cx4_LOAD(x) vcvt_f32_f16(vld1_f16(x)) + #define GGML_F32Cx4_LOAD(x) vcvt_f32_f16(vld1_f16((const __fp16 *)(x))) #define GGML_F32Cx4_STORE(x, y) vst1_f16(x, vcvt_f16_f32(y)) #define GGML_F32Cx4_FMA(a, b, c) vfmaq_f32(a, b, c) #define GGML_F32Cx4_ADD vaddq_f32 diff --git a/ggml.h b/ggml.h index bed7a36a0..37eff6279 100644 --- a/ggml.h +++ b/ggml.h @@ -315,13 +315,7 @@ extern "C" { #endif -#if defined(__ARM_NEON) && defined(__CUDACC__) - typedef half ggml_fp16_t; -#elif defined(__ARM_NEON) && !defined(_MSC_VER) - typedef __fp16 ggml_fp16_t; -#else typedef uint16_t ggml_fp16_t; -#endif // convert FP16 <-> FP32 GGML_API float ggml_fp16_to_fp32(ggml_fp16_t x); From 847eedbdb2d1ebf14ef56eb507d4b4b975510908 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:22:48 +0200 Subject: [PATCH 692/811] py : add Gemma conversion from HF models (#5647) * py : add gemma conversion from HF models * Update convert-hf-to-gguf.py Co-authored-by: Aarni Koskela * Update convert-hf-to-gguf.py Co-authored-by: Aarni Koskela * Update convert-hf-to-gguf.py Co-authored-by: Jared Van Bortel --------- Co-authored-by: Aarni Koskela Co-authored-by: Jared Van Bortel --- 
convert-hf-to-gguf.py | 60 +++++++++++++++++++++++++++++++++++++++++++ llama.cpp | 3 +++ 2 files changed, 63 insertions(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 8630bbf29..481198dad 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -218,6 +218,8 @@ class Model: return BertModel if model_architecture == "NomicBertModel": return NomicBertModel + if model_architecture == "GemmaForCausalLM": + return GemmaModel return Model def _is_model_safetensors(self) -> bool: @@ -277,6 +279,8 @@ class Model: return gguf.MODEL_ARCH.BERT if arch == "NomicBertModel": return gguf.MODEL_ARCH.NOMIC_BERT + if arch == "GemmaForCausalLM": + return gguf.MODEL_ARCH.GEMMA raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1786,6 +1790,62 @@ class NomicBertModel(BertModel): yield name, data +class GemmaModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + def set_gguf_parameters(self): + hparams = self.hparams + block_count = hparams["num_hidden_layers"] + + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(hparams["hidden_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) + self.gguf_writer.add_head_count(hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"] if "num_key_value_heads" in hparams else hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_key_length(hparams["head_dim"]) + self.gguf_writer.add_value_length(hparams["head_dim"]) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + for name, data_torch in self.get_tensors(): + # ref: https://github.com/huggingface/transformers/blob/fc37f38915372c15992b540dfcbbe00a916d4fc6/src/transformers/models/gemma/modeling_gemma.py#L89 + if name.endswith("norm.weight"): + data_torch = data_torch + 1 + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### diff --git a/llama.cpp b/llama.cpp index 40dda265c..7770fa0e8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7450,6 +7450,7 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); cb(inpL, "inp_embd", -1); + inpL = ggml_scale(ctx0, inpL, sqrtf(n_embd)); cb(inpL, "inp_scaled", -1); @@ -7491,6 +7492,7 @@ struct llm_build_context { n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, 
beta_slow); cb(Qcur, "Qcur", il); + Qcur = ggml_scale(ctx0, Qcur, 1.0f / sqrtf(float(n_embd_head_k))); cb(Qcur, "Qcur_scaled", il); @@ -7505,6 +7507,7 @@ struct llm_build_context { Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); cb(cur, "kqv_out", il); } + struct ggml_tensor * sa_out = ggml_add(ctx0, cur, inpL); cb(sa_out, "sa_out", il); From 96633eeca1265ed03e57230de54032041c58f9cd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:23:46 +0200 Subject: [PATCH 693/811] gemma : use more bits for the token_embd.weight tensor (#5650) * gemma : use Q8_0 for the token_embd.weight tensor * llama : quantize token_embd.weight using output type --- llama.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 7770fa0e8..2ebd40df2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10498,7 +10498,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty return std::make_pair(i_layer, n_layer); }; - if (name == tn(LLM_TENSOR_OUTPUT, "weight")) { + // for arches that share the same tensor between the token embeddings and the output, we quantize the token embeddings + // with the quantization of the output tensor + if (name == tn(LLM_TENSOR_OUTPUT, "weight") || + (LLM_TENSOR_NAMES.at(arch).find(LLM_TENSOR_OUTPUT) == LLM_TENSOR_NAMES.at(arch).end() && name == "token_embd.weight")) { int nx = tensor->ne[0]; if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; From 15499eb94227401bdc8875da6eb85c15d37068f7 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Thu, 22 Feb 2024 17:05:23 -0500 Subject: [PATCH 694/811] mpt : do not duplicate token_embd.weight on disk (#5670) --- convert-hf-to-gguf.py | 5 ----- llama.cpp | 6 ++++-- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 481198dad..9bdfce07a 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -622,11 +622,6 @@ class MPTModel(Model): self.gguf_writer.add_tensor(new_name, data) - # note: MPT output is tied to (same as) wte in original model; - # for easier implementation in llama.cpp it's duplicated in GGUF, though :/ - if new_name == "token_embd.weight": - self.gguf_writer.add_tensor("output.weight", data) - class OrionModel(Model): def set_vocab(self): diff --git a/llama.cpp b/llama.cpp index 2ebd40df2..37477e6ef 100644 --- a/llama.cpp +++ b/llama.cpp @@ -509,7 +509,6 @@ static std::map> LLM_TENSOR_NAMES = { { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, - { LLM_TENSOR_OUTPUT, "output" }, { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, @@ -4056,7 +4055,10 @@ static bool llm_load_tensors( model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, false); - model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + // same as tok_embd, duplicated to allow offloading + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); } for (int i = 0; i < n_layer; ++i) { From 54fbcd2ce6c48c9e22eca6fbf9e53fb68c3e72ea Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Fri, 23 Feb 2024 13:39:14 -0500 Subject: 
[PATCH 695/811] convert : fix missing ftype for gemma (#5690) --- convert-hf-to-gguf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 9bdfce07a..32d54b45f 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1803,6 +1803,7 @@ class GemmaModel(Model): self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) self.gguf_writer.add_key_length(hparams["head_dim"]) self.gguf_writer.add_value_length(hparams["head_dim"]) + self.gguf_writer.add_file_type(self.ftype) def write_tensors(self): block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) From fd43d66f46ee3b5345fb8a74a252d86ccd34a409 Mon Sep 17 00:00:00 2001 From: AlpinDale <52078762+AlpinDale@users.noreply.github.com> Date: Fri, 23 Feb 2024 19:31:54 +0000 Subject: [PATCH 696/811] server : add KV cache quantization options (#5684) --- examples/server/server.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 369121e88..524d0ada3 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1948,6 +1948,10 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" -spf FNAME, --system-prompt-file FNAME\n"); printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); + printf(" -ctk TYPE, --cache-type-k TYPE\n"); + printf(" KV cache data type for K (default: f16)\n"); + printf(" -ctv TYPE, --cache-type-v TYPE\n"); + printf(" KV cache data type for V (default: f16)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); @@ -2386,6 +2390,12 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, ); llama.process_system_prompt_data(json::parse(systm_content)); } + else if (arg == "-ctk" || arg == "--cache-type-k") { + params.cache_type_k = argv[++i]; + } + else if (arg == "-ctv" || arg == "--cache-type-v") { + params.cache_type_v = argv[++i]; + } else if(arg == "--mmproj") { if (++i >= argc) From 525213d2f5da1eaf4b922b6b792cb52b2c613368 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 24 Feb 2024 12:28:55 +0100 Subject: [PATCH 697/811] server: init functional tests (#5566) * server: tests: init scenarios - health and slots endpoints - completion endpoint - OAI compatible chat completion requests w/ and without streaming - completion multi users scenario - multi users scenario on OAI compatible endpoint with streaming - multi users with total number of tokens to predict exceeds the KV Cache size - server wrong usage scenario, like in Infinite loop of "context shift" #3969 - slots shifting - continuous batching - embeddings endpoint - multi users embedding endpoint: Segmentation fault #5655 - OpenAI-compatible embeddings API - tokenize endpoint - CORS and api key scenario * server: CI GitHub workflow --------- Co-authored-by: Georgi Gerganov --- .github/ISSUE_TEMPLATE/bug.md | 2 + .github/workflows/server.yml | 127 ++++ examples/server/README.md | 6 + examples/server/server.cpp | 36 +- examples/server/tests/README.md | 46 ++ examples/server/tests/features/environment.py | 67 ++ examples/server/tests/features/issues.feature | 36 + 
.../server/tests/features/parallel.feature | 77 ++ .../server/tests/features/security.feature | 50 ++ examples/server/tests/features/server.feature | 69 ++ examples/server/tests/features/steps/steps.py | 709 ++++++++++++++++++ .../tests/features/wrong_usages.feature | 21 + examples/server/tests/requirements.txt | 3 + examples/server/tests/tests.sh | 12 + 14 files changed, 1243 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/server.yml create mode 100644 examples/server/tests/README.md create mode 100644 examples/server/tests/features/environment.py create mode 100644 examples/server/tests/features/issues.feature create mode 100644 examples/server/tests/features/parallel.feature create mode 100644 examples/server/tests/features/security.feature create mode 100644 examples/server/tests/features/server.feature create mode 100644 examples/server/tests/features/steps/steps.py create mode 100644 examples/server/tests/features/wrong_usages.feature create mode 100644 examples/server/tests/requirements.txt create mode 100755 examples/server/tests/tests.sh diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index ce69e6395..49812832c 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -7,3 +7,5 @@ assignees: '' --- Please include information about your system, the steps to reproduce the bug, and the version of llama.cpp that you are using. If possible, please provide a minimal code example that reproduces the bug. + +If the bug concerns the server, please try to reproduce it first using the [server test scenario framework](https://github.com/ggerganov/llama.cpp/tree/master/examples/server/tests). diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml new file mode 100644 index 000000000..ed27dc528 --- /dev/null +++ b/.github/workflows/server.yml @@ -0,0 +1,127 @@ +# Server build and tests +name: Server + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + - test/server-add-ci-test # FIXME remove + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + +jobs: + server: + runs-on: ubuntu-latest + + strategy: + matrix: + build: [noavx, avx2, avx, avx512, cublas, clblast, openblas, kompute, vulkan] + sanitizer: [ADDRESS, THREAD, UNDEFINED] + build_type: [Debug, Release] + include: + - build: 'noavx' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX=OFF -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF' + image: ubuntu:latest + - build: 'avx2' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON' + image: ubuntu:latest + - build: 'avx' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX2=OFF' + image: ubuntu:latest + - build: 'avx512' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX512=ON' + image: ubuntu:latest + experimental: true + - build: 'cublas' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_CUBLAS=ON' + image: nvidia/cuda:12.3.1-devel-ubuntu22.04 + arch_not_available: true # require nvidia docker engine + - build: 'clblast' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_CLBLAST=ON' + image: ubuntu:latest + arch_not_available: true + - build: 'openblas' + defines: '-DLLAMA_NATIVE=OFF 
-DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS' + image: ubuntu:latest + - build: 'kompute' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON' + image: ubuntu:latest + arch_not_available: true + - build: 'vulkan' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_VULKAN=ON' + image: ubuntu:latest + arch_not_available: true + + container: + image: ${{ matrix.image }} + ports: + - 8888 + options: --cpus 4 + + steps: + - name: Clone + id: checkout + uses: actions/checkout@v3 + + - name: Dependencies + id: depends + run: | + apt-get update + apt-get -y install \ + build-essential \ + pkg-config \ + git \ + cmake \ + python3-pip \ + wget \ + psmisc + + - name: Download CLBlast + id: get_clblast + if: ${{ matrix.build == 'clblast' }} + run: | + apt install -y libclblast-dev + + - name: Download OpenBLAS + id: get_openblas + if: ${{ matrix.build == 'openblas' }} + run: | + apt-get -y install libopenblas-dev + + - name: Install Vulkan SDK + id: get_vulkan + if: ${{ matrix.build == 'kompute' || matrix.build == 'vulkan' }} + run: | + wget -qO- https://packages.lunarg.com/lunarg-signing-key-pub.asc | tee /etc/apt/trusted.gpg.d/lunarg.asc + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list http://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list + apt-get update + apt-get -y install vulkan-sdk + + - name: Build + id: cmake_build + run: | + mkdir build + cd build + cmake .. -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} ${{ matrix.defines }} + cmake --build . --config ${{ matrix.build_type }} -j $(nproc) --target server + + - name: Tests dependencies + id: test_dependencies + run: | + pip install -r examples/server/tests/requirements.txt + + - name: Download models + id: download_models + run: | + cd examples/server/tests + ../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf + + - name: Tests + id: server_integration_test + continue-on-error: ${{ matrix.experimental || matrix.arch_not_available }} + run: | + cd examples/server/tests + PORT=8888 ./tests.sh diff --git a/examples/server/README.md b/examples/server/README.md index 4b6cd8326..0c43ac4c9 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -98,6 +98,12 @@ curl --request POST \ --data '{"prompt": "Building a website can be done in 10 simple steps:","n_predict": 128}' ``` +## Advanced testing + +We implemented a [server test framework](./tests/README.md) using human-readable scenario. + +*Before submitting an issue, please try to reproduce it with this format.* + ## Node JS Test You need to have [Node.js](https://nodejs.org/en) installed. 
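A quick way to sanity-check a running server outside of `behave` is to poll the same `/health` endpoint the scenarios rely on. The snippet below is a minimal sketch only, assuming a server already listening on `localhost:8080`; the helper name `wait_until_healthy` is purely illustrative.

```python
# Minimal sketch: poll the server's /health endpoint until it reports "ok",
# mirroring what the behave steps do. Assumes a llama.cpp server on localhost:8080.
import asyncio

import aiohttp


async def wait_until_healthy(base_url: str = "http://localhost:8080",
                             timeout: float = 3.0, interval: float = 0.5) -> dict:
    waited = 0.0
    async with aiohttp.ClientSession() as session:
        while waited < timeout:
            try:
                async with session.get(f"{base_url}/health") as response:
                    if response.status == 200:
                        health = await response.json()
                        if health.get("status") == "ok":
                            return health
            except aiohttp.ClientConnectorError:
                pass  # server not accepting connections yet
            await asyncio.sleep(interval)
            waited += interval
    raise TimeoutError(f"server at {base_url} never became healthy")


if __name__ == "__main__":
    health = asyncio.run(wait_until_healthy())
    print(f"slots idle: {health.get('slots_idle')}, processing: {health.get('slots_processing')}")
```

This is essentially the loop that `wait_for_health_status` in `steps.py` below performs, minus the extra assertions on per-slot state.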
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 524d0ada3..9fb436c2a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1410,11 +1410,6 @@ struct llama_server_context int n_processing_slots = 0; for (llama_client_slot &slot: slots) { - if (slot.available()) { - n_idle_slots++; - } else { - n_processing_slots++; - } json slot_data = get_formated_generation(slot); slot_data["id"] = slot.id; slot_data["task_id"] = slot.task_id; @@ -1429,6 +1424,11 @@ struct llama_server_context {"stopped_limit", slot.stopped_limit}, {"stopping_word", slot.stopping_word}, }; + if (slot_data["state"] == IDLE) { + n_idle_slots++; + } else { + n_processing_slots++; + } slots_data.push_back(slot_data); } LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); @@ -2748,19 +2748,6 @@ int main(int argc, char **argv) log_data["api_key"] = "api_key: " + std::to_string(sparams.api_keys.size()) + " keys loaded"; } - LOG_INFO("HTTP server listening", log_data); - // run the HTTP server in a thread - see comment below - std::thread t([&]() - { - if (!svr.listen_after_bind()) - { - state.store(SERVER_STATE_ERROR); - return 1; - } - - return 0; - }); - // load the model if (!llama.load_model(params)) { @@ -3228,6 +3215,19 @@ int main(int argc, char **argv) }*/ //); + LOG_INFO("HTTP server listening", log_data); + // run the HTTP server in a thread - see comment below + std::thread t([&]() + { + if (!svr.listen_after_bind()) + { + state.store(SERVER_STATE_ERROR); + return 1; + } + + return 0; + }); + llama.queue_tasks.on_new_task(std::bind( &llama_server_context::process_single_task, &llama, std::placeholders::_1)); llama.queue_tasks.on_finish_multitask(std::bind( diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md new file mode 100644 index 000000000..e44c5c286 --- /dev/null +++ b/examples/server/tests/README.md @@ -0,0 +1,46 @@ +# Server tests + +Python based server tests scenario using [BDD](https://en.wikipedia.org/wiki/Behavior-driven_development) and [behave](https://behave.readthedocs.io/en/latest/): + * [issues.feature](./features/issues.feature) Pending issues scenario + * [parallel.feature](./features/parallel.feature) Scenario involving multi slots and concurrent requests + * [security.feature](./features/security.feature) Security, CORS and API Key + * [server.feature](./features/server.feature) Server base scenario: completion, embedding, tokenization, etc... + +Tests target GitHub workflows job runners with 4 vCPU. + +Requests are using [aiohttp](https://docs.aiohttp.org/en/stable/client_reference.html), [asyncio](https://docs.python.org/fr/3/library/asyncio.html) based http client. + +Note: If the host architecture inference speed is faster than GitHub runners one, parallel scenario may randomly fail. To mitigate it, you can increase values in `n_predict`, `kv_size`. + +### Install dependencies +`pip install -r requirements.txt` + +### Run tests +1. Build the server +```shell +cd ../../.. +mkdir build +cd build +cmake ../ +cmake --build . --target server +``` +2. download required models: + 1. `../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf` +3. 
Start the test: `./tests.sh` + +It's possible to override some scenario steps values with environment variables: + - `PORT` -> `context.server_port` to set the listening port of the server during scenario, default: `8080` + - `LLAMA_SERVER_BIN_PATH` -> to change the server binary path, default: `../../../build/bin/server` + - `DEBUG` -> "ON" to enable steps and server verbose mode `--verbose` + +### Run @bug, @wip or @wrong_usage annotated scenario + +Feature or Scenario must be annotated with `@llama.cpp` to be included in the default scope. +- `@bug` annotation aims to link a scenario with a GitHub issue. +- `@wrong_usage` are meant to show user issue that are actually an expected behavior +- `@wip` to focus on a scenario working in progress + +To run a scenario annotated with `@bug`, start: +`DEBUG=ON ./tests.sh --no-skipped --tags bug` + +After changing logic in `steps.py`, ensure that `@bug` and `@wrong_usage` scenario are updated. diff --git a/examples/server/tests/features/environment.py b/examples/server/tests/features/environment.py new file mode 100644 index 000000000..13cc84101 --- /dev/null +++ b/examples/server/tests/features/environment.py @@ -0,0 +1,67 @@ +import os +import socket +import subprocess +import time +from contextlib import closing +from signal import SIGKILL + + +def before_scenario(context, scenario): + print(f"\x1b[33;42mStarting new scenario: {scenario.name}!\x1b[0m") + port = 8080 + if 'PORT' in os.environ: + port = int(os.environ['PORT']) + if is_server_listening("localhost", port): + assert False, "Server already started" + + +def after_scenario(context, scenario): + if scenario.status == "failed": + if 'GITHUB_ACTIONS' in os.environ: + print(f"\x1b[33;101mSCENARIO FAILED: {scenario.name} server logs:\x1b[0m\n\n") + if os.path.isfile('llama.log'): + with closing(open('llama.log', 'r')) as f: + for line in f: + print(line) + if not is_server_listening(context.server_fqdn, context.server_port): + print("\x1b[33;101mERROR: Server stopped listening\x1b[0m") + + if not pid_exists(context.server_process.pid): + assert False, f"Server not running pid={context.server_process.pid} ..." 
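+    # stop the server and make sure its port is actually released; the loop below escalates to SIGKILL and finally killall if it keeps listening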
+ + print(f"stopping server pid={context.server_process.pid} ...") + context.server_process.kill() + # Wait few for socket to free up + time.sleep(0.05) + + attempts = 0 + while is_server_listening(context.server_fqdn, context.server_port): + print(f"stopping server pid={context.server_process.pid} ...") + os.kill(context.server_process.pid, SIGKILL) + time.sleep(0.1) + attempts += 1 + if attempts > 5: + print(f"Server dangling exits, killing all {context.server_path} ...") + process = subprocess.run(['killall', '-9', context.server_path], + stderr=subprocess.PIPE, + universal_newlines=True) + print(process) + + +def is_server_listening(server_fqdn, server_port): + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + result = sock.connect_ex((server_fqdn, server_port)) + return result == 0 + + +def pid_exists(pid): + """Check whether pid exists in the current process table.""" + import errno + if pid < 0: + return False + try: + os.kill(pid, 0) + except OSError as e: + return e.errno == errno.EPERM + else: + return True diff --git a/examples/server/tests/features/issues.feature b/examples/server/tests/features/issues.feature new file mode 100644 index 000000000..542006d9a --- /dev/null +++ b/examples/server/tests/features/issues.feature @@ -0,0 +1,36 @@ +# List of ongoing issues +@bug +Feature: Issues + # Issue #5655 + Scenario: Multi users embeddings + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + And 64 KV cache size + And 2 slots + And continuous batching + And embeddings extraction + Then the server is starting + Then the server is healthy + + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + Given concurrent embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature new file mode 100644 index 000000000..802d624ff --- /dev/null +++ b/examples/server/tests/features/parallel.feature @@ -0,0 +1,77 @@ +@llama.cpp +Feature: Parallel + + Background: Server startup + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + And 64 KV cache size + And 2 slots + And continuous batching + Then the server is starting + Then the server is healthy + + Scenario Outline: Multi users completion + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And max tokens to predict + Given concurrent completion requests + Then the server is busy + Then the server is idle + And all slots are idle + Then all prompts are predicted with tokens + Examples: + | n_predict | + | 128 | + + Scenario Outline: Multi users OAI completions compatibility + Given a system prompt You are a writer. + And a model tinyllama-2 + Given a prompt: + """ + Write a very long book. + """ + And a prompt: + """ + Write another a poem. 
+ """ + And max tokens to predict + And streaming is + Given concurrent OAI completions requests + Then the server is busy + Then the server is idle + Then all prompts are predicted with tokens + Examples: + | streaming | n_predict | + | disabled | 128 | + | enabled | 64 | + + Scenario: Multi users with total number of tokens to predict exceeds the KV Cache size #3969 + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + And 128 max tokens to predict + Given concurrent completion requests + Then the server is busy + Then the server is idle + Then all prompts are predicted diff --git a/examples/server/tests/features/security.feature b/examples/server/tests/features/security.feature new file mode 100644 index 000000000..db06d3977 --- /dev/null +++ b/examples/server/tests/features/security.feature @@ -0,0 +1,50 @@ +@llama.cpp +Feature: Security + + Background: Server startup with an api key defined + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a server api key llama.cpp + Then the server is starting + Then the server is healthy + + Scenario Outline: Completion with some user api key + Given a prompt test + And a user api key + And 4 max tokens to predict + And a completion request with api error + + Examples: Prompts + | api_key | api_error | + | llama.cpp | no | + | llama.cpp | no | + | hackeme | raised | + | | raised | + + Scenario Outline: OAI Compatibility + Given a system prompt test + And a user prompt test + And a model test + And 2 max tokens to predict + And streaming is disabled + And a user api key + Given an OAI compatible chat completions request with api error + + Examples: Prompts + | api_key | api_error | + | llama.cpp | no | + | llama.cpp | no | + | hackme | raised | + + + Scenario Outline: CORS Options + When an OPTIONS request is sent from + Then CORS header is set to + + Examples: Headers + | origin | cors_header | cors_header_value | + | localhost | Access-Control-Allow-Origin | localhost | + | web.mydomain.fr | Access-Control-Allow-Origin | web.mydomain.fr | + | origin | Access-Control-Allow-Credentials | true | + | web.mydomain.fr | Access-Control-Allow-Methods | POST | + | web.mydomain.fr | Access-Control-Allow-Headers | * | diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature new file mode 100644 index 000000000..fedcfe5ae --- /dev/null +++ b/examples/server/tests/features/server.feature @@ -0,0 +1,69 @@ +@llama.cpp +Feature: llama.cpp server + + Background: Server startup + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + # KV Cache corresponds to the total amount of tokens + # that can be stored across all independent sequences: #4130 + # see --ctx-size and #5568 + And 32 KV cache size + And 1 slots + And embeddings extraction + And 32 server max tokens to predict + Then the server is starting + Then the server is healthy + + Scenario: Health + Then the server is ready + And all slots are idle + + Scenario Outline: Completion + Given a prompt + And max tokens to predict + And a completion request with no api error + Then tokens are predicted matching + + Examples: Prompts + | prompt | n_predict | re_content | n_predicted | + | I believe the meaning of life is | 8 | read | 8 | + | Write a joke about 
AI | 64 | (parkfriendsscared)+ | 32 | + + Scenario Outline: OAI Compatibility + Given a model + And a system prompt + And a user prompt + And max tokens to predict + And streaming is + Given an OAI compatible chat completions request with no api error + Then tokens are predicted matching + + Examples: Prompts + | model | system_prompt | user_prompt | max_tokens | re_content | n_predicted | enable_streaming | + | llama-2 | Book | What is the best book | 8 | (Momwhat)+ | 8 | disabled | + | codellama70b | You are a coding assistant. | Write the fibonacci function in c++. | 64 | (thankshappybird)+ | 32 | enabled | + + Scenario: Embedding + When embeddings are computed for: + """ + What is the capital of Bulgaria ? + """ + Then embeddings are generated + + Scenario: OAI Embeddings compatibility + Given a model tinyllama-2 + When an OAI compatible embeddings computation request for: + """ + What is the capital of Spain ? + """ + Then embeddings are generated + + + Scenario: Tokenize / Detokenize + When tokenizing: + """ + What is the capital of France ? + """ + Then tokens can be detokenize diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py new file mode 100644 index 000000000..50f2b641e --- /dev/null +++ b/examples/server/tests/features/steps/steps.py @@ -0,0 +1,709 @@ +import asyncio +import json +import os +import re +import socket +import subprocess +import time +from contextlib import closing +from re import RegexFlag + +import aiohttp +import openai +from behave import step +from behave.api.async_step import async_run_until_complete + + +@step(u"a server listening on {server_fqdn}:{server_port}") +def step_server_config(context, server_fqdn, server_port): + context.server_fqdn = server_fqdn + context.server_port = int(server_port) + if 'PORT' in os.environ: + context.server_port = int(os.environ['PORT']) + print(f"$PORT set, overriding server port with to {context.server_port}") + + context.base_url = f'http://{context.server_fqdn}:{context.server_port}' + + context.debug = 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON' + context.model_alias = None + context.n_ctx = None + context.n_predict = None + context.n_server_predict = None + context.n_slots = None + context.server_api_key = None + context.server_continuous_batching = False + context.server_embeddings = False + context.server_seed = None + context.user_api_key = None + + context.tasks_result = [] + context.concurrent_tasks = [] + context.prompts = [] + + +@step(u'a model file {model_file}') +def step_model_file(context, model_file): + context.model_file = model_file + + +@step(u'a model alias {model_alias}') +def step_model_alias(context, model_alias): + context.model_alias = model_alias + + +@step(u'{seed} as server seed') +def step_seed(context, seed): + context.server_seed = int(seed) + + +@step(u'{n_ctx} KV cache size') +def step_n_ctx(context, n_ctx): + context.n_ctx = int(n_ctx) + + +@step(u'{n_slots} slots') +def step_n_slots(context, n_slots): + context.n_slots = int(n_slots) + + +@step(u'{n_predict} server max tokens to predict') +def step_server_n_predict(context, n_predict): + context.n_server_predict = int(n_predict) + + +@step(u'continuous batching') +def step_server_continuous_batching(context): + context.server_continuous_batching = True + + +@step(u'embeddings extraction') +def step_server_embeddings(context): + context.server_embeddings = True + + +@step(u"the server is starting") +def step_start_server(context): + start_server_background(context) + 
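+    # poll the server port until it accepts connections (20 attempts x 0.1s, so roughly a 2s startup budget)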
attempts = 0 + while True: + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + result = sock.connect_ex((context.server_fqdn, context.server_port)) + if result == 0: + print("\x1b[33;46mserver started!\x1b[0m") + return + attempts += 1 + if attempts > 20: + assert False, "server not started" + print(f"waiting for server to start, connect error code = {result}...") + time.sleep(0.1) + + +@step(u"the server is {expecting_status}") +@async_run_until_complete +async def step_wait_for_the_server_to_be_started(context, expecting_status): + match expecting_status: + case 'healthy': + await wait_for_health_status(context, context.base_url, 200, 'ok') + + case 'ready' | 'idle': + await wait_for_health_status(context, context.base_url, 200, 'ok', + params={'fail_on_no_slot': 0, 'include_slots': 0}, + slots_idle=context.n_slots, + slots_processing=0, + expected_slots=[{'id': slot_id, 'state': 0} + for slot_id in range(context.n_slots)]) + case 'busy': + await wait_for_health_status(context, context.base_url, 503, + 'no slot available', + params={'fail_on_no_slot': 0, 'include_slots': 0}, + slots_idle=0, + slots_processing=context.n_slots, + expected_slots=[{'id': slot_id, 'state': 1} + for slot_id in range(context.n_slots)]) + case _: + assert False, "unknown status" + + +@step(u'all slots are {expected_slot_status_string}') +@async_run_until_complete +async def step_all_slots_status(context, expected_slot_status_string): + match expected_slot_status_string: + case 'idle': + expected_slot_status = 0 + case 'busy': + expected_slot_status = 1 + case _: + assert False, "unknown status" + + expected_slots = [{'id': slot_id, 'state': expected_slot_status} + for slot_id in range(context.n_slots)] + await request_slots_status(context, expected_slots) + + +@step(u'a completion request with {api_error} api error') +@async_run_until_complete +async def step_request_completion(context, api_error): + expect_api_error = api_error == 'raised' + completion = await request_completion(context.prompts.pop(), + context.base_url, + debug=context.debug, + n_predict=context.n_predict, + server_seed=context.server_seed, + expect_api_error=expect_api_error, + user_api_key=context.user_api_key) + context.tasks_result.append(completion) + if context.debug: + print(f"Completion response: {completion}") + if expect_api_error: + assert completion == 401, f"completion must be an 401 status code: {completion}" + + +@step(u'{predicted_n} tokens are predicted matching {re_content}') +def step_n_tokens_predicted_with_content(context, predicted_n, re_content): + assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n), re_content) + + +@step(u'{predicted_n} tokens are predicted') +def step_n_tokens_predicted(context, predicted_n): + assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n)) + + +@step(u'a user prompt {user_prompt}') +def step_user_prompt(context, user_prompt): + context.prompts.append(user_prompt) + + +@step(u'a system prompt {system_prompt}') +def step_system_prompt(context, system_prompt): + context.system_prompt = system_prompt + + +@step(u'a model {model}') +def step_model(context, model): + context.model = model + + +@step(u'{max_tokens} max tokens to predict') +def step_max_tokens(context, max_tokens): + context.n_predict = int(max_tokens) + + +@step(u'streaming is {enable_streaming}') +def step_streaming(context, enable_streaming): + context.enable_streaming = enable_streaming == 'enabled' + + +@step(u'a user api key {user_api_key}') +def 
step_user_api_key(context, user_api_key): + context.user_api_key = user_api_key + + +@step(u'no user api key') +def step_no_user_api_key(context): + context.user_api_key = None + + +@step(u'a user api key ') +def step_no_user_api_key_space(context): + context.user_api_key = None + + +@step(u'a server api key {server_api_key}') +def step_server_api_key(context, server_api_key): + context.server_api_key = server_api_key + + +@step(u'an OAI compatible chat completions request with {api_error} api error') +@async_run_until_complete +async def step_oai_chat_completions(context, api_error): + if context.debug: + print(f"Submitting OAI compatible completions request...") + expect_api_error = api_error == 'raised' + completion = await oai_chat_completions(context.prompts.pop(), + context.system_prompt, + context.base_url, + False, + model=context.model if hasattr(context, 'model') else None, + + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None, + + expect_api_error=expect_api_error) + context.tasks_result.append(completion) + if context.debug: + print(f"Completion response: {completion}") + if expect_api_error: + assert completion == 401, f"completion must be an 401 status code: {completion}" + + if context.debug: + print(f"Completion response: {completion}") + + +@step(u'a prompt') +def step_a_prompt(context): + context.prompts.append(context.text) + + +@step(u'a prompt {prompt}') +def step_a_prompt_prompt(context, prompt): + context.prompts.append(prompt) + + +@step(u'concurrent completion requests') +@async_run_until_complete() +async def step_concurrent_completion_requests(context): + await concurrent_completion_requests(context, + request_completion, + # prompt is inserted automatically + context.base_url, + debug=context.debug, + n_predict=context.n_predict if hasattr(context, 'n_predict') else None, + server_seed=context.server_seed if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key if hasattr(context, + 'user_api_key') else None) + + +@step(u'concurrent OAI completions requests') +@async_run_until_complete +async def step_oai_chat_completions(context): + await concurrent_completion_requests(context, oai_chat_completions, + # user_prompt is inserted automatically + context.system_prompt, + context.base_url, + True, # async_client + model=context.model + if hasattr(context, 'model') else None, + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None) + + +@step(u'all prompts are predicted') +@async_run_until_complete +async def step_all_prompts_are_predicted(context): + await all_prompts_are_predicted(context) + + +@step(u'all prompts are predicted with {n_predict} tokens') +@async_run_until_complete +async def step_all_prompts_are_predicted_with_n_tokens(context, n_predict): + expected_predicted_n = int(n_predict) + await all_prompts_are_predicted(context, expected_predicted_n) + + +async def all_prompts_are_predicted(context, expected_predicted_n=None): + n_completions = await 
gather_tasks_results(context) + assert n_completions > 0 + for i in range(n_completions): + assert_n_tokens_predicted(context.tasks_result.pop(), expected_predicted_n=expected_predicted_n) + assert len(context.concurrent_tasks) == 0, f"{len(context.concurrent_tasks)} pending requests" + + +@step(u'embeddings are computed for') +@async_run_until_complete +async def step_compute_embedding(context): + content = context.text + base_url = context.base_url + context.embeddings = await request_embedding(content, base_url) + + +@step(u'embeddings are generated') +def step_assert_embeddings(context): + assert_embeddings(context.embeddings) + + +@step(u'an OAI compatible embeddings computation request for') +def step_oai_compute_embedding(context): + openai.api_key = 'nope' # openai client always expects an api_keu + if context.user_api_key is not None: + openai.api_key = context.user_api_key + openai.api_base = f'{context.base_url}/v1' + embeddings = openai.Embedding.create( + model=context.model, + input=context.text, + ) + context.embeddings = embeddings + + +@step(u'concurrent embedding requests') +@async_run_until_complete() +async def step_concurrent_embedding_requests(context): + await concurrent_completion_requests(context, + request_embedding, + # prompt is inserted automatically + context.base_url) + + +@step(u'all embeddings are generated') +@async_run_until_complete() +async def all_embeddings_are_generated(context): + n_embedding_requests = await gather_tasks_results(context) + assert n_embedding_requests > 0 + for i in range(n_embedding_requests): + assert_embeddings(context.tasks_result.pop()) + + +@step(u'tokenizing') +@async_run_until_complete +async def step_tokenize(context): + context.tokenized_text = context.text + async with aiohttp.ClientSession() as session: + async with session.post(f'{context.base_url}/tokenize', + json={ + "content": context.tokenized_text, + }) as response: + assert response.status == 200 + tokenize_json = await response.json() + context.tokens = tokenize_json['tokens'] + + +@step(u'tokens can be detokenize') +@async_run_until_complete +async def step_detokenize(context): + assert len(context.tokens) > 0 + async with aiohttp.ClientSession() as session: + async with session.post(f'{context.base_url}/detokenize', + json={ + "tokens": context.tokens, + }) as response: + assert response.status == 200 + detokenize_json = await response.json() + # SPM tokenizer adds a whitespace prefix: https://github.com/google/sentencepiece/issues/15 + assert context.tokenized_text == detokenize_json['content'].strip() + + +@step(u'an OPTIONS request is sent from {origin}') +@async_run_until_complete +async def step_options_request(context, origin): + async with aiohttp.ClientSession() as session: + async with session.options(f'{context.base_url}/v1/chat/completions', + headers={"Origin": origin}) as response: + assert response.status == 200 + context.options_response = response + + +@step(u'CORS header {cors_header} is set to {cors_header_value}') +def step_check_options_header_value(context, cors_header, cors_header_value): + assert context.options_response.headers[cors_header] == cors_header_value + + +async def concurrent_completion_requests(context, f_completion, *args, **kwargs): + n_prompts = len(context.prompts) + if context.debug: + print(f"starting {n_prompts} concurrent completion requests...") + assert n_prompts > 0 + for prompt_no in range(n_prompts): + shifted_args = [context.prompts.pop(), *args] + 
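+        # fire each request as a background task; gather_tasks_results() collects them later when the scenario asserts on results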
context.concurrent_tasks.append(asyncio.create_task(f_completion(*shifted_args, **kwargs))) + await asyncio.sleep(0.1) + + +async def request_completion(prompt, + base_url, + debug=False, + n_predict=None, + server_seed=None, + expect_api_error=None, + user_api_key=None): + if debug: + print(f"Sending completion request: {prompt}") + origin = "my.super.domain" + headers = { + 'Origin': origin + } + if user_api_key is not None: + if debug: + print(f"Set user_api_key: {user_api_key}") + headers['Authorization'] = f'Bearer {user_api_key}' + + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/completion', + json={ + "prompt": prompt, + "n_predict": int(n_predict) if n_predict is not None else -1, + "seed": server_seed if server_seed is not None else 42 + }, + headers=headers) as response: + if expect_api_error is None or not expect_api_error: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + return await response.json() + else: + return response.status + + +async def oai_chat_completions(user_prompt, + system_prompt, + base_url, + async_client, + debug=False, + model=None, + n_predict=None, + enable_streaming=None, + server_seed=None, + user_api_key=None, + expect_api_error=None): + if debug: + print(f"Sending OAI Chat completions request: {user_prompt}") + # openai client always expects an api key + user_api_key = user_api_key if user_api_key is not None else 'nope' + seed = server_seed if server_seed is not None else 42 + enable_streaming = enable_streaming if enable_streaming is not None else False + payload = { + "messages": [ + { + "role": "system", + "content": system_prompt, + }, + { + "role": "user", + "content": user_prompt, + } + ], + "model": model, + "max_tokens": n_predict, + "stream": enable_streaming, + "seed": seed + } + completion_response = { + 'content': '', + 'timings': { + 'predicted_n': 0 + } + } + if async_client: + origin = 'llama.cpp' + headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/v1/chat/completions', + json=payload, + headers=headers) as response: + if enable_streaming: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "text/event-stream" + event_received = True + while event_received: + event_received = False + async for line_in_bytes in response.content: + line = line_in_bytes.decode('utf8') + line = line.rstrip('\n').rstrip('\r') + if line == '': + continue + event_data = line.split(': ', 1) + assert event_data[0] == 'data', f'Bad event code received: ```{event_data}```' + chunk_raw = event_data[1] + + chunk = json.loads(chunk_raw) + assert len(chunk['choices']) == 1, f"no choices provided, line ```{line}```" + delta = chunk['choices'][0]['delta'] + if 'content' in delta: + completion_response['content'] += delta['content'] + completion_response['timings']['predicted_n'] += 1 + else: + if expect_api_error is None or not expect_api_error: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "application/json; charset=utf-8" + chat_completion_raw = await response.json() + completion_response = { + 'content': chat_completion_raw['choices'][0]['message'], + 'timings': { + 'predicted_n': chat_completion_raw['usage']['completion_tokens'] + } + } + else: + return response.status + else: + 
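+        # synchronous client path: drive the request through the openai python package instead of raw aiohttp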
try: + openai.api_key = user_api_key + openai.api_base = f'{base_url}/v1/chat' + chat_completion = openai.Completion.create( + messages=payload['messages'], + model=model, + max_tokens=n_predict, + stream=enable_streaming, + seed=seed + ) + except openai.error.APIError as e: + if expect_api_error is not None and expect_api_error: + return 401 + else: + assert False, f'error raised: {e}' + + if enable_streaming: + for chunk in chat_completion: + assert len(chunk.choices) == 1 + delta = chunk.choices[0].delta + if 'content' in delta: + completion_response['content'] += delta['content'] + completion_response['timings']['predicted_n'] += 1 + else: + assert len(chat_completion.choices) == 1 + completion_response = { + 'content': chat_completion.choices[0].message.content, + 'timings': { + 'predicted_n': chat_completion.usage.completion_tokens + } + } + if debug: + print("OAI response formatted to llama.cpp:", completion_response) + return completion_response + + +async def request_embedding(content, base_url): + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/embedding', + json={ + "content": content, + }) as response: + assert response.status == 200 + response_json = await response.json() + return response_json['embedding'] + + +def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re_content=None): + content = completion_response['content'] + n_predicted = completion_response['timings']['predicted_n'] + assert len(content) > 0, "no token predicted" + if expected_predicted_n is not None: + assert n_predicted == expected_predicted_n, (f'invalid number of tokens predicted:' + f' {n_predicted} <> {expected_predicted_n}') + if re_content is not None: + re_content = '^.*' + re_content.replace('', '|') + '.*$' + assert re.match(re_content, content, flags=RegexFlag.IGNORECASE | RegexFlag.MULTILINE | RegexFlag.DOTALL), ( + f'invalid tokens predicted:' + f' ```\n{content}\n``` do not match /{re_content}/') + + +async def gather_tasks_results(context): + n_tasks = len(context.concurrent_tasks) + if context.debug: + print(f"Waiting for all {n_tasks} tasks results...") + for task_no in range(n_tasks): + context.tasks_result.append(await context.concurrent_tasks.pop()) + n_completions = len(context.tasks_result) + return n_completions + + +async def wait_for_health_status(context, + base_url, + expected_http_status_code, + expected_health_status, + params=None, + slots_idle=None, + slots_processing=None, + expected_slots=None): + if context.debug: + print(f"Starting checking for health for expected_health_status={expected_health_status}") + timeout = 3 # seconds + interval = 0.5 + counter = 0 + async with aiohttp.ClientSession() as session: + while True: + async with await session.get(f'{base_url}/health', params=params) as health_response: + status_code = health_response.status + health = await health_response.json() + if context.debug: + print(f"HEALTH - response for expected health status='{expected_health_status}' on " + f"'{base_url}/health'?{params} is {health}") + if (status_code == expected_http_status_code + and health['status'] == expected_health_status + and (slots_idle is None or health['slots_idle'] == slots_idle) + and (slots_processing is None or health['slots_processing'] == slots_processing)): + if expected_slots is not None: + assert_slots_status(health['slots'], expected_slots) + return + if (status_code == expected_http_status_code + and health['status'] == expected_health_status + and (slots_idle is None or 
health['slots_idle'] == slots_idle) + and (slots_processing is None or health['slots_processing'] == slots_processing)): + if expected_slots is not None: + assert_slots_status(health['slots'], expected_slots) + return + await asyncio.sleep(interval) + + counter += interval + if counter >= timeout: + # Sometimes health requests are triggered after completions are predicted + if expected_http_status_code == 503: + if len(context.tasks_result) == 0: + print("\x1b[5;37;43mWARNING: forcing concurrent tasks," + " busy health check missed, probably too fast inference\x1b[0m") + n_completions = await gather_tasks_results(context) + if n_completions > 0: + return + + assert False, 'timeout exceeded' + + +def assert_embeddings(embeddings): + assert len(embeddings) > 0 + embeddings_computed = False + for emb in embeddings: + if emb != 0: + embeddings_computed = True + assert embeddings_computed, f"Embeddings: {embeddings}" + + +async def request_slots_status(context, expected_slots): + async with aiohttp.ClientSession() as session: + async with await session.get(f'{context.base_url}/slots') as slots_response: + assert slots_response.status == 200 + slots = await slots_response.json() + assert_slots_status(slots, expected_slots) + + +def assert_slots_status(slots, expected_slots): + assert len(slots) == len(expected_slots) + for slot_id, (expected, slot) in enumerate(zip(expected_slots, slots)): + for key in expected: + assert expected[key] == slot[key], (f"invalid slot {slot_id}" + f" expected[{key}] != slot[{key}]" + f" = {expected[key]} != {slot[key]}") + + +def start_server_background(context): + context.server_path = '../../../build/bin/server' + if 'LLAMA_SERVER_BIN_PATH' in os.environ: + context.server_path = os.environ['LLAMA_SERVER_BIN_PATH'] + server_args = [ + '--host', context.server_fqdn, + '--port', context.server_port, + '--model', context.model_file + ] + if context.server_continuous_batching: + server_args.append('--cont-batching') + if context.server_embeddings: + server_args.append('--embedding') + if context.model_alias is not None: + server_args.extend(['--alias', context.model_alias]) + if context.n_ctx is not None: + server_args.extend(['--ctx-size', context.n_ctx]) + if context.n_slots is not None: + server_args.extend(['--parallel', context.n_slots]) + if context.n_server_predict is not None: + server_args.extend(['--n-predict', context.n_server_predict]) + if context.server_api_key is not None: + server_args.extend(['--api-key', context.server_api_key]) + if context.debug: + server_args.append('--verbose') + print(f"starting server with: {context.server_path}", *server_args) + context.server_process = subprocess.Popen( + [str(arg) for arg in [context.server_path, *server_args]], + close_fds=True) + print(f"server pid={context.server_process.pid}") diff --git a/examples/server/tests/features/wrong_usages.feature b/examples/server/tests/features/wrong_usages.feature new file mode 100644 index 000000000..e228b2371 --- /dev/null +++ b/examples/server/tests/features/wrong_usages.feature @@ -0,0 +1,21 @@ +# run with ./test.sh --tags wrong_usage +@wrong_usage +Feature: Wrong usage of llama.cpp server + + #3969 The user must always set --n-predict option + # to cap the number of tokens any completion request can generate + # or pass n_predict/max_tokens in the request. 
+ Scenario: Infinite loop + Given a server listening on localhost:8080 + And a model file stories260K.gguf + # Uncomment below to fix the issue + #And 64 server max tokens to predict + Then the server is starting + Given a prompt: + """ + Go to: infinite loop + """ + # Uncomment below to fix the issue + #And 128 max tokens to predict + Given concurrent completion requests + Then all prompts are predicted diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt new file mode 100644 index 000000000..3e51b12dc --- /dev/null +++ b/examples/server/tests/requirements.txt @@ -0,0 +1,3 @@ +aiohttp~=3.9.3 +behave~=1.2.6 +openai~=0.25.0 diff --git a/examples/server/tests/tests.sh b/examples/server/tests/tests.sh new file mode 100755 index 000000000..17a4e6fc6 --- /dev/null +++ b/examples/server/tests/tests.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -eu + +if [ $# -lt 1 ] +then + # Start @llama.cpp scenario + behave --summary --stop --no-capture --exclude 'issues|wrong_usages' --tags llama.cpp +else + behave "$@" +fi + From 4c4cb30736582cacb1a164a9d4bc8e17b1014be7 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sat, 24 Feb 2024 16:23:52 +0200 Subject: [PATCH 698/811] IQ3_S: a much better alternative to Q3_K (#5676) * iq4_nl: squash commits for easier rebase * Basics (quantize, dequantize) * CUDA dequantize and dot product * Slightly faster CUDA dot product (120 t/s) * Switch to 6-bit scales * Scalar dot product * AVX2 dot product * ARM_NEON dot product * Works on metal, but still slow * Slightly better Metal dot product * Another small Metal improvement * Metal dot product is getting there * Faster CUDA dot product * Add 1/8 ffn_down layers as Q5_K when no imatrix has been provided * Report the actual bpw * Add _xs mix that is 4.05 bpw for non-MoE models * Remove IQ4_XS for now, slightly adjust kvalues_iq4nl * AVX2 dot product uses Q8_0 instead of Q8_K * Add to test-backend-ops * Minor fix * Also use use Q5_K for attn_output in MoE models * Fixes after merging latest master * Switching to blocks of 32 * AVX2 for blocks of 32 * Scaler dot product for blocks of 32 * ARM_NEON dot product for blocks of 32 * Metal kernels for blocks of 32 * Slightly faster Metal kernels * Resurrecting iq3_xs After all the experimentation, nothing was better than this. * Minor PPL improvement via a block scale fudge factor * Minor improvement via 3 neighbours * iq3_xs: working scalar and AVX2 dot products * iq3_xs: ARM_NEON dot product - works but extremely slow (10 t/s) * iq3_xs: working Metal implementation * Adding IQ3_M - IQ3_XS mix with mostly Q4_K * iiq3_xs: a 3.4375 bpw variant * iq3_xs: make CUDA work for new version * iq3_xs: make scalar and AVX2 work for new version * iq3_s: make ARM_NEON work with new version * iq3_xs: make new version work on metal Performance is very similar to Q3_K_S * iq3_xs: tiny Metal speed improvement * iq3_xs: tiny Metal speed improvement * Fix stupid warning * Q3_K_XS now uses a mix of IQ3_XS and IQ3_XXS * iq3_xs: rename to iq3_s * iq3_s: make tests pass * Move Q3_K_XS mix to 3.25 bpw * Attempt to fix failing tests * Another attempt to fix the Windows builds * Attempt to fix ROCm * ROCm again * iq3_s: partial fix for QK_K = 64 * iq3_s: make it work on metal for QK_K = 64 Pleasent surprise: the coding was super-block size independent, so all it took was to delete some QK_K == 256 guards. * Will this fix ROCm? 
--------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 2 + ggml-cuda.cu | 171 ++++++++- ggml-metal.m | 33 +- ggml-metal.metal | 304 +++++++++++++++ ggml-quants.c | 674 +++++++++++++++++++++++++++++---- ggml-quants.h | 20 + ggml.c | 31 ++ ggml.h | 2 + llama.cpp | 50 ++- llama.h | 2 + tests/test-backend-ops.cpp | 2 +- tests/test-quantize-fns.cpp | 4 +- 12 files changed, 1211 insertions(+), 84 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 37520857f..ab7e72aaf 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -27,6 +27,8 @@ static const std::vector QUANT_OPTIONS = { { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "IQ3_XXS",LLAMA_FTYPE_MOSTLY_IQ3_XXS," 3.06 bpw quantization", }, + { "IQ3_S", LLAMA_FTYPE_MOSTLY_IQ3_S, " 3.44 bpw quantization", }, + { "IQ3_M", LLAMA_FTYPE_MOSTLY_IQ3_M, " 3.66 bpw quantization mix", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, { "Q3_K_XS",LLAMA_FTYPE_MOSTLY_Q3_K_XS,"3-bit extra small quantization" , }, { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b0e454e02..21c612cb7 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -172,6 +172,7 @@ #endif typedef int8_t int8x4_t __attribute__((ext_vector_type(4))); +typedef uint8_t uint8x4_t __attribute__((ext_vector_type(4))); static __device__ __forceinline__ int __vsubss4(const int a, const int b) { const int8x4_t va = reinterpret_cast(a); const int8x4_t vb = reinterpret_cast(b); @@ -196,6 +197,18 @@ static __device__ __forceinline__ int __vsub4(const int a, const int b) { return __vsubss4(a, b); } +static __device__ __forceinline__ unsigned int __vcmpeq4(unsigned int a, unsigned int b) { + const uint8x4_t& va = reinterpret_cast(a); + const uint8x4_t& vb = reinterpret_cast(b); + unsigned int c; + uint8x4_t& vc = reinterpret_cast(c); +#pragma unroll + for (int i = 0; i < 4; ++i) { + vc[i] = va[i] == vb[i] ? 
0xff : 0x00; + } + return c; +} + static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { #if defined(__gfx906__) || defined(__gfx908__) || defined(__gfx90a__) || defined(__gfx1030__) c = __builtin_amdgcn_sdot4(a, b, c, false); @@ -518,6 +531,17 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#define QR3_XS 8 +#define QI3_XS (QK_K / (4*QR3_XS)) +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[QK_K/64]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 27*(QK_K/64), "wrong iq3_s block size/padding"); + #define QR1_S 8 #define QI1_S (QK_K / (4*QR1_S)) typedef struct { @@ -1700,6 +1724,74 @@ static const __device__ uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const __device__ uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 
0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 
0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + + static const __device__ uint64_t iq1s_grid[512] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, @@ -1973,6 +2065,32 @@ static __global__ void dequantize_block_iq3_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq3_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq3_s * x = (const block_iq3_s *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint8_t * qs = x[i].qs + 8*ib; + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*il+0] | ((x[i].qh[ib] << (8-2*il)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*il+1] | ((x[i].qh[ib] << (7-2*il)) & 256))); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib/2] >> 4*(ib%2)) & 0xf)) * 0.5f; + const uint8_t signs = x[i].signs[4*ib + il]; + for (int j = 0; j < 4; ++j) { + y[j+0] = d * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = d * grid2[j] * (signs & kmask_iq2xs[j+4] ? -1.f : 1.f); + } +#else + assert(false); +#endif + +} + template static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { @@ -4717,6 +4835,41 @@ static __device__ __forceinline__ float vec_dot_iq3_xxs_q8_1( #endif } +// TODO: don't use lookup table for signs +static __device__ __forceinline__ float vec_dot_iq3_s_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +#if QK_K == 256 + const block_iq3_s * bq2 = (const block_iq3_s *) vbq; + + const int ib32 = iqs; + const uint8_t * qs = bq2->qs + 8*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + int sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint32_t * grid1 = iq3xs_grid + (qs[2*l+0] | ((bq2->qh[ib32] << (8 - 2*l)) & 256)); + const uint32_t * grid2 = iq3xs_grid + (qs[2*l+1] | ((bq2->qh[ib32] << (7 - 2*l)) & 256)); + uint32_t signs0 = __vcmpeq4(((bq2->signs[4*ib32+l] & 0xf) * 0x01010101) & 0x08040201, 0x08040201); + uint32_t signs1 = __vcmpeq4(((bq2->signs[4*ib32+l] >> 4) * 0x01010101) & 0x08040201, 0x08040201); + const int grid_l = __vsub4(grid1[0] ^ signs0, signs0); + const int grid_h = __vsub4(grid2[0] ^ signs1, signs1); + sumi = __dp4a(grid_l, *((int *)q8+0), sumi); + sumi = __dp4a(grid_h, *((int *)q8+1), sumi); + q8 += 8; + } + const float d = (float)bq2->d * (0.5f + ((bq2->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * __low2float(bq8_1[ib32].ds) * 0.5f; + return d * sumi; +#else + assert(false); + return 0.f; +#endif +#else + assert(false); + return 0.f; +#endif +} + + static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { #if QK_K == 256 @@ -6849,6 +7002,12 @@ static void dequantize_row_iq3_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq3_xxs<<>>(vx, y); } +template +static void dequantize_row_iq3_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq3_s<<>>(vx, y); +} + template static void 
dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6904,6 +7063,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ3_S: + return dequantize_row_iq3_s_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6943,6 +7104,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ3_S: + return dequantize_row_iq3_s_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8688,6 +8851,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8713,6 +8877,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8818,6 +8983,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ3_S: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11541,7 +11710,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons } ggml_type a_type = a->type; if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || - a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL) { + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 0d4aa4309..ee584cfa7 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -61,6 +61,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, @@ -85,6 +86,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, @@ -105,6 +107,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, @@ -122,6 +125,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, @@ -139,6 +143,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, 
GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, @@ -452,6 +457,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); @@ -476,6 +482,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); @@ -496,6 +503,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); @@ -513,6 +521,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); @@ -530,6 +539,7 @@ static struct 
ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); @@ -1347,6 +1357,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); @@ -1483,6 +1494,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1537,8 +1554,8 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src0t == GGML_TYPE_IQ3_XXS) { - const int mem_size = 256*4+128; + else if (src0t == GGML_TYPE_IQ3_XXS || src0t == GGML_TYPE_IQ3_S) { + const int mem_size = src0t == GGML_TYPE_IQ3_XXS ? 
256*4+128 : 512*4; [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1640,6 +1657,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); @@ -1779,6 +1797,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1849,8 +1873,8 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src2t == GGML_TYPE_IQ3_XXS) { - const int mem_size = 256*4+128; + else if (src2t == GGML_TYPE_IQ3_XXS || src2t == GGML_TYPE_IQ3_S) { + const int mem_size = src2t == GGML_TYPE_IQ3_XXS ? 256*4+128 : 512*4; [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1900,6 +1924,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; diff --git a/ggml-metal.metal b/ggml-metal.metal index c223a981c..b3bf40539 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2525,6 +2525,20 @@ typedef struct { } block_iq3_xxs; // 98 bytes / block for QK_K = 256, so 3.0625 bpw +// 3.4375 bpw +#if QK_K == 64 +#define IQ3S_N_SCALE 2 +#else +#define IQ3S_N_SCALE QK_K/64 +#endif +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; + typedef struct { half d; uint8_t qs[QK_K/8]; @@ -3795,6 +3809,73 @@ constexpr constant static uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +constexpr constant static uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 
0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 
0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + #define NGRID_IQ1S 512 constexpr constant static uint64_t iq1s_grid[NGRID_IQ1S] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, @@ -4361,6 +4442,136 @@ kernel void kernel_mul_mv_iq3_xxs_f32( kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq3_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], 
+ uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_s * x = (device const block_iq3_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3xs_grid[pos + i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 8 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + (ib/2); + device const uint8_t * signs = xr->signs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d = db * (0.5f + ((sc[0] >> 4*(ib%2)) & 0xf)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l] & kmask_iq2xs[j+0]); + sum[1] += yl[8*l + j + 4] * grid2[j] * select(1, -1, signs[l] & kmask_iq2xs[j+4]); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_s)/2; + qs += nb*sizeof(block_iq3_s); + qh += nb*sizeof(block_iq3_s); + sc += nb*sizeof(block_iq3_s); + signs += nb*sizeof(block_iq3_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.5f; + } + } +} + +[[host_name("kernel_mul_mv_iq3_s_f32")]] +kernel void kernel_mul_mv_iq3_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + void kernel_mul_mv_iq1_s_f32_impl( device const void * src0, device const float * src1, @@ -4952,6 +5163,31 @@ void dequantize_iq3_xxs(device const 
block_iq3_xxs * xb, short il, thread type4x } } +template +void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * qs = xb->qs + 8*ib32; + device const uint8_t * signs = xb->signs + 4*ib32 + 2*il; + const uint8_t qh = xb->qh[ib32] >> 4*il; + const float dl = d * (0.5f + ((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * 0.5f; + constant uint8_t * grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+0] | ((qh << 8) & 256))); + constant uint8_t * grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+1] | ((qh << 7) & 256))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i+0]); + reg[1][i] = dl * grid2[i] * select(1, -1, signs[0] & kmask_iq2xs[i+4]); + } + grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+2] | ((qh << 6) & 256))); + grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+3] | ((qh << 5) & 256))); + for (int i = 0; i < 4; ++i) { + reg[2][i] = dl * grid1[i] * select(1, -1, signs[1] & kmask_iq2xs[i+0]); + reg[3][i] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i+4]); + } +} + template void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 @@ -5525,6 +5761,7 @@ template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; @@ -5566,6 +5803,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; @@ -5619,6 +5857,7 @@ template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; @@ -6589,6 +6828,71 @@ kernel void kernel_mul_mv_id_iq3_xxs_f32( sgitg); } +[[host_name("kernel_mul_mv_id_iq3_s_f32")]] +kernel void kernel_mul_mv_id_iq3_s_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + 
constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq3_s_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} + [[host_name("kernel_mul_mv_id_iq1_s_f32")]] kernel void kernel_mul_mv_id_iq1_s_f32( device const char * ids, diff --git a/ggml-quants.c b/ggml-quants.c index b15977f53..5c5f2ce1b 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3505,6 +3505,73 @@ static const uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 
0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 
0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + #define NGRID_IQ2XXS 512 static const uint64_t iq1s_grid[NGRID_IQ2XXS] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, @@ -3736,6 +3803,49 @@ void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y } } +// ====================== 3.3125 bpw (de)-quantization + +void dequantize_row_iq3_s(const block_iq3_s * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = x[i].signs; + + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const float db1 = d * (0.5f + (x[i].scales[ib32/2] & 0xf)) * 0.5f; + const float db2 = d * (0.5f + (x[i].scales[ib32/2] >> 4)) * 0.5f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db1 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db1 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? -1.f : 1.f); + } + y += 8; + } + qs += 8; + signs += 4; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db2 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db2 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? 
-1.f : 1.f); + } + y += 8; + } + qh += 2; + qs += 8; + signs += 4; + } + } +} + // ====================== 1.5625 bpw (de)-quantization void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, int k) { @@ -8806,6 +8916,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r #endif +#if defined (__AVX2__) || defined (__ARM_NEON) static const int8_t keven_signs_q2xs[1024] = { 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, -1, @@ -8840,6 +8951,7 @@ static const int8_t keven_signs_q2xs[1024] = { 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, }; +#endif void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); @@ -9327,6 +9439,202 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void #endif } +void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq3_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; + + const uint8x16x2_t mask1 = vld1q_u8_x2(k_mask1); + const uint8x16_t mask2 = vld1q_u8(k_mask2); + + uint8x16x2_t vs; + ggml_int8x16x4_t q3s; + ggml_int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + int sumi1 = 0, sumi2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + const uint32x4_t aux32x4_0 = {iq3xs_grid[qs[ 0] | ((qh[ib32+0] << 8) & 256)], iq3xs_grid[qs[ 1] | ((qh[ib32+0] << 7) & 256)], + iq3xs_grid[qs[ 2] | ((qh[ib32+0] << 6) & 256)], iq3xs_grid[qs[ 3] | ((qh[ib32+0] << 5) & 256)]}; + const uint32x4_t aux32x4_1 = {iq3xs_grid[qs[ 4] | ((qh[ib32+0] << 4) & 256)], iq3xs_grid[qs[ 5] | ((qh[ib32+0] << 3) & 256)], + iq3xs_grid[qs[ 6] | ((qh[ib32+0] << 2) & 256)], iq3xs_grid[qs[ 7] | ((qh[ib32+0] << 1) & 256)]}; + const uint32x4_t aux32x4_2 = {iq3xs_grid[qs[ 8] | ((qh[ib32+1] << 8) & 256)], iq3xs_grid[qs[ 9] | ((qh[ib32+1] << 7) & 256)], + iq3xs_grid[qs[10] | ((qh[ib32+1] << 6) & 256)], iq3xs_grid[qs[11] | ((qh[ib32+1] << 5) & 256)]}; + const uint32x4_t aux32x4_3 = {iq3xs_grid[qs[12] | ((qh[ib32+1] << 4) & 256)], iq3xs_grid[qs[13] | ((qh[ib32+1] << 3) & 256)], + iq3xs_grid[qs[14] | ((qh[ib32+1] << 2) & 256)], iq3xs_grid[qs[15] | ((qh[ib32+1] << 1) & 256)]}; + qs += 16; + + vs.val[0] = 
vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + q3s.val[0] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_0))), vreinterpretq_s8_u8(vs.val[0])); + q3s.val[1] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_1))), vreinterpretq_s8_u8(vs.val[1])); + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + signs += 4; + + q3s.val[2] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_2))), vreinterpretq_s8_u8(vs.val[0])); + q3s.val[3] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_3))), vreinterpretq_s8_u8(vs.val[1])); + + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[0], q8b.val[0]), q3s.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[2], q8b.val[2]), q3s.val[3], q8b.val[3]); + sumi1 += vaddvq_s32(p1) * (1 + 2*(x[i].scales[ib32/2] & 0xf)); + sumi2 += vaddvq_s32(p2) * (1 + 2*(x[i].scales[ib32/2] >> 4)); + } + sumf += d*(sumi1 + sumi2); + } + *s = 0.25f * sumf; + +#elif defined(__AVX2__) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1); + const __m256i mask2 = _mm256_loadu_si256((const __m256i*)k_mask2); + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q2_1 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+0] << 1) & 256)], + iq3xs_grid[qs[6] | ((qh[ib32+0] << 2) & 256)], + iq3xs_grid[qs[5] | ((qh[ib32+0] << 3) & 256)], + iq3xs_grid[qs[4] | ((qh[ib32+0] << 4) & 256)], + iq3xs_grid[qs[3] | ((qh[ib32+0] << 5) & 256)], + iq3xs_grid[qs[2] | ((qh[ib32+0] << 6) & 256)], + iq3xs_grid[qs[1] | ((qh[ib32+0] << 7) & 256)], + iq3xs_grid[qs[0] | ((qh[ib32+0] << 8) & 256)]); + qs += 8; + const __m256i q2_2 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+1] << 1) & 256)], + iq3xs_grid[qs[6] | ((qh[ib32+1] << 2) & 256)], + iq3xs_grid[qs[5] | ((qh[ib32+1] << 3) & 256)], + iq3xs_grid[qs[4] | ((qh[ib32+1] << 4) & 256)], + iq3xs_grid[qs[3] | ((qh[ib32+1] << 5) & 256)], + iq3xs_grid[qs[2] | 
((qh[ib32+1] << 6) & 256)], + iq3xs_grid[qs[1] | ((qh[ib32+1] << 7) & 256)], + iq3xs_grid[qs[0] | ((qh[ib32+1] << 8) & 256)]); + qs += 8; + + __m256i aux256 = _mm256_set1_epi32(signs[0] | (signs[1] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_1 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_1 = _mm256_sub_epi8(_mm256_xor_si256(s2_1, q8_1), s2_1); + + aux256 = _mm256_set1_epi32(signs[2] | (signs[3] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_2 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_2 = _mm256_sub_epi8(_mm256_xor_si256(s2_2, q8_2), s2_2); + + signs += 4; + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = x[i].scales[ib32/2] & 0xf; + const uint16_t ls2 = x[i].scales[ib32/2] >> 4; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.25f * hsum_float_8(accumf); + +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint8_t * restrict signs = x[i].signs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const uint32_t ls1 = 2*(x[i].scales[ib32/2] & 0xf) + 1; + const uint32_t ls2 = 2*(x[i].scales[ib32/2] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? -1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls1; + sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? 
-1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls2; + } + sumf += d * bsum; + } + *s = 0.25f * sumf; +#endif +} + + #ifdef __AVX2__ static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { const __m256i ax = _mm256_sign_epi8(x, x); @@ -9523,6 +9831,7 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * float sumf = 0; for (int ib = 0; ib < nb; ib += 2) { + q4bits.val[0] = vld1q_u8(x[ib+0].qs); q4bits.val[1] = vld1q_u8(x[ib+1].qs); q8b.val[0] = vld1q_s8(y[ib+0].qs); @@ -10239,14 +10548,15 @@ typedef struct { uint16_t * neighbours; } iq3_entry_t; -static iq3_entry_t iq3_data[1] = { +static iq3_entry_t iq3_data[2] = { + {NULL, NULL, NULL}, {NULL, NULL, NULL}, }; static inline int iq3_data_index(int grid_size) { (void)grid_size; - GGML_ASSERT(grid_size == 256); - return 0; + GGML_ASSERT(grid_size == 256 || grid_size == 512); + return grid_size == 256 ? 0 : 1; } static int iq3_compare_func(const void * left, const void * right) { @@ -10278,9 +10588,44 @@ void iq3xs_init_impl(int grid_size) { 3185, 3215, 3252, 3288, 3294, 3364, 3397, 3434, 3483, 3523, 3537, 3587, 3589, 3591, 3592, 3610, 3626, 3670, 3680, 3722, 3749, 3754, 3776, 3789, 3803, 3824, 3857, 3873, 3904, 3906, 3924, 3992, }; + static const uint16_t kgrid_512[512] = { + 0, 1, 2, 5, 7, 8, 9, 10, 12, 14, 16, 17, 21, 27, 32, 34, + 37, 39, 41, 43, 48, 50, 57, 60, 63, 64, 65, 66, 68, 72, 73, 77, + 80, 83, 87, 89, 93, 100, 113, 117, 122, 128, 129, 133, 135, 136, 139, 142, + 145, 149, 152, 156, 162, 165, 167, 169, 171, 184, 187, 195, 201, 205, 208, 210, + 217, 219, 222, 228, 232, 234, 247, 249, 253, 256, 267, 271, 273, 276, 282, 288, + 291, 297, 312, 322, 324, 336, 338, 342, 347, 353, 357, 359, 374, 379, 390, 393, + 395, 409, 426, 441, 448, 450, 452, 464, 466, 470, 475, 488, 492, 512, 513, 514, + 516, 520, 521, 523, 525, 527, 528, 530, 537, 540, 542, 556, 558, 561, 570, 576, + 577, 579, 582, 584, 588, 593, 600, 603, 609, 616, 618, 632, 638, 640, 650, 653, + 655, 656, 660, 666, 672, 675, 685, 688, 698, 705, 708, 711, 712, 715, 721, 727, + 728, 732, 737, 754, 760, 771, 773, 778, 780, 793, 795, 802, 806, 808, 812, 833, + 840, 843, 849, 856, 858, 873, 912, 916, 919, 932, 934, 961, 963, 968, 970, 977, + 989, 993, 1010, 1016, 1024, 1025, 1027, 1029, 1031, 1032, 1034, 1036, 1038, 1041, 1043, 1047, + 1048, 1050, 1057, 1059, 1061, 1064, 1066, 1079, 1080, 1083, 1085, 1088, 1090, 1096, 1099, 1103, + 1106, 1109, 1113, 1116, 1122, 1129, 1153, 1156, 1159, 1169, 1171, 1176, 1183, 1185, 1195, 1199, + 1209, 1212, 1216, 1218, 1221, 1225, 1234, 1236, 1241, 1243, 1250, 1256, 1270, 1281, 1287, 1296, + 1299, 1306, 1309, 1313, 1338, 1341, 1348, 1353, 1362, 1375, 1376, 1387, 1400, 1408, 1410, 1415, + 1425, 1453, 1457, 1477, 1481, 1494, 1496, 1507, 1512, 1538, 1545, 1547, 1549, 1551, 1554, 1561, + 1563, 1565, 1570, 1572, 1575, 1577, 1587, 1593, 1601, 1603, 1605, 1612, 1617, 1619, 1632, 1648, + 1658, 1662, 1664, 1674, 1680, 1690, 1692, 1704, 1729, 1736, 1740, 1745, 1747, 1751, 1752, 1761, + 1763, 1767, 1773, 1787, 1795, 1801, 1806, 1810, 1817, 1834, 1840, 1844, 1857, 1864, 1866, 1877, + 1882, 1892, 1902, 1915, 1934, 1953, 1985, 1987, 2000, 2002, 2013, 2048, 2052, 2058, 2064, 2068, + 2071, 2074, 2081, 2088, 2104, 2114, 2119, 2121, 2123, 2130, 2136, 2141, 2147, 2153, 2157, 2177, + 2179, 2184, 2189, 2193, 2203, 2208, 2223, 2226, 2232, 2244, 2249, 2251, 2256, 2258, 2265, 2269, + 2304, 2306, 2324, 2335, 2336, 2361, 2373, 2375, 2385, 2418, 2443, 2460, 2480, 2504, 2509, 2520, + 2531, 2537, 2562, 2568, 2572, 
2578, 2592, 2596, 2599, 2602, 2614, 2620, 2625, 2627, 2629, 2634, + 2641, 2650, 2682, 2688, 2697, 2707, 2712, 2718, 2731, 2754, 2759, 2760, 2775, 2788, 2793, 2805, + 2811, 2817, 2820, 2832, 2842, 2854, 2890, 2902, 2921, 2923, 2978, 3010, 3012, 3026, 3081, 3083, + 3085, 3097, 3099, 3120, 3136, 3152, 3159, 3188, 3210, 3228, 3234, 3245, 3250, 3256, 3264, 3276, + 3281, 3296, 3349, 3363, 3378, 3392, 3395, 3420, 3440, 3461, 3488, 3529, 3531, 3584, 3588, 3591, + 3600, 3602, 3614, 3616, 3628, 3634, 3650, 3657, 3668, 3683, 3685, 3713, 3716, 3720, 3726, 3729, + 3736, 3753, 3778, 3802, 3805, 3819, 3841, 3845, 3851, 3856, 3880, 3922, 3938, 3970, 3993, 4032, + }; + const int kmap_size = 4096; - const int nwant = 2; - const uint16_t * kgrid = kgrid_256; + const int nwant = grid_size == 256 ? 2 : 3; + const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; uint32_t * kgrid_q3xs; int * kmap_q3xs; uint16_t * kneighbors_q3xs; @@ -10377,7 +10722,7 @@ void iq3xs_init_impl(int grid_size) { } void iq3xs_free_impl(int grid_size) { - GGML_ASSERT(grid_size == 256); + GGML_ASSERT(grid_size == 256 || grid_size == 512); const int gindex = iq3_data_index(grid_size); if (iq3_data[gindex].grid) { free(iq3_data[gindex].grid); iq3_data[gindex].grid = NULL; @@ -10410,9 +10755,10 @@ static int iq3_find_best_neighbour(const uint16_t * restrict neighbours, const u return grid_index; } -static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { +static void quantize_row_iq3_xxs_impl(int grid_size, const float * restrict x, void * restrict vy, int n, + const float * restrict quant_weights) { - const int gindex = iq3_data_index(256); + const int gindex = iq3_data_index(grid_size); const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; const int * kmap_q3xs = iq3_data[gindex].map; @@ -10426,9 +10772,23 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict const int kMaxQ = 8; - const int nbl = n/256; + const int nbl = n/QK_K; - block_iq3_xxs * y = vy; + ggml_fp16_t * dh; + uint8_t * qs; + int block_size; + if (grid_size == 256) { + block_iq3_xxs * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_xxs); + } else { + block_iq3_s * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_s); + } + int quant_size = block_size - sizeof(ggml_fp16_t); float scales[QK_K/32]; float weight[32]; @@ -10439,20 +10799,21 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict bool is_on_grid[8]; bool is_on_grid_aux[8]; uint8_t block_signs[8]; - uint8_t q3[3*(QK_K/8)]; + uint8_t q3[3*(QK_K/8)+QK_K/32]; uint32_t * scales_and_signs = (uint32_t *)(q3 + QK_K/4); + uint8_t * qh = q3 + 3*(QK_K/8); for (int ibl = 0; ibl < nbl; ++ibl) { - y[ibl].d = GGML_FP32_TO_FP16(0.f); - memset(q3, 0, 3*QK_K/8); + dh[0] = GGML_FP32_TO_FP16(0.f); + memset(q3, 0, 3*QK_K/8+QK_K/32); float max_scale = 0; const float * xbl = x + QK_K*ibl; float sumx2 = 0; for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; - float sigma2 = sumx2/QK_K; + float sigma2 = 2*sumx2/QK_K; for (int ib = 0; ib < QK_K/32; ++ib) { const float * xb = xbl + 32*ib; @@ -10570,7 +10931,13 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict printf("\n"); GGML_ASSERT(false); } - q3[8*ib+k] = grid_index; + if (grid_size == 256) { + q3[8*ib+k] = grid_index; + } else { + q3[8*ib+k] = grid_index & 255; + qh[ib] |= ((grid_index >> 8) << k); + } + } scales_and_signs[ib] = block_signs[0] | (block_signs[1] << 7) | 
(block_signs[2] << 14) | (block_signs[3] << 21); GGML_ASSERT(scale >= 0); @@ -10579,63 +10946,25 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict } if (!max_scale) { - memset(y[ibl].qs, 0, 3*QK_K/8); + memset(qs, 0, quant_size); + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; continue; } float d = max_scale/31; - y[ibl].d = GGML_FP32_TO_FP16(d); + dh[0] = GGML_FP32_TO_FP16(d * 1.0125f); // small improvement via this fudge factor float id = 1/d; - float sumqx = 0, sumq2 = 0; for (int ib = 0; ib < QK_K/32; ++ib) { int l = nearest_int(0.5f*(id*scales[ib]-1)); l = MAX(0, MIN(15, l)); scales_and_signs[ib] |= ((uint32_t)l << 28); - if (false) { - const float * xb = xbl + 32*ib; - if (quant_weights) { - const float * qw = quant_weights + QK_K*ibl + 32*ib; - for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); - } else { - for (int i = 0; i < 32; ++i) weight[i] = xb[i]*xb[i]; - } - const float db = 0.25f * d * (1 + 2*l); - for (int k = 0; k < 8; ++k) { - const int8_t * signs = keven_signs_q2xs + 8*((scales_and_signs[ib] >> 7*(k/2)) & 127) + 4*(k%2); - const float * xk = xb + 4*k; - const float * wk = weight + 4*k; - //const uint8_t * grid = (const uint8_t *)(kgrid_q3xs + q3[8*ib+k]); - const uint8_t * grid = (const uint8_t *)(iq3xxs_grid + q3[8*ib+k]); - float best_mse = 0; int best_index = q3[8*ib+k]; - for (int j = 0; j < 4; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - best_mse += wk[j] * diff * diff; - } - for (int idx = 0; idx < 256; ++idx) { - //grid = (const uint8_t *)(kgrid_q3xs + idx); - grid = (const uint8_t *)(iq3xxs_grid + idx); - float mse = 0; - for (int j = 0; j < 4; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - mse += wk[j] * diff * diff; - } - if (mse < best_mse) { - best_mse = mse; best_index = idx; - } - } - q3[8*ib+k] = best_index; - //grid = (const uint8_t *)(kgrid_q3xs + best_index); - grid = (const uint8_t *)(iq3xxs_grid + best_index); - for (int j = 0; j < 4; ++j) { - float q = db * grid[j] * signs[j]; - sumqx += wk[j] * q * xk[j]; - sumq2 += wk[j] * q * q; - } - } - if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); - } } - memcpy(y[ibl].qs, q3, 3*QK_K/8); + memcpy(qs, q3, quant_size); + + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; + } } @@ -10645,7 +10974,7 @@ size_t quantize_iq3_xxs(const float * src, void * dst, int nrow, int n_per_row, int nblock = n_per_row/QK_K; char * qrow = (char *)dst; for (int row = 0; row < nrow; ++row) { - quantize_row_iq3_xxs_impl(src, qrow, n_per_row, quant_weights); + quantize_row_iq3_xxs_impl(256, src, qrow, n_per_row, quant_weights); src += n_per_row; qrow += nblock*sizeof(block_iq3_xxs); } @@ -10660,9 +10989,226 @@ void quantize_row_iq3_xxs(const float * restrict x, void * restrict vy, int k) { void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * restrict y, int k) { assert(k % QK_K == 0); - quantize_row_iq3_xxs_impl(x, y, k, NULL); + quantize_row_iq3_xxs_impl(256, x, y, k, NULL); } +static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, void * restrict vy, int n, + const float * restrict quant_weights, + float * scales, + float * weight, + float * xval, + int8_t * L, + int8_t * Laux, + float * waux, + bool * is_on_grid, + bool * is_on_grid_aux, + uint8_t * block_signs) { + + const int gindex = iq3_data_index(512); + + const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; + const int * kmap_q3xs = iq3_data[gindex].map; + const uint16_t * kneighbors_q3xs = 
iq3_data[gindex].neighbours; + + //GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 8; + + const int nbl = n/QK_K; + + block_iq3_s * y = vy; + + const int bs4 = block_size/4; + const int bs8 = block_size/8; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + memset(&y[ibl], 0, sizeof(block_iq3_s)); + y[ibl].d = GGML_FP32_TO_FP16(0.f); + + uint8_t * qs = y[ibl].qs; + uint8_t * qh = y[ibl].qh; + uint8_t * signs = y[ibl].signs; + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/block_size; ++ib) { + const float * xb = xbl + block_size*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < block_size; ++i) weight[i] = xb[i]*xb[i]; + } + for (int i = 0; i < block_size; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < bs8; ++k) { + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; s |= (1 << i); + } + } + block_signs[k] = s; + } + float max = xval[0]; + for (int i = 1; i < block_size; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int is = -15; is <= 15; ++is) { + float id = (2*kMaxQ-1+is*0.2f)/max; + float this_scale = 1/id; + for (int k = 0; k < bs4; ++k) { + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + Laux[4*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (Laux[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, this_scale, Laux + 4*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < block_size; ++i) L[i] = Laux[i]; + for (int k = 0; k < bs4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < bs4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < bs4; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 3*i); + } + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, scale, L + 4*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q3xs + grid_index); + for (int i = 0; i < 4; ++i) L[4*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 
2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. + scale = -scale; + for (int k = 0; k < bs8; ++k) block_signs[k] = ~block_signs[k]; + } + for (int k = 0; k < bs4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (L[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 4; ++i) printf(" %d", L[4*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + qs[k] = grid_index & 255; + qh[(ib*bs4+k)/8] |= ((grid_index >> 8) << ((ib*bs4+k)%8)); + } + qs += bs4; + for (int k = 0; k < bs8; ++k) signs[k] = block_signs[k]; + signs += bs8; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + for (int ib = 0; ib < QK_K/block_size; ib += 2) { + int l1 = nearest_int(0.5f*(id*scales[ib+0]-1)); + l1 = MAX(0, MIN(15, l1)); + int l2 = nearest_int(0.5f*(id*scales[ib+1]-1)); + l2 = MAX(0, MIN(15, l2)); + y[ibl].scales[ib/2] = l1 | (l2 << 4); + } + + } +} + +#define IQ3S_BLOCK_SIZE 32 +size_t quantize_iq3_s(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + float scales[QK_K/IQ3S_BLOCK_SIZE]; + float weight[IQ3S_BLOCK_SIZE]; + float xval[IQ3S_BLOCK_SIZE]; + int8_t L[IQ3S_BLOCK_SIZE]; + int8_t Laux[IQ3S_BLOCK_SIZE]; + float waux[IQ3S_BLOCK_SIZE]; + bool is_on_grid[IQ3S_BLOCK_SIZE/4]; + bool is_on_grid_aux[IQ3S_BLOCK_SIZE/4]; + uint8_t block_signs[IQ3S_BLOCK_SIZE/8]; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq3_s_impl(IQ3S_BLOCK_SIZE, src, qrow, n_per_row, quant_weights, + scales, weight, xval, L, Laux, waux, is_on_grid, is_on_grid_aux, block_signs); + src += n_per_row; + qrow += nblock*sizeof(block_iq3_s); + } + return nrow * nblock * sizeof(block_iq3_s); +} + +void quantize_row_iq3_s(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq3_s * restrict y = vy; + quantize_row_iq3_s_reference(x, y, k); +} + +void quantize_row_iq3_s_reference(const float * restrict x, block_iq3_s * restrict y, int k) { + assert(k % QK_K == 0); + quantize_iq3_s(x, y, 1, k, NULL, NULL); +} + + // =================================== 1.5 bpw =================================================== static int iq1_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, diff --git a/ggml-quants.h b/ggml-quants.h index 113623b62..303b0b6f9 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,6 +191,21 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +// 3.4375 bpw +#if QK_K == 64 +#define IQ3S_N_SCALE 2 +#else +#define IQ3S_N_SCALE QK_K/64 +#endif +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 13*(QK_K/32) + IQ3S_N_SCALE, "wrong iq3_s block size/padding"); + typedef struct { ggml_fp16_t d; uint8_t qs[QK_K/8]; @@ -226,6 +241,7 @@ void 
quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGM void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); +void quantize_row_iq3_s_reference (const float * GGML_RESTRICT x, block_iq3_s * GGML_RESTRICT y, int k); void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -242,6 +258,7 @@ void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq3_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -262,6 +279,7 @@ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq3_s (const block_iq3_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -280,6 +298,7 @@ void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") @@ -289,6 +308,7 @@ size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq3_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index d710fe702..c09a3cad6 100644 --- a/ggml.c +++ b/ggml.c @@ -678,6 +678,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ3_S] = { + .type_name = "iq3_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq3_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq3_s, + .from_float = quantize_row_iq3_s, + .from_float_reference = (ggml_from_float_t)quantize_row_iq3_s_reference, + .vec_dot = ggml_vec_dot_iq3_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_IQ1_S] = { .type_name = "iq1_s", .blck_size = QK_K, @@ -2304,6 +2316,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; + case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7738,6 +7751,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_add_q_f32(params, dst); } break; @@ -8017,6 +8031,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_add1_q_f32(params, dst); } break; @@ -8141,6 +8156,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: default: { GGML_ASSERT(false); @@ -11039,6 +11055,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_out_prod_q_f32(params, dst); } break; @@ -11227,6 +11244,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: default: { GGML_ASSERT(false); @@ -11429,6 +11447,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_get_rows_q(params, dst); } break; @@ -12129,6 +12148,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case 
GGML_TYPE_I16: @@ -12212,6 +12232,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19463,6 +19484,7 @@ void ggml_quantize_init(enum ggml_type type) { case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ1_S: iq2xs_init_impl(type); break; case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; + case GGML_TYPE_IQ3_S: iq3xs_init_impl(512); break; default: // nothing break; } @@ -19737,6 +19759,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq3_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ3_S: + { + GGML_ASSERT(start % QK_K == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq3_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_IQ1_S: { GGML_ASSERT(start % QK_K == 0); diff --git a/ggml.h b/ggml.h index 37eff6279..a4166e1f7 100644 --- a/ggml.h +++ b/ggml.h @@ -350,6 +350,7 @@ extern "C" { GGML_TYPE_IQ3_XXS = 18, GGML_TYPE_IQ1_S = 19, GGML_TYPE_IQ4_NL = 20, + GGML_TYPE_IQ3_S = 21, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -389,6 +390,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ3_S = 20, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 37477e6ef..1f6b6cff4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2545,6 +2545,7 @@ struct llama_model_loader { case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; + case GGML_TYPE_IQ3_S: ftype = LLAMA_FTYPE_MOSTLY_IQ3_S; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2890,6 +2891,8 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_S: return "IQ3_S - 3.4375 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_M: return "IQ3_S mix - 3.66 bpw"; default: return "unknown, may not work"; } @@ -10544,6 +10547,12 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? GGML_TYPE_Q3_K : GGML_TYPE_IQ3_XXS; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_S && qs.model.hparams.n_gqa() >= 4) { + new_type = GGML_TYPE_Q4_K; + } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { new_type = qs.i_attention_wv < 2 ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } @@ -10575,13 +10584,17 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = GGML_TYPE_Q8_0; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { - new_type = GGML_TYPE_Q2_K; + new_type = GGML_TYPE_IQ3_XXS; + } + } else if (name.find("attn_q.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + new_type = GGML_TYPE_IQ3_XXS; } } else if (name.find("ffn_down") != std::string::npos) { auto info = layer_info(qs.i_ffn_down, qs.n_ffn_down, name.c_str()); int i_layer = info.first, n_layer = info.second; if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) { if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && !qs.has_imatrix) { @@ -10592,6 +10605,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M && (i_layer < n_layer/8 || + (qs.model.hparams.n_expert == 8 && use_more_bits(i_layer, n_layer)))) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { new_type = arch == LLM_ARCH_FALCON ? GGML_TYPE_Q4_K : GGML_TYPE_Q5_K; } @@ -10623,37 +10640,41 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (qs.model.hparams.n_expert == 8) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || - ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { + ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ3_S || + ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { new_type = GGML_TYPE_Q5_K; } } else { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M ) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L ) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M ) new_type = GGML_TYPE_Q4_K; } } else { if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; } } else if (name.find("attn_qkv.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L || ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) new_type = GGML_TYPE_Q5_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) new_type = GGML_TYPE_Q6_K; } else if (name.find("ffn_gate") != std::string::npos) { auto info = layer_info(qs.i_ffn_gate, qs.n_ffn_gate, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && !use_more_bits(i_layer, n_layer)) { - new_type = GGML_TYPE_Q2_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && 
i_layer < 7*n_layer/8)) { + new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_gate; } else if (name.find("ffn_up") != std::string::npos) { auto info = layer_info(qs.i_ffn_up, qs.n_ffn_up, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && !use_more_bits(i_layer, n_layer)) { - new_type = GGML_TYPE_Q2_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { + new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_up; } @@ -10673,7 +10694,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || - new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { + new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || new_type == GGML_TYPE_IQ3_S) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -10688,6 +10709,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ1_S: case GGML_TYPE_Q2_K: case GGML_TYPE_Q3_K: new_type = GGML_TYPE_IQ4_NL; break; @@ -10719,7 +10741,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K_S: case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; - case LLAMA_FTYPE_MOSTLY_Q3_K_XS: + case LLAMA_FTYPE_MOSTLY_Q3_K_XS: quantized_type = GGML_TYPE_IQ3_S; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; @@ -10733,6 +10755,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; + case LLAMA_FTYPE_MOSTLY_IQ3_S: quantized_type = GGML_TYPE_IQ3_S; break; + case LLAMA_FTYPE_MOSTLY_IQ3_M: quantized_type = GGML_TYPE_IQ3_S; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index 84f196b3b..889edf4d9 100644 --- a/llama.h +++ b/llama.h @@ -102,6 +102,8 @@ extern "C" { LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ4_NL = 25, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ3_S = 26, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ3_M = 27, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 55db42bf6..f8574588b 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1918,7 +1918,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, - GGML_TYPE_IQ4_NL, + GGML_TYPE_IQ4_NL, GGML_TYPE_IQ3_S, }; // unary ops diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 5e92d5742..04656bb9e 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -151,6 +151,7 @@ int main(int argc, char * argv[]) { const float max_quantization_error 
= type == GGML_TYPE_Q2_K ? MAX_QUANTIZATION_TOTAL_ERROR_2BITS : type == GGML_TYPE_Q3_K ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : + type == GGML_TYPE_IQ3_S ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : type == GGML_TYPE_IQ3_XXS ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS_XXS : MAX_QUANTIZATION_TOTAL_ERROR; failed = !(total_error < max_quantization_error); num_failed += failed; @@ -167,7 +168,8 @@ int main(int argc, char * argv[]) { const float vec_dot_error = dot_product_error(qfns, test_size, test_data.data(), test_data2.data()); const float max_allowed_error = type == GGML_TYPE_Q2_K || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ2_XXS || - type == GGML_TYPE_IQ3_XXS ? MAX_DOT_PRODUCT_ERROR_LOWBIT : MAX_DOT_PRODUCT_ERROR; + type == GGML_TYPE_IQ3_XXS || type == GGML_TYPE_IQ3_S ? MAX_DOT_PRODUCT_ERROR_LOWBIT + : MAX_DOT_PRODUCT_ERROR; failed = !(vec_dot_error < max_allowed_error); num_failed += failed; if (failed || verbose) { From 9e359a4f47c1b2dceb99e29706c9f7403d32ab5e Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 24 Feb 2024 19:16:04 +0100 Subject: [PATCH 699/811] server: continue to update other slots on embedding concurrent request (#5699) * server: #5655 - continue to update other slots on embedding concurrent request. * server: tests: add multi users embeddings as fixed * server: tests: adding OAI compatible embedding concurrent endpoint * server: tests: adding OAI compatible embedding with multiple inputs --- examples/server/server.cpp | 2 +- examples/server/tests/features/issues.feature | 34 +--- .../server/tests/features/parallel.feature | 46 ++++++ examples/server/tests/features/server.feature | 13 ++ examples/server/tests/features/steps/steps.py | 151 +++++++++++++----- 5 files changed, 168 insertions(+), 78 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 9fb436c2a..19a8c1067 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1836,7 +1836,7 @@ struct llama_server_context send_embedding(slot); slot.release(); slot.i_batch = -1; - return true; + continue; } completion_token_output result; diff --git a/examples/server/tests/features/issues.feature b/examples/server/tests/features/issues.feature index 542006d9a..bf5a175a3 100644 --- a/examples/server/tests/features/issues.feature +++ b/examples/server/tests/features/issues.feature @@ -1,36 +1,4 @@ # List of ongoing issues @bug Feature: Issues - # Issue #5655 - Scenario: Multi users embeddings - Given a server listening on localhost:8080 - And a model file stories260K.gguf - And a model alias tinyllama-2 - And 42 as server seed - And 64 KV cache size - And 2 slots - And continuous batching - And embeddings extraction - Then the server is starting - Then the server is healthy - - Given a prompt: - """ - Write a very long story about AI. - """ - And a prompt: - """ - Write another very long music lyrics. - """ - And a prompt: - """ - Write a very long poem. - """ - And a prompt: - """ - Write a very long joke. 
- """ - Given concurrent embedding requests - Then the server is busy - Then the server is idle - Then all embeddings are generated + # No confirmed issue at the moment diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature index 802d624ff..c85f9de1d 100644 --- a/examples/server/tests/features/parallel.feature +++ b/examples/server/tests/features/parallel.feature @@ -8,6 +8,7 @@ Feature: Parallel And 42 as server seed And 64 KV cache size And 2 slots + And embeddings extraction And continuous batching Then the server is starting Then the server is healthy @@ -75,3 +76,48 @@ Feature: Parallel Then the server is busy Then the server is idle Then all prompts are predicted + + Scenario: Multi users embeddings + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + Given concurrent embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated + + Scenario: Multi users OAI compatibility embeddings + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? + """ + And a prompt: + """ + What is the biggest US city ? + """ + And a prompt: + """ + What is the capital of Bulgaria ? + """ + And a model tinyllama-2 + Given concurrent OAI embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index fedcfe5ae..5f81d256a 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -60,6 +60,19 @@ Feature: llama.cpp server """ Then embeddings are generated + Scenario: OAI Embeddings compatibility with multiple inputs + Given a model tinyllama-2 + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? 
+ """ + When an OAI compatible embeddings computation request for multiple inputs + Then embeddings are generated + Scenario: Tokenize / Detokenize When tokenizing: diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 50f2b641e..9c825fdbc 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -1,4 +1,5 @@ import asyncio +import collections import json import os import re @@ -261,35 +262,35 @@ def step_a_prompt_prompt(context, prompt): @step(u'concurrent completion requests') @async_run_until_complete() async def step_concurrent_completion_requests(context): - await concurrent_completion_requests(context, - request_completion, - # prompt is inserted automatically - context.base_url, - debug=context.debug, - n_predict=context.n_predict if hasattr(context, 'n_predict') else None, - server_seed=context.server_seed if hasattr(context, 'server_seed') else None, - user_api_key=context.user_api_key if hasattr(context, - 'user_api_key') else None) + await concurrent_requests(context, + request_completion, + # prompt is inserted automatically + context.base_url, + debug=context.debug, + n_predict=context.n_predict if hasattr(context, 'n_predict') else None, + server_seed=context.server_seed if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key if hasattr(context, + 'user_api_key') else None) @step(u'concurrent OAI completions requests') @async_run_until_complete async def step_oai_chat_completions(context): - await concurrent_completion_requests(context, oai_chat_completions, - # user_prompt is inserted automatically - context.system_prompt, - context.base_url, - True, # async_client - model=context.model - if hasattr(context, 'model') else None, - n_predict=context.n_predict - if hasattr(context, 'n_predict') else None, - enable_streaming=context.enable_streaming - if hasattr(context, 'enable_streaming') else None, - server_seed=context.server_seed - if hasattr(context, 'server_seed') else None, - user_api_key=context.user_api_key - if hasattr(context, 'user_api_key') else None) + await concurrent_requests(context, oai_chat_completions, + # user_prompt is inserted automatically + context.system_prompt, + context.base_url, + True, # async_client + model=context.model + if hasattr(context, 'model') else None, + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None) @step(u'all prompts are predicted') @@ -316,36 +317,58 @@ async def all_prompts_are_predicted(context, expected_predicted_n=None): @step(u'embeddings are computed for') @async_run_until_complete async def step_compute_embedding(context): - content = context.text - base_url = context.base_url - context.embeddings = await request_embedding(content, base_url) + context.embeddings = await request_embedding(context.text, base_url=context.base_url) @step(u'embeddings are generated') def step_assert_embeddings(context): - assert_embeddings(context.embeddings) + if len(context.prompts) == 0: + assert_embeddings(context.embeddings) + else: + assert len(context.embeddings) == len(context.prompts), (f"unexpected response:\n" + f"context.prompts={context.prompts}\n" + f"context.embeddings={context.embeddings}") + for embedding in 
context.embeddings: + context.prompts.pop() + assert_embeddings(embedding) @step(u'an OAI compatible embeddings computation request for') -def step_oai_compute_embedding(context): - openai.api_key = 'nope' # openai client always expects an api_keu - if context.user_api_key is not None: - openai.api_key = context.user_api_key - openai.api_base = f'{context.base_url}/v1' - embeddings = openai.Embedding.create( - model=context.model, - input=context.text, - ) - context.embeddings = embeddings +@async_run_until_complete +async def step_oai_compute_embeddings(context): + context.embeddings = await request_oai_embeddings(context.text, + base_url=context.base_url, + user_api_key=context.user_api_key, + model=context.model) + + +@step(u'an OAI compatible embeddings computation request for multiple inputs') +@async_run_until_complete +async def step_oai_compute_embeddings_multiple_inputs(context): + context.embeddings = await request_oai_embeddings(context.prompts, + base_url=context.base_url, + user_api_key=context.user_api_key, + model=context.model) @step(u'concurrent embedding requests') @async_run_until_complete() async def step_concurrent_embedding_requests(context): - await concurrent_completion_requests(context, - request_embedding, - # prompt is inserted automatically - context.base_url) + await concurrent_requests(context, + request_embedding, + # prompt is inserted automatically + base_url=context.base_url) + + +@step(u'concurrent OAI embedding requests') +@async_run_until_complete() +async def step_concurrent_oai_embedding_requests(context): + await concurrent_requests(context, + request_oai_embeddings, + # prompt is inserted automatically + base_url=context.base_url, + async_client=True, + model=context.model) @step(u'all embeddings are generated') @@ -401,7 +424,7 @@ def step_check_options_header_value(context, cors_header, cors_header_value): assert context.options_response.headers[cors_header] == cors_header_value -async def concurrent_completion_requests(context, f_completion, *args, **kwargs): +async def concurrent_requests(context, f_completion, *args, **kwargs): n_prompts = len(context.prompts) if context.debug: print(f"starting {n_prompts} concurrent completion requests...") @@ -565,7 +588,7 @@ async def oai_chat_completions(user_prompt, return completion_response -async def request_embedding(content, base_url): +async def request_embedding(content, base_url=None): async with aiohttp.ClientSession() as session: async with session.post(f'{base_url}/embedding', json={ @@ -576,6 +599,46 @@ async def request_embedding(content, base_url): return response_json['embedding'] +async def request_oai_embeddings(input, + base_url=None, user_api_key=None, + model=None, async_client=False): + # openai client always expects an api_key + user_api_key = user_api_key if user_api_key is not None else 'nope' + if async_client: + origin = 'llama.cpp' + if user_api_key is not None: + headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/v1/embeddings', + json={ + "input": input, + "model": model, + }, + headers=headers) as response: + assert response.status == 200, f"received status code not expected: {response.status}" + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "application/json; charset=utf-8" + response_json = await response.json() + assert response_json['model'] == model, f"invalid model received: {response_json['model']}" 
+ assert response_json['object'] == 'list' + return response_json['data'] + else: + openai.api_key = user_api_key + openai.api_base = f'{base_url}/v1' + oai_embeddings = openai.Embedding.create( + model=model, + input=input, + ) + + if isinstance(input, collections.abc.Sequence): + embeddings = [] + for an_oai_embeddings in oai_embeddings.data: + embeddings.append(an_oai_embeddings.embedding) + else: + embeddings = oai_embeddings.data.embedding + return embeddings + + def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re_content=None): content = completion_response['content'] n_predicted = completion_response['timings']['predicted_n'] From 69917dfa55674c608360638bb4d6a12a315e2810 Mon Sep 17 00:00:00 2001 From: Anas Ahouzi <112881240+aahouzi@users.noreply.github.com> Date: Sun, 25 Feb 2024 10:54:04 +0100 Subject: [PATCH 700/811] py : fix StableLM conversion after config.json changes (#5703) * Fix issues during StableLM models conversion * Fix hard coded layer_norm_eps * Support layer_norm_eps for LlavaStableLM Co-authored-by: Jared Van Bortel * Add missing parenthesis Co-authored-by: Jared Van Bortel * Support rotary_factor for LlavaStableLM Co-authored-by: Jared Van Bortel * fix typo * Add StableLMEpochForCausalLM for safety Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> * Add StableLMEpochForCausalLM for safety 2 Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> --------- Co-authored-by: Jared Van Bortel Co-authored-by: Jared Van Bortel Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> --- convert-hf-to-gguf.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 32d54b45f..ae30b2a76 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -192,7 +192,7 @@ class Model: return RefactModel if model_architecture == "PersimmonForCausalLM": return PersimmonModel - if model_architecture in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + if model_architecture in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return StableLMModel if model_architecture == "QWenLMHeadModel": return QwenModel @@ -253,7 +253,7 @@ class Model: return gguf.MODEL_ARCH.REFACT if arch == "PersimmonForCausalLM": return gguf.MODEL_ARCH.PERSIMMON - if arch in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + if arch in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return gguf.MODEL_ARCH.STABLELM if arch == "QWenLMHeadModel": return gguf.MODEL_ARCH.QWEN @@ -1074,10 +1074,11 @@ class StableLMModel(Model): self.gguf_writer.add_embedding_length(hparams["hidden_size"]) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) - self.gguf_writer.add_rope_dimension_count(int(hparams["rope_pct"] * (hparams["hidden_size"] // hparams["num_attention_heads"]))) + rotary_factor = self.find_hparam(["partial_rotary_factor", "rope_pct"]) + self.gguf_writer.add_rope_dimension_count(int(rotary_factor * (hparams["hidden_size"] // hparams["num_attention_heads"]))) self.gguf_writer.add_head_count(hparams["num_attention_heads"]) self.gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) - self.gguf_writer.add_layer_norm_eps(1e-5) + self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_eps", "norm_eps"])) class 
MixtralModel(Model): From ab336a9d5e5352ecdcdf4c12d2d54cf4ef82ce31 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 25 Feb 2024 12:09:09 +0200 Subject: [PATCH 701/811] code : normalize enum names (#5697) * coda : normalize enum names ggml-ci * code : cont * code : cont --- common/common.cpp | 18 +- common/common.h | 4 +- common/train.cpp | 10 +- examples/baby-llama/baby-llama.cpp | 2 +- examples/finetune/finetune.cpp | 2 +- examples/llama-bench/llama-bench.cpp | 14 +- examples/llava/llava.cpp | 2 +- examples/server/server.cpp | 18 +- .../train-text-from-scratch.cpp | 2 +- ggml-cuda.cu | 138 +++---- ggml-metal.m | 4 +- ggml-opencl.cpp | 50 +-- ggml-sycl.cpp | 152 ++++---- ggml-vulkan.cpp | 102 ++--- ggml.c | 350 +++++++++--------- ggml.h | 38 +- llama.cpp | 64 ++-- llama.h | 28 +- tests/test-backend-ops.cpp | 4 +- tests/test-opt.cpp | 2 +- 20 files changed, 502 insertions(+), 502 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 10ef11829..ec596f5a0 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -295,9 +295,9 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } std::string value(argv[i]); - /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } - else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } - else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR; } + else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN; } else { invalid_param = true; break; } } else if (arg == "--rope-scale") { if (++i >= argc) { @@ -630,11 +630,11 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } std::string arg_next = argv[i]; if (arg_next == "none") { - params.split_mode = LLAMA_SPLIT_NONE; + params.split_mode = LLAMA_SPLIT_MODE_NONE; } else if (arg_next == "layer") { - params.split_mode = LLAMA_SPLIT_LAYER; + params.split_mode = LLAMA_SPLIT_MODE_LAYER; } else if (arg_next == "row") { - params.split_mode = LLAMA_SPLIT_ROW; + params.split_mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; break; @@ -837,15 +837,15 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { sep++; if (strncmp(sep, "int:", 4) == 0) { sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; kvo.int_value = std::atol(sep); } else if (strncmp(sep, "float:", 6) == 0) { sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; kvo.float_value = std::atof(sep); } else if (strncmp(sep, "bool:", 5) == 0) { sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; if (std::strcmp(sep, "true") == 0) { kvo.bool_value = true; } else if (std::strcmp(sep, "false") == 0) { diff --git a/common/common.h b/common/common.h index 935771d44..3e21579b0 100644 --- a/common/common.h +++ b/common/common.h @@ -61,7 +61,7 @@ struct gpt_params { float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) - llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs + llama_split_mode split_mode = LLAMA_SPLIT_MODE_LAYER; // how to split the 
model across GPUs int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[128] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. @@ -75,7 +75,7 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length - int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; + int32_t rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; // // sampling parameters diff --git a/common/train.cpp b/common/train.cpp index e4c3d5df6..0dbfd24df 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -31,7 +31,7 @@ struct train_state * init_train_state() { state->opt = new struct ggml_opt_context; state->opt->ctx = NULL; - state->opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + state->opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); state->opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; state->opt->loss_after = 0.0f; @@ -556,7 +556,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g std::string opt_type; GGUF_GET_KEY(fctx, opt_type, gguf_get_val_str, GGUF_TYPE_STRING, true, LLM_KV_OPTIMIZER_TYPE); if (opt_type == LLM_KV_OPTIMIZER_TYPE_ADAM) { - opt->params.type = GGML_OPT_ADAM; + opt->params.type = GGML_OPT_TYPE_ADAM; GGUF_GET_KEY(fctx, opt->adam.fx_best, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_ADAM_BEST_LOSS); GGUF_GET_KEY(fctx, opt->adam.fx_prev, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_ADAM_PREVIOUS_LOSS); @@ -568,7 +568,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g copy_tensor_by_name(opt->adam.v, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_ADAM_SECOND_MOMENTS); copy_tensor_by_name(opt->adam.pf, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_ADAM_PAST_LOSS_VALUES); } else if (opt_type == LLM_KV_OPTIMIZER_TYPE_LBFGS) { - opt->params.type = GGML_OPT_LBFGS; + opt->params.type = GGML_OPT_TYPE_LBFGS; GGUF_GET_KEY(fctx, opt->params.lbfgs.m, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_OPTIMIZER_LBFGS_APPROX_HESSIAN_COUNT); GGUF_GET_KEY(fctx, opt->lbfgs.fx_best, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_LBFGS_BEST_LOSS); @@ -603,7 +603,7 @@ void save_opt_context_gguf(struct gguf_context * fctx, struct ggml_opt_context * gguf_set_val_bool(fctx, LLM_KV_OPTIMIZER_JUST_INITIALIZED, opt->just_initialized); switch (opt->params.type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { gguf_set_val_str(fctx, LLM_KV_OPTIMIZER_TYPE, LLM_KV_OPTIMIZER_TYPE_ADAM); gguf_set_val_f32(fctx, LLM_KV_OPTIMIZER_ADAM_BEST_LOSS, opt->adam.fx_best); @@ -622,7 +622,7 @@ void save_opt_context_gguf(struct gguf_context * fctx, struct ggml_opt_context * gguf_add_tensor(fctx, opt->adam.pf); } } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { gguf_set_val_str(fctx, LLM_KV_OPTIMIZER_TYPE, LLM_KV_OPTIMIZER_TYPE_LBFGS); gguf_set_val_u32(fctx, LLM_KV_OPTIMIZER_LBFGS_APPROX_HESSIAN_COUNT, opt->params.lbfgs.m); diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index 65bb238a0..bf0125e75 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -1547,7 +1547,7 @@ int main(int argc, char ** argv) { float error_before_opt = ggml_get_f32_1d(e, 0); - struct ggml_opt_params opt_params_lbfgs = ggml_opt_default_params(GGML_OPT_LBFGS); + 
struct ggml_opt_params opt_params_lbfgs = ggml_opt_default_params(GGML_OPT_TYPE_LBFGS); opt_params_lbfgs.print_forward_graph = false; opt_params_lbfgs.print_backward_graph = false; opt_params_lbfgs.lbfgs.n_iter = 16; diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 98bf5a07a..3da5317b3 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1531,7 +1531,7 @@ int main(int argc, char ** argv) { lora.hparams.n_rank_output = n_rank_output; // set opt params from command line - opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 11410f8ae..8fec3d43d 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -157,9 +157,9 @@ static const char * output_format_str(output_formats format) { static const char * split_mode_str(llama_split_mode mode) { switch (mode) { - case LLAMA_SPLIT_NONE: return "none"; - case LLAMA_SPLIT_LAYER: return "layer"; - case LLAMA_SPLIT_ROW: return "row"; + case LLAMA_SPLIT_MODE_NONE: return "none"; + case LLAMA_SPLIT_MODE_LAYER: return "layer"; + case LLAMA_SPLIT_MODE_ROW: return "row"; default: GGML_ASSERT(!"invalid split mode"); } } @@ -193,7 +193,7 @@ static const cmd_params cmd_params_defaults = { /* type_v */ {GGML_TYPE_F16}, /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, - /* split_mode */ {LLAMA_SPLIT_LAYER}, + /* split_mode */ {LLAMA_SPLIT_MODE_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, @@ -358,11 +358,11 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { for (const auto & m : p) { llama_split_mode mode; if (m == "none") { - mode = LLAMA_SPLIT_NONE; + mode = LLAMA_SPLIT_MODE_NONE; } else if (m == "layer") { - mode = LLAMA_SPLIT_LAYER; + mode = LLAMA_SPLIT_MODE_LAYER; } else if (m == "row") { - mode = LLAMA_SPLIT_ROW; + mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; break; diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 1a1cf7c78..980128166 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -152,7 +152,7 @@ static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector ggml_tensor * newline_tmp = clip_get_newline_tensor(ctx_clip); model.newline = ggml_new_tensor_1d(model.ctx, GGML_TYPE_F32, newline_tmp->ne[0]); - if (newline_tmp->backend != GGML_BACKEND_CPU) { + if (newline_tmp->backend != GGML_BACKEND_TYPE_CPU) { if (newline_tmp->buffer == NULL) { printf("newline_tmp tensor buffer is NULL\n"); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 19a8c1067..780862ef6 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2086,9 +2086,9 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, break; } std::string value(argv[i]); - /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } - else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } - else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR; } + else if (value == 
"yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN; } else { invalid_param = true; break; } } else if (arg == "--rope-freq-base") @@ -2212,15 +2212,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, std::string arg_next = argv[i]; if (arg_next == "none") { - params.split_mode = LLAMA_SPLIT_NONE; + params.split_mode = LLAMA_SPLIT_MODE_NONE; } else if (arg_next == "layer") { - params.split_mode = LLAMA_SPLIT_LAYER; + params.split_mode = LLAMA_SPLIT_MODE_LAYER; } else if (arg_next == "row") { - params.split_mode = LLAMA_SPLIT_ROW; + params.split_mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; @@ -2447,15 +2447,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, sep++; if (strncmp(sep, "int:", 4) == 0) { sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; kvo.int_value = std::atol(sep); } else if (strncmp(sep, "float:", 6) == 0) { sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; kvo.float_value = std::atof(sep); } else if (strncmp(sep, "bool:", 5) == 0) { sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; if (std::strcmp(sep, "true") == 0) { kvo.bool_value = true; } else if (std::strcmp(sep, "false") == 0) { diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index e78ab185d..7eafe8515 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -960,7 +960,7 @@ int main(int argc, char ** argv) { struct ggml_opt_context * opt = train->opt; // set opt params from command line - opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 21c612cb7..fb6d4f7d2 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6369,11 +6369,11 @@ static __global__ void k_argsort_f32_i32(const float * x, int * dst, const int n int ixj = col ^ j; if (ixj > col) { if ((col & k) == 0) { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } else { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? 
x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } @@ -7927,10 +7927,10 @@ static void argsort_f32_i32_cuda(const float * x, int * dst, const int ncols, co const dim3 block_dims(ncols, 1, 1); const dim3 block_nums(1, nrows, 1); - if (order == GGML_SORT_ASC) { - k_argsort_f32_i32<<>>(x, dst, ncols); - } else if (order == GGML_SORT_DESC) { - k_argsort_f32_i32<<>>(x, dst, ncols); + if (order == GGML_SORT_ORDER_ASC) { + k_argsort_f32_i32<<>>(x, dst, ncols); + } else if (order == GGML_SORT_ORDER_DESC) { + k_argsort_f32_i32<<>>(x, dst, ncols); } else { GGML_ASSERT(false); } @@ -8362,11 +8362,11 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( cudaMemcpyKind kind; char * src_ptr; - if (src->backend == GGML_BACKEND_CPU) { + if (src->backend == GGML_BACKEND_TYPE_CPU) { kind = cudaMemcpyHostToDevice; src_ptr = (char *) src->data; - } else if (src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT) { - GGML_ASSERT(src->backend != GGML_BACKEND_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); + } else if (src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { + GGML_ASSERT(src->backend != GGML_BACKEND_TYPE_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); kind = cudaMemcpyDeviceToDevice; ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src->extra; int id; @@ -8771,7 +8771,7 @@ static void ggml_cuda_op_mul_mat_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -8920,7 +8920,7 @@ static void ggml_cuda_op_mul_mat_vec_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -9096,7 +9096,7 @@ static void ggml_cuda_op_mul_mat_cublas( // the main device has a larger memory buffer to hold the results from all GPUs // ldc == nrows of the matrix that cuBLAS writes into - int ldc = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; const int compute_capability = g_device_caps[id].cc; @@ -9444,7 +9444,7 @@ static void ggml_cuda_op_soft_max( const bool use_src2 = src2 != nullptr; if (use_src2) { - const bool src2_on_device = src2->backend == GGML_BACKEND_GPU; + const bool src2_on_device = src2->backend == GGML_BACKEND_TYPE_GPU; if (src2_on_device) { ggml_tensor_extra_gpu * src2_extra = (ggml_tensor_extra_gpu *) src2->extra; @@ -9502,16 +9502,16 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s const bool use_src1 = src1 != nullptr; const int64_t nrows1 = use_src1 ? 
ggml_nrows(src1) : 1; - GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT( dst->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT( dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * src1_extra = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; - const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; + const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU; // dd = data device float * src0_ddf = nullptr; @@ -9555,7 +9555,7 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(cudaMemcpyAsync(dst->data, dst_ddf, ggml_nbytes(dst), cudaMemcpyDeviceToHost, main_stream)); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CUDA_CHECK(cudaDeviceSynchronize()); } } @@ -9636,8 +9636,8 @@ static void ggml_cuda_op_mul_mat( const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT(src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32 || (src1->ne[2] == 1 && src1->ne[3] == 1)); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -9653,20 +9653,20 @@ static void ggml_cuda_op_mul_mat( ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; const bool src0_is_contiguous = ggml_is_contiguous(src0); const bool src1_is_contiguous = ggml_is_contiguous(src1); const int64_t src1_padded_col_size = GGML_PAD(ne10, MATRIX_ROW_PADDING); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; GGML_ASSERT(!(split && ne02 > 1)); GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); std::array tensor_split; if (split) { - // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_GPU_SPLIT check + // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_TYPE_GPU_SPLIT check // GGML_ASSERT(src0->buffer != nullptr && src0->buffer->buft == ...); ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; tensor_split = buft_ctx->tensor_split; @@ -9724,8 +9724,8 @@ static void ggml_cuda_op_mul_mat( used_devices++; - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; + const bool src1_on_device = src1->backend == 
GGML_BACKEND_TYPE_GPU && id == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; ggml_cuda_set_device(id); cudaStream_t stream = g_cudaStreams[id][0]; @@ -9776,8 +9776,8 @@ static void ggml_cuda_op_mul_mat( continue; } - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; const int64_t row_diff = dev[id].row_high - dev[id].row_low; ggml_cuda_set_device(id); @@ -9802,12 +9802,12 @@ static void ggml_cuda_op_mul_mat( // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed - if (dst->backend == GGML_BACKEND_GPU && id == g_main_device) { + if (dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device) { dst_dd_i += dev[id].row_low; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary - if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { + if (src1->backend == GGML_BACKEND_TYPE_GPU && src1_is_contiguous) { if (id != g_main_device) { if (convert_src1_to_q8_1) { char * src1_ddq_i_source = dev[g_main_device].src1_ddq + src1_ddq_i_offset; @@ -9820,14 +9820,14 @@ static void ggml_cuda_op_mul_mat( src1_ncols*ne10*sizeof(float), stream)); } } - } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { + } else if (src1->backend == GGML_BACKEND_TYPE_CPU || (src1_on_device && !src1_is_contiguous)) { CUDA_CHECK(ggml_cuda_cpy_tensor_2d( src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } - if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_CPU || !src1_is_contiguous)) { + if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_TYPE_CPU || !src1_is_contiguous)) { quantize_row_q8_1_cuda(src1_ddf_i, src1_ddq_i, ne10, src1_ncols, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); } @@ -9845,10 +9845,10 @@ static void ggml_cuda_op_mul_mat( if (!dst_on_device) { void * dst_off_device; cudaMemcpyKind kind; - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { dst_off_device = dst->data; kind = cudaMemcpyDeviceToHost; - } else if (dst->backend == GGML_BACKEND_GPU) { + } else if (dst->backend == GGML_BACKEND_TYPE_GPU) { dst_off_device = dst_extra->data_device[g_main_device]; kind = cudaMemcpyDeviceToDevice; } else { @@ -9913,7 +9913,7 @@ static void ggml_cuda_op_mul_mat( } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaDeviceSynchronize()); } @@ -10019,7 +10019,7 @@ GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const stru static void ggml_cuda_mul_mat_vec_p021(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst){ GGML_ASSERT(ggml_is_permuted(src0) && ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // 0213 permutation GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // 0213 permutation GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -10050,7 +10050,7 @@ static void 
ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -10109,7 +10109,7 @@ static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggm GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_TENSOR_BINARY_OP_LOCALS @@ -10255,11 +10255,11 @@ static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggm static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = - (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && - (src1->backend == GGML_BACKEND_GPU) && - ( dst->backend == GGML_BACKEND_GPU); + (src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT) && + (src1->backend == GGML_BACKEND_TYPE_GPU) && + ( dst->backend == GGML_BACKEND_TYPE_GPU); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; @@ -10409,7 +10409,7 @@ static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src00)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src00->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); const int64_t ne00 = src00->ne[0]; GGML_UNUSED(ne00); @@ -10553,7 +10553,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s cudaStream_t stream = g_cudaStreams[g_main_device][0]; - if (ids->backend == GGML_BACKEND_GPU) { + if (ids->backend == GGML_BACKEND_TYPE_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, stream)); CUDA_CHECK(cudaStreamSynchronize(stream)); @@ -10570,20 +10570,20 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.backend = GGML_BACKEND_GPU; - dst_row.backend = GGML_BACKEND_GPU; + src1_row.backend = GGML_BACKEND_TYPE_GPU; + dst_row.backend = GGML_BACKEND_TYPE_GPU; src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = src1->backend == GGML_BACKEND_CPU ? + char * src1_original = src1->backend == GGML_BACKEND_TYPE_CPU ? (char *) src1->data : (char *) src1_extra->data_device[g_main_device]; - char * dst_original = dst->backend == GGML_BACKEND_CPU ? + char * dst_original = dst->backend == GGML_BACKEND_TYPE_CPU ? 
(char *) dst->data : (char *) dst_extra->data_device[g_main_device]; if (src1->ne[1] == 1) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; @@ -10611,9 +10611,9 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s src1_row_extra.data_device[g_main_device] = src1_contiguous.get(); dst_row_extra.data_device[g_main_device] = dst_contiguous.get(); - const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? + const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_TYPE_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; - const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? + const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_TYPE_CPU ? cudaMemcpyDeviceToHost : cudaMemcpyDeviceToDevice; for (int32_t row_id = 0; row_id < n_as; ++row_id) { @@ -10668,7 +10668,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CUDA_CHECK(cudaStreamSynchronize(stream)); } } @@ -10685,8 +10685,8 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg const int64_t ne = ggml_nelements(src0); GGML_ASSERT(ne == ggml_nelements(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX); GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX); @@ -10817,9 +10817,9 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st if (!g_cublas_loaded) return false; ggml_cuda_func_t func; - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; @@ -10966,14 +10966,14 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st return false; } - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { ggml_cuda_set_peer_access(tensor->src[1]->ne[1]); } if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } func(tensor->src[0], tensor->src[1], tensor); @@ -11072,7 +11072,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t extra->data_device[ctx->device] = tensor->data; - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; if (ggml_is_quantized(tensor->type)) { @@ 
-11087,7 +11087,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t } GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -11098,7 +11098,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t } GGML_CALL static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -11333,7 +11333,7 @@ GGML_CALL static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_bu CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); } } - tensor->backend = GGML_BACKEND_GPU_SPLIT; + tensor->backend = GGML_BACKEND_TYPE_GPU_SPLIT; tensor->extra = extra; } @@ -11605,7 +11605,7 @@ GGML_CALL static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); } @@ -11614,7 +11614,7 @@ GGML_CALL static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } @@ -11644,7 +11644,7 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg ggml_cuda_set_main_device(cuda_ctx->device); ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; @@ -11654,13 +11654,13 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg } #ifndef NDEBUG - assert(node->backend == GGML_BACKEND_GPU || node->backend == GGML_BACKEND_GPU_SPLIT); + assert(node->backend == GGML_BACKEND_TYPE_GPU || node->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); + assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU || node->src[j]->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) || 
ggml_backend_buffer_is_cuda_split(node->src[j]->buffer)); assert(node->src[j]->extra != nullptr); } diff --git a/ggml-metal.m b/ggml-metal.m index ee584cfa7..3d6b01263 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2262,8 +2262,8 @@ static bool ggml_metal_graph_compute( id pipeline = nil; switch (order) { - case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; - case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; + case GGML_SORT_ORDER_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_ORDER_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; default: GGML_ASSERT(false); }; diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 797bee667..df619a884 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1354,7 +1354,7 @@ static void ggml_cl_pool_free(cl_mem mem, size_t size) { } void ggml_cl_free_data(const struct ggml_tensor* tensor) { - if (tensor->backend != GGML_BACKEND_GPU) { + if (tensor->backend != GGML_BACKEND_TYPE_GPU) { return; } @@ -1412,7 +1412,7 @@ static cl_int ggml_cl_h2d_tensor_2d(cl_command_queue queue, cl_mem dst, size_t o } static void ggml_cl_mul_f32(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; @@ -1476,7 +1476,7 @@ void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src } static void ggml_cl_add_f32(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; @@ -1566,13 +1566,13 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr size_t y_size; size_t d_size; cl_mem d_X; - if (src0->backend == GGML_BACKEND_GPU) { // NOLINT + if (src0->backend == GGML_BACKEND_TYPE_GPU) { // NOLINT d_X = (cl_mem) src0->extra; } else { d_X = ggml_cl_pool_malloc(sizeof(float) * x_ne, &x_size); } - cl_mem d_Y = src1->backend == GGML_BACKEND_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); - cl_mem d_D = dst->backend == GGML_BACKEND_GPU ? (cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); + cl_mem d_Y = src1->backend == GGML_BACKEND_TYPE_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); + cl_mem d_D = dst->backend == GGML_BACKEND_TYPE_GPU ? 
(cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); size_t x_offset = 0; @@ -1580,7 +1580,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr // TODO: copy src0 here when r3>1 for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { - if (src0->backend == GGML_BACKEND_GPU) { + if (src0->backend == GGML_BACKEND_TYPE_GPU) { x_offset = (i03 * ne02 + i02) * x_ne; } else { // copy src0 to device @@ -1589,7 +1589,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // copy src1 to device - if (src1->backend == GGML_BACKEND_CPU) { + if (src1->backend == GGML_BACKEND_TYPE_CPU) { CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); } @@ -1612,7 +1612,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); } @@ -1621,13 +1621,13 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } } - if (src0->backend != GGML_BACKEND_GPU) { + if (src0->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_X, x_size); } - if (src1->backend != GGML_BACKEND_GPU) { + if (src1->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_Y, y_size); } - if (dst->backend != GGML_BACKEND_GPU) { + if (dst->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_D, d_size); } } @@ -1670,7 +1670,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr size_t y_size; size_t d_size; cl_mem d_X; - if (src0->backend == GGML_BACKEND_GPU) { // NOLINT + if (src0->backend == GGML_BACKEND_TYPE_GPU) { // NOLINT d_X = (cl_mem) src0->extra; } else { d_X = ggml_cl_pool_malloc(sizeof(ggml_fp16_t) * x_ne, &x_size); @@ -1687,7 +1687,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr // TODO: copy src0 here when r3>1 for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { - if (src0->backend == GGML_BACKEND_GPU) { + if (src0->backend == GGML_BACKEND_TYPE_GPU) { x_offset = (i03 * ne02 + i02) * x_ne; } else { // copy src0 to device @@ -1741,7 +1741,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host, then convert to float - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); ggml_fp16_to_fp32_row(tmp, d, d_ne); @@ -1753,7 +1753,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } } - if (src0->backend != GGML_BACKEND_GPU) { + if (src0->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_X, x_size); } ggml_cl_pool_free(d_Y, y_size); @@ -1798,7 +1798,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * cl_mem d_Y = ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); cl_mem d_D = ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); cl_mem d_Q; - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { d_Q = ggml_cl_pool_malloc(q_sz, 
&q_size); } @@ -1817,10 +1817,10 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { // copy src0 to device if necessary - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { events.emplace_back(); CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Q, 0, src0, i03, i02, events.data() + ev_idx++)); - } else if (src0->backend == GGML_BACKEND_GPU) { + } else if (src0->backend == GGML_BACKEND_TYPE_GPU) { d_Q = (cl_mem) src0->extra; } else { GGML_ASSERT(false); @@ -1829,7 +1829,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * if (!mul_mat_vec) { // convert src0 to fp32 on device const size_t global = x_ne / global_denom; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; + const size_t offset = src0->backend == GGML_BACKEND_TYPE_GPU ? (i03 * ne02 + i02) * x_bps : 0; CL_CHECK(clSetKernelArg(*to_fp32_cl, 0, sizeof(cl_mem), &d_Q)); CL_CHECK(clSetKernelArg(*to_fp32_cl, 1, sizeof(cl_mem), &d_X)); CL_CHECK(clEnqueueNDRangeKernel(queue, *to_fp32_cl, 1, &offset, &global, local > 0 ? &local : NULL, events.size(), !events.empty() ? events.data() : NULL, NULL)); @@ -1843,7 +1843,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * // compute const size_t global = ne01 * local; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; + const size_t offset = src0->backend == GGML_BACKEND_TYPE_GPU ? (i03 * ne02 + i02) * x_bps : 0; const cl_int ncols = ne00; events.emplace_back(); CL_CHECK(clSetKernelArg(*dmmv, 0, sizeof(cl_mem), &d_Q)); @@ -1895,7 +1895,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * } ggml_cl_pool_free(d_Y, y_size); ggml_cl_pool_free(d_D, d_size); - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { ggml_cl_pool_free(d_Q, q_size); } } @@ -1911,7 +1911,7 @@ bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tens if ((src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 && - ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU)) { + ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_TYPE_GPU)) { return true; } @@ -1993,7 +1993,7 @@ void ggml_cl_transform_tensor(void * data, ggml_tensor * tensor) { CL_CHECK(clFinish(queue)); tensor->extra = dst; - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); } // ggml-backend @@ -2045,7 +2045,7 @@ static void ggml_backend_opencl_buffer_init_tensor(ggml_backend_buffer_t buffer, ctx->sub_buffers.push_back(sub_buffer); tensor->extra = sub_buffer; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; } static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index b897828f9..c6c3c6e6f 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -3338,7 +3338,7 @@ void print_ggml_tensor(const char*name, struct ggml_tensor *src){ size_t total_elements = ggml_nelements(src); - const bool src_on_device = src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT; + const bool src_on_device = 
src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT; float *src_data =NULL; if(src_on_device) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; @@ -8086,11 +8086,11 @@ static void k_argsort_f32_i32(const float * x, int * dst, const int ncols, int ixj = col ^ j; if (ixj > col) { if ((col & k) == 0) { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } else { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } @@ -10825,7 +10825,7 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, const sycl::range<3> block_dims(1, 1, ncols); const sycl::range<3> block_nums(1, nrows, 1); - if (order == GGML_SORT_ASC) { + if (order == GGML_SORT_ORDER_ASC) { /* DPCT1049:44: The work-group size passed to the SYCL kernel may exceed the limit. To get the device limit, query @@ -10834,9 +10834,9 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, stream->parallel_for( sycl::nd_range<3>(block_nums * block_dims, block_dims), [=](sycl::nd_item<3> item_ct1) { - k_argsort_f32_i32(x, dst, ncols, item_ct1); + k_argsort_f32_i32(x, dst, ncols, item_ct1); }); - } else if (order == GGML_SORT_DESC) { + } else if (order == GGML_SORT_ORDER_DESC) { /* DPCT1049:45: The work-group size passed to the SYCL kernel may exceed the limit. To get the device limit, query @@ -10845,7 +10845,7 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, stream->parallel_for( sycl::nd_range<3>(block_nums * block_dims, block_dims), [=](sycl::nd_item<3> item_ct1) { - k_argsort_f32_i32(x, dst, ncols, item_ct1); + k_argsort_f32_i32(x, dst, ncols, item_ct1); }); } else { GGML_ASSERT(false); @@ -11407,12 +11407,12 @@ static dpct::err0 ggml_sycl_cpy_tensor_2d(void *dst, dpct::memcpy_direction kind; char * src_ptr; - if (src->backend == GGML_BACKEND_CPU) { + if (src->backend == GGML_BACKEND_TYPE_CPU) { kind = dpct::host_to_device; src_ptr = (char *) src->data; - // GGML_SYCL_DEBUG("ggml_sycl_cpy_tensor_2d GGML_BACKEND_CPU src_ptr %p\n", src_ptr); - } else if (src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT) { - GGML_ASSERT(src->backend != GGML_BACKEND_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); + // GGML_SYCL_DEBUG("ggml_sycl_cpy_tensor_2d GGML_BACKEND_TYPE_CPU src_ptr %p\n", src_ptr); + } else if (src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { + GGML_ASSERT(src->backend != GGML_BACKEND_TYPE_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); kind = dpct::device_to_device; ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src->extra; int id; @@ -11846,7 +11846,7 @@ inline void ggml_sycl_op_mul_mat_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the dequantize_mul_mat kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && device_id == g_main_device ? 
ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -12119,7 +12119,7 @@ inline void ggml_sycl_op_mul_mat_sycl( // the main device has a larger memory buffer to hold the results from all GPUs // ldc == nrows of the matrix that cuBLAS writes into - int ldc = dst->backend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; + int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && device_id == g_main_device ? ne0 : row_diff; #ifdef GGML_SYCL_F16 bool use_fp16 = true; // TODO(Yu) SYCL capability check @@ -12501,16 +12501,16 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, const bool use_src1 = src1 != nullptr; const int64_t nrows1 = use_src1 ? ggml_nrows(src1) : 1; - GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT( dst->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT( dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * src1_extra = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; - const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; + const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU; // dd = data device float * src0_ddf = nullptr; @@ -12565,7 +12565,7 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, main_stream->memcpy(dst->data, dst_ddf, ggml_nbytes(dst)))); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(CHECK_TRY_ERROR( dpct::get_current_device().queues_wait_and_throw())); } @@ -12640,8 +12640,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT(src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -12656,13 +12656,13 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; const bool src0_is_contiguous = ggml_is_contiguous(src0); const bool src1_is_contiguous = ggml_is_contiguous(src1); int64_t src1_padded_col_size = GGML_PAD(ne10, MATRIX_ROW_PADDING); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; GGML_ASSERT(!(split && ne02 > 1)); GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); @@ -12717,8 +12717,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, used_devices++; - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && 
id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device_index; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; ggml_sycl_set_device(get_device_id_by_index(id)); const dpct::queue_ptr stream = g_syclStreams[id][0]; @@ -12782,8 +12782,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, continue; } - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device_index; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; const int64_t row_diff = row_high[id] - row_low[id]; ggml_sycl_set_device(get_device_id_by_index(id)); @@ -12809,12 +12809,12 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed - if (dst->backend == GGML_BACKEND_GPU && id == g_main_device_index) { + if (dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index) { dst_dd_i += row_low[id]; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary - if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { + if (src1->backend == GGML_BACKEND_TYPE_GPU && src1_is_contiguous) { if (id != g_main_device_index) { if (convert_src1_to_q8_1) { char * src1_ddq_i_source = src1_ddq[g_main_device_index] + src1_ddq_i_offset; @@ -12830,14 +12830,14 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, src1_ncols * ne10 * sizeof(float)))); } } - } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { + } else if (src1->backend == GGML_BACKEND_TYPE_CPU || (src1_on_device && !src1_is_contiguous)) { SYCL_CHECK(ggml_sycl_cpy_tensor_2d( src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } - if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_CPU || !src1_is_contiguous)) { + if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_TYPE_CPU || !src1_is_contiguous)) { quantize_row_q8_1_sycl(src1_ddf_i, src1_ddq_i, ne10, src1_ncols, src1_padded_col_size, stream); /* DPCT1010:92: SYCL uses exceptions to report errors and does @@ -12867,10 +12867,10 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, if (!dst_on_device) { void * dst_off_device; dpct::memcpy_direction kind; - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { dst_off_device = dst->data; kind = dpct::device_to_host; - } else if (dst->backend == GGML_BACKEND_GPU) { + } else if (dst->backend == GGML_BACKEND_TYPE_GPU) { dst_off_device = dst_extra->data_device[g_main_device_index]; kind = dpct::device_to_device; } else { @@ -12954,7 +12954,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(ggml_sycl_set_device(g_main_device)); SYCL_CHECK(CHECK_TRY_ERROR( dpct::get_current_device().queues_wait_and_throw())); @@ -13091,7 +13091,7 @@ static void ggml_sycl_mul_mat_vec_p021(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst) try { GGML_ASSERT(ggml_is_permuted(src0) && 
ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // 0213 permutation GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // 0213 permutation GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -13129,7 +13129,7 @@ static void ggml_sycl_mul_mat_vec_nc(const ggml_tensor *src0, GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -13196,7 +13196,7 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -13372,11 +13372,11 @@ catch (sycl::exception const &exc) { static void ggml_sycl_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = - (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && - (src1->backend == GGML_BACKEND_GPU) && - ( dst->backend == GGML_BACKEND_GPU); + (src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT) && + (src1->backend == GGML_BACKEND_TYPE_GPU) && + ( dst->backend == GGML_BACKEND_TYPE_GPU); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; id < g_device_count; ++id) { @@ -13505,7 +13505,7 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src00)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src00->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_TENSOR_LOCALS(int64_t, ne0, src00, ne); @@ -13643,7 +13643,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; - if (ids->backend == GGML_BACKEND_GPU) { + if (ids->backend == GGML_BACKEND_TYPE_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device_index]; SYCL_CHECK(CHECK_TRY_ERROR( stream->memcpy(ids_host.data(), ids_dev, ggml_nbytes(ids)))); @@ -13661,20 +13661,20 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.backend = GGML_BACKEND_GPU; - dst_row.backend = GGML_BACKEND_GPU; + src1_row.backend = GGML_BACKEND_TYPE_GPU; + dst_row.backend = GGML_BACKEND_TYPE_GPU; src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = src1->backend == GGML_BACKEND_CPU ? + char * src1_original = src1->backend == GGML_BACKEND_TYPE_CPU ? (char *) src1->data : (char *) src1_extra->data_device[g_main_device_index]; - char * dst_original = dst->backend == GGML_BACKEND_CPU ? + char * dst_original = dst->backend == GGML_BACKEND_TYPE_CPU ? 
(char *) dst->data : (char *) dst_extra->data_device[g_main_device_index]; if (src1->ne[1] == 1) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; @@ -13756,7 +13756,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(CHECK_TRY_ERROR(stream->wait())); } } @@ -13779,8 +13779,8 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, const int64_t ne = ggml_nelements(src0); GGML_ASSERT(ne == ggml_nelements(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX); GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX); @@ -13887,17 +13887,17 @@ void ggml_sycl_transform_tensor(void *data, struct ggml_tensor *tensor) try { memset(extra, 0, sizeof(*extra)); for (int64_t id = 0; id < g_device_count; ++id) { - if (backend == GGML_BACKEND_GPU && id != g_main_device_index) { + if (backend == GGML_BACKEND_TYPE_GPU && id != g_main_device_index) { continue; } ggml_sycl_set_device(get_device_id_by_index(id)); const dpct::queue_ptr stream = g_syclStreams[id][0]; int64_t row_low, row_high; - if (backend == GGML_BACKEND_GPU) { + if (backend == GGML_BACKEND_TYPE_GPU) { row_low = 0; row_high = nrows; - } else if (backend == GGML_BACKEND_GPU_SPLIT) { + } else if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { const int64_t rounding = get_row_rounding(tensor->type); row_low = id == 0 ? 
0 : nrows*g_tensor_split[id]; @@ -13946,7 +13946,7 @@ void ggml_sycl_transform_tensor(void *data, struct ggml_tensor *tensor) try { extra->data_device[id] = buf; - if (backend == GGML_BACKEND_GPU_SPLIT) { + if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { for (int64_t is = 0; is < MAX_STREAMS; ++is) { SYCL_CHECK(CHECK_TRY_ERROR(extra->events[id][is] = new sycl::event())); @@ -13963,7 +13963,7 @@ catch (sycl::exception const &exc) { } void ggml_sycl_free_data(struct ggml_tensor *tensor) try { - if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { + if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_TYPE_GPU && tensor->backend != GGML_BACKEND_TYPE_GPU_SPLIT) ) { return; } @@ -14016,15 +14016,15 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, return; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_CPU) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU) { const ggml_op src0_op = tensor->src[0]->op; if (src0_op == GGML_OP_RESHAPE || src0_op == GGML_OP_TRANSPOSE || src0_op == GGML_OP_VIEW || src0_op == GGML_OP_PERMUTE) { ggml_sycl_assign_buffers_impl(tensor->src[0], scratch, force_inplace, no_alloc); } } - if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_CPU) { + if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU) { ggml_sycl_assign_buffers_impl(tensor->src[1], scratch, force_inplace, no_alloc); } @@ -14042,7 +14042,7 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, SYCL_CHECK(ggml_sycl_set_device(g_main_device)); const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { + if (inplace && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; size_t offset = 0; @@ -14111,7 +14111,7 @@ void ggml_sycl_assign_scratch_offset(struct ggml_tensor *tensor, const bool inplace = tensor->view_src != nullptr; - if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { + if (inplace && (tensor->view_src->backend == GGML_BACKEND_TYPE_GPU || tensor->view_src->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; size_t view_offset = 0; @@ -14132,7 +14132,7 @@ catch (sycl::exception const &exc) { } void ggml_sycl_copy_to_device(struct ggml_tensor *tensor) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_is_contiguous(tensor)); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -14219,9 +14219,9 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ if (!g_sycl_loaded) return false; ggml_sycl_func_t func; - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - 
|| (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; @@ -14359,14 +14359,14 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ return false; } - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { ggml_sycl_set_peer_access(tensor->src[1]->ne[1]); } if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } func(tensor->src[0], tensor->src[1], tensor); @@ -14517,7 +14517,7 @@ static void ggml_backend_sycl_buffer_init_tensor(ggml_backend_buffer_t buffer, extra->data_device[ctx->device] = tensor->data; - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; if (ggml_is_quantized(tensor->type)) { @@ -14548,7 +14548,7 @@ static void ggml_backend_sycl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor *tensor, const void *data, size_t offset, size_t size) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; @@ -14573,7 +14573,7 @@ static void ggml_backend_sycl_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor *tensor, void *data, size_t offset, size_t size) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; @@ -14809,7 +14809,7 @@ static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( (char *)tensor->data + offset, data, size))); @@ -14827,7 +14827,7 @@ static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( data, (const char *)tensor->data + offset, size))); @@ -14880,7 +14880,7 @@ static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph ggml_sycl_set_main_device(sycl_ctx->device); ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = 
cgraph->nodes[i]; @@ -14888,13 +14888,13 @@ static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) continue; - assert(node->backend == GGML_BACKEND_GPU); + assert(node->backend == GGML_BACKEND_TYPE_GPU); assert(node->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU); + assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU); assert(node->src[j]->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->src[j]->extra != nullptr); } diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 4e5eaff15..6caafb822 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -2320,8 +2320,8 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su src1_uma = d_Qy != nullptr; } - const bool load_x = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_x = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const bool x_non_contig = !load_x && !ggml_vk_dim01_contiguous(src0); const bool y_non_contig = !load_y && !ggml_vk_dim01_contiguous(src1); @@ -2453,7 +2453,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su // compute ggml_vk_matmul(ctx, subctx, *pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data); ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13); @@ -2506,8 +2506,8 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context src1_uma = d_Qy != nullptr; } - const bool load_x = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_x = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const bool x_non_contig = !load_x && !ggml_vk_dim01_contiguous(src0); const bool y_non_contig = !load_y && !ggml_vk_dim01_contiguous(src1); @@ -2630,7 +2630,7 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); ggml_vk_sync_buffers(subctx); @@ -2647,7 +2647,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << 
dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; #endif GGML_ASSERT(ggml_is_permuted(src0) && ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // NOLINT GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // NOLINT GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -2679,7 +2679,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c src1_uma = d_Qy != nullptr; } - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const uint64_t x_ne = ne00 * ne01 * ne02; const uint64_t y_ne = ne10 * ne11 * ne12; @@ -2721,7 +2721,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_sync_buffers(subctx); @@ -2738,7 +2738,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -2771,7 +2771,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con src1_uma = d_Qy != nullptr; } - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const uint64_t d_ne = ne01 * ne11 * ne12; @@ -2814,7 +2814,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_sync_buffers(subctx); @@ -2832,7 +2832,7 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr return (src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && (src1->type == GGML_TYPE_F32 || src1->type == GGML_TYPE_F16 || ggml_is_quantized(src1->type)) && dst->type == GGML_TYPE_F32 && - ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU); + ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_TYPE_GPU); } static void ggml_vk_mul_mat(ggml_backend_vk_context * ctx, vk_context * subctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { @@ -2880,8 +2880,8 @@ static void 
ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx // TODO: support for transposed / permuted tensors GGML_ASSERT(nb0 == sizeof(float)); GGML_ASSERT(nb00 == sizeof(float)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; @@ -3110,8 +3110,8 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c } } - const bool transfer_src0 = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool transfer_src0 = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : 0; @@ -3120,7 +3120,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c vk_buffer d_D = extra->buffer_gpu.lock(); // Workaround for tiny tensor inputs on ROPE - if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > d_D->size) { + if (use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU && y_sz > d_D->size) { y_sz = VK_WHOLE_SIZE; } @@ -3209,9 +3209,9 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } - if (dst->backend == GGML_BACKEND_CPU && op == GGML_OP_CPY) { + if (dst->backend == GGML_BACKEND_TYPE_CPU && op == GGML_OP_CPY) { ggml_vk_d2h_tensor_2d(ctx, subctx, d_D, 0, dst); - } else if(dst->backend == GGML_BACKEND_CPU) { + } else if(dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, d_sz); @@ -3253,7 +3253,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); } @@ -3359,7 +3359,7 @@ static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, con static void ggml_vk_nop(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { // If backend is CPU, data from src0 has to be copied off the device - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; vk_buffer d_D = extra_src0->buffer_gpu.lock(); ggml_vk_sync_buffers(subctx); @@ -3994,9 +3994,9 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm #ifdef GGML_VULKAN_DEBUG std::cerr << 
"ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; #endif - const bool any_on_device = node->backend == GGML_BACKEND_GPU - || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_GPU)); + const bool any_on_device = node->backend == GGML_BACKEND_TYPE_GPU + || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_TYPE_GPU)); if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { return; @@ -4215,9 +4215,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { } static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * node, bool last_node){ - const bool any_on_device = node->backend == GGML_BACKEND_GPU - || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = node->backend == GGML_BACKEND_TYPE_GPU + || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { return; @@ -4371,7 +4371,7 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod last_node = true; #endif - if (node->backend == GGML_BACKEND_CPU || last_node) { + if (node->backend == GGML_BACKEND_TYPE_CPU || last_node) { ggml_vk_ctx_end(ctx->compute_ctx); ctx->compute_ctx->exit_tensor = node; ctx->compute_ctx = nullptr; @@ -4379,9 +4379,9 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod } static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor){ - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (ctx->disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { return false; @@ -4442,7 +4442,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } @@ -4745,7 +4745,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b extra->offset = (uint8_t *) tensor->data - (uint8_t *) vk_ptr_base; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; } @@ -4753,7 +4753,7 @@ GGML_CALL static void 
ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_set_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4768,7 +4768,7 @@ GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t bu #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_get_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4999,7 +4999,7 @@ GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, g #endif ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -5020,7 +5020,7 @@ GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, c #endif ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -5097,7 +5097,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml int last_node = cgraph->n_nodes - 1; // If the last op in the cgraph isn't backend GPU, the command buffer doesn't get closed properly - while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_GPU) { + while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_TYPE_GPU) { last_node -= 1; } @@ -5106,7 +5106,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; @@ -5410,7 +5410,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, const void * d static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tensor * tensor, const char * name) { void * tensor_data = tensor->data; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { const size_t tensor_size = ggml_nbytes(tensor); tensor_data = malloc(tensor_size); @@ -5436,14 +5436,14 @@ static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tenso std::vector done; ggml_vk_print_graph_origin(tensor, done); - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { free(tensor_data); } } static void ggml_vk_check_tensor(const std::string& name, const ggml_tensor * tensor) { return; - 
GGML_ASSERT(tensor->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_CPU); if (tensor->type != GGML_TYPE_F32 && tensor->type != GGML_TYPE_F16) { return; } @@ -5481,7 +5481,7 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { return; } @@ -5518,10 +5518,10 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src0_buffer = malloc(src0_size); src0_clone->data = src0_buffer; - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { memcpy(src0_clone->data, src0->data, src0_size); memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); - } else if (src0->backend == GGML_BACKEND_GPU) { + } else if (src0->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src0->extra; uint64_t offset = extra->offset; if (!ggml_is_contiguous(src0) && ggml_vk_dim01_contiguous(src0)) { @@ -5561,10 +5561,10 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src1_buffer = malloc(src1_size); src1_clone->data = src1_buffer; - if (src1->backend == GGML_BACKEND_CPU) { + if (src1->backend == GGML_BACKEND_TYPE_CPU) { memcpy(src1_clone->data, src1->data, src1_size); memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); - } else if (src1->backend == GGML_BACKEND_GPU) { + } else if (src1->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src1->extra; uint64_t offset = extra->offset; if (!ggml_is_contiguous(src1) && ggml_vk_dim01_contiguous(src1)) { @@ -5723,7 +5723,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { return; } if (!(vk_output_tensor > 0 && vk_output_tensor == check_counter) && check_counter <= vk_skip_checks) { @@ -5735,7 +5735,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ void * tensor_data = tensor->data; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { size_t tensor_size = ggml_nbytes(tensor); tensor_data = malloc(tensor_size); @@ -5868,7 +5868,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ comp_result = nullptr; comp_size = 0; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { free(tensor_data); } } diff --git a/ggml.c b/ggml.c index c09a3cad6..1d81553f4 100644 --- a/ggml.c +++ b/ggml.c @@ -2721,7 +2721,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( } } - struct ggml_object * const obj_new = ggml_new_object(ctx, GGML_OBJECT_TENSOR, GGML_TENSOR_SIZE + obj_alloc_size); + struct ggml_object * const obj_new = ggml_new_object(ctx, GGML_OBJECT_TYPE_TENSOR, GGML_TENSOR_SIZE + obj_alloc_size); // TODO: for recoverable errors, we would need to free the data allocated from the scratch buffer here @@ -2729,7 +2729,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( *result = (struct ggml_tensor) { /*.type =*/ 
type, - /*.backend =*/ GGML_BACKEND_CPU, + /*.backend =*/ GGML_BACKEND_TYPE_CPU, /*.buffer =*/ NULL, /*.ne =*/ { 1, 1, 1, 1 }, /*.nb =*/ { 0, 0, 0, 0 }, @@ -3302,7 +3302,7 @@ struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx) { char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { return (struct ggml_tensor *)(mem_buffer + obj->offs); } @@ -3319,7 +3319,7 @@ struct ggml_tensor * ggml_get_next_tensor(const struct ggml_context * ctx, struc char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { return (struct ggml_tensor *)(mem_buffer + obj->offs); } @@ -3335,7 +3335,7 @@ struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * nam char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { struct ggml_tensor * cur = (struct ggml_tensor *)(mem_buffer + obj->offs); if (strcmp(cur->name, name) == 0) { return cur; @@ -5879,7 +5879,7 @@ struct ggml_tensor * ggml_top_k( int k) { GGML_ASSERT(a->ne[0] >= k); - struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_DESC); + struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_ORDER_DESC); result = ggml_view_4d(ctx, result, k, result->ne[1], result->ne[2], result->ne[3], @@ -6673,7 +6673,7 @@ static void ggml_compute_forward_dup_same_cont( GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); GGML_ASSERT(src0->type == dst->type); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -6705,7 +6705,7 @@ static void ggml_compute_forward_dup_f16( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -6978,7 +6978,7 @@ static void ggml_compute_forward_dup_f32( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7231,7 +7231,7 @@ static void ggml_compute_forward_dup_bytes( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(src0->type == dst->type); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7411,7 +7411,7 @@ static void ggml_compute_forward_add_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7419,7 +7419,7 @@ static void ggml_compute_forward_add_f32( const int nth = params->nth; #ifdef GGML_USE_CLBLAST - if (src1->backend == GGML_BACKEND_GPU) { + if (src1->backend == GGML_BACKEND_TYPE_GPU) { // TODO: OpenCL kernel support full broadcast GGML_ASSERT(ggml_can_repeat_rows(src1, src0)); if (ith == 0) { @@ -7501,7 +7501,7 @@ static void ggml_compute_forward_add_f16_f32( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if 
(params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7580,7 +7580,7 @@ static void ggml_compute_forward_add_f16_f16( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7636,7 +7636,7 @@ static void ggml_compute_forward_add_q_f32( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7774,7 +7774,7 @@ static void ggml_compute_forward_add1_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7828,7 +7828,7 @@ static void ggml_compute_forward_add1_f16_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7880,7 +7880,7 @@ static void ggml_compute_forward_add1_f16_f16( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7932,7 +7932,7 @@ static void ggml_compute_forward_add1_q_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8062,7 +8062,7 @@ static void ggml_compute_forward_acc_f32( size_t offset = ((int32_t *) dst->op_params)[3]; bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (params->ith != 0) { return; } @@ -8074,7 +8074,7 @@ static void ggml_compute_forward_acc_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8176,7 +8176,7 @@ static void ggml_compute_forward_sub_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8257,14 +8257,14 @@ static void ggml_compute_forward_mul_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } const int ith = params->ith; const int nth = params->nth; #if defined(GGML_USE_CLBLAST) - if (src1->backend == GGML_BACKEND_GPU) { + if (src1->backend == GGML_BACKEND_TYPE_GPU) { // TODO: OpenCL kernel 
support full broadcast GGML_ASSERT(ggml_can_repeat_rows(src1, src0)); if (ith == 0) { @@ -8365,7 +8365,7 @@ static void ggml_compute_forward_div_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8460,7 +8460,7 @@ static void ggml_compute_forward_sqr_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8506,7 +8506,7 @@ static void ggml_compute_forward_sqrt_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8552,7 +8552,7 @@ static void ggml_compute_forward_log_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8598,7 +8598,7 @@ static void ggml_compute_forward_sum_f32( assert(params->ith == 0); assert(ggml_is_scalar(dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8633,7 +8633,7 @@ static void ggml_compute_forward_sum_f16( assert(params->ith == 0); assert(ggml_is_scalar(dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8690,7 +8690,7 @@ static void ggml_compute_forward_sum_rows_f32( GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8745,7 +8745,7 @@ static void ggml_compute_forward_mean_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8804,7 +8804,7 @@ static void ggml_compute_forward_argmax_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8855,7 +8855,7 @@ static void ggml_compute_forward_repeat_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8900,7 +8900,7 @@ static void ggml_compute_forward_repeat_f16( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8974,7 +8974,7 @@ static void ggml_compute_forward_repeat_back_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(dst, src0)); - if (params->type == GGML_TASK_INIT || 
params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9051,7 +9051,7 @@ static void ggml_compute_forward_concat_f32( const struct ggml_tensor * src0 = dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9123,7 +9123,7 @@ static void ggml_compute_forward_abs_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9169,7 +9169,7 @@ static void ggml_compute_forward_sgn_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9215,7 +9215,7 @@ static void ggml_compute_forward_neg_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9261,7 +9261,7 @@ static void ggml_compute_forward_step_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9307,7 +9307,7 @@ static void ggml_compute_forward_tanh_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9353,7 +9353,7 @@ static void ggml_compute_forward_elu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9399,7 +9399,7 @@ static void ggml_compute_forward_relu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9446,7 +9446,7 @@ static void ggml_compute_forward_gelu_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9509,7 +9509,7 @@ static void ggml_compute_forward_gelu_quick_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9572,7 +9572,7 @@ static void ggml_compute_forward_silu_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if 
(params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9633,7 +9633,7 @@ static void ggml_compute_forward_leaky_relu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9686,7 +9686,7 @@ static void ggml_compute_forward_silu_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_are_same_shape(src0, grad)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9748,7 +9748,7 @@ static void ggml_compute_forward_hardswish_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9791,7 +9791,7 @@ static void ggml_compute_forward_hardsigmoid_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9837,7 +9837,7 @@ static void ggml_compute_forward_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9912,7 +9912,7 @@ static void ggml_compute_forward_rms_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9983,7 +9983,7 @@ static void ggml_compute_forward_rms_norm_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst) && ggml_are_same_shape(src0, src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10161,7 +10161,7 @@ static void ggml_compute_forward_group_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10328,7 +10328,7 @@ static void ggml_compute_forward_mul_mat( #if defined(GGML_USE_CLBLAST) if (ggml_cl_can_mul_mat(src0, src1, dst)) { - if (params->ith == 0 && params->type == GGML_TASK_COMPUTE) { + if (params->ith == 0 && params->type == GGML_TASK_TYPE_COMPUTE) { ggml_cl_mul_mat(src0, src1, dst, params->wdata, params->wsize); } return; @@ -10341,7 +10341,7 @@ static void ggml_compute_forward_mul_mat( const size_t desired_wsize = ne13*ne12*ne_plane*sizeof(float); UNUSED(desired_wsize); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (type != GGML_TYPE_F32) { assert(params->wsize >= desired_wsize); // parallelize by src0 rows @@ -10364,7 +10364,7 @@ static void ggml_compute_forward_mul_mat( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10402,7 +10402,7 @@ static void ggml_compute_forward_mul_mat( } #endif - if (params->type == GGML_TASK_INIT) { + if 
(params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10426,7 +10426,7 @@ static void ggml_compute_forward_mul_mat( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10583,7 +10583,7 @@ static void ggml_compute_forward_mul_mat_id( #define MMID_MATRIX_ROW(row_id, i1) matrix_rows[(row_id)*ne11 + (i1)] - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10620,7 +10620,7 @@ static void ggml_compute_forward_mul_mat_id( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10768,7 +10768,7 @@ static void ggml_compute_forward_out_prod_f32( (ggml_is_contiguous(src1) || ggml_is_transposed(src1)); #endif - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) // gemm beta will zero dst if (use_blas) { return; @@ -10781,7 +10781,7 @@ static void ggml_compute_forward_out_prod_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10961,7 +10961,7 @@ static void ggml_compute_forward_out_prod_q_f32( // TODO: #if defined(GGML_USE_CUBLAS) ggml_cuda_out_prod // TODO: #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) || defined(GGML_USE_CLBLAST) - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10969,7 +10969,7 @@ static void ggml_compute_forward_out_prod_q_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11087,7 +11087,7 @@ static void ggml_compute_forward_scale_f32( GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11159,7 +11159,7 @@ static void ggml_compute_forward_set_f32( size_t offset = ((int32_t *) dst->op_params)[3]; bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (params->ith != 0) { return; } @@ -11171,7 +11171,7 @@ static void ggml_compute_forward_set_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11319,7 +11319,7 @@ static void ggml_compute_forward_get_rows_q( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11359,7 +11359,7 @@ static void ggml_compute_forward_get_rows_f16( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11396,7 +11396,7 @@ static void ggml_compute_forward_get_rows_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11499,14 +11499,14 @@ static void ggml_compute_forward_get_rows_back_f32_f16( // 
ggml_compute_forward_dup_same_cont(params, opt0, dst); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memset(dst->data, 0, ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11538,14 +11538,14 @@ static void ggml_compute_forward_get_rows_back_f32( // ggml_compute_forward_dup_same_cont(params, opt0, dst); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memset(dst->data, 0, ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11615,7 +11615,7 @@ static void ggml_compute_forward_diag_f32( GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11684,7 +11684,7 @@ static void ggml_compute_forward_diag_mask_f32( GGML_ASSERT(n_past >= 0); - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (ith != 0) { return; } @@ -11698,7 +11698,7 @@ static void ggml_compute_forward_diag_mask_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11772,7 +11772,7 @@ static void ggml_compute_forward_soft_max_f32( assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11910,7 +11910,7 @@ static void ggml_compute_forward_soft_max_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_are_same_shape(src1, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12004,7 +12004,7 @@ static void ggml_compute_forward_alibi_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12063,7 +12063,7 @@ static void ggml_compute_forward_alibi_f16( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12170,7 +12170,7 @@ static void ggml_compute_forward_clamp_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12310,7 +12310,7 @@ static void ggml_compute_forward_rope_f32( const struct ggml_tensor * src0 = dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12488,7 +12488,7 @@ static void ggml_compute_forward_rope_f16( const struct ggml_tensor * src0 = 
dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12719,7 +12719,7 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -12759,7 +12759,7 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12818,7 +12818,7 @@ static void ggml_compute_forward_conv_transpose_1d_f32( GGML_ASSERT(nb00 == sizeof(float)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -12858,7 +12858,7 @@ static void ggml_compute_forward_conv_transpose_1d_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12962,11 +12962,11 @@ static void ggml_compute_forward_im2col_f32( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13050,11 +13050,11 @@ static void ggml_compute_forward_im2col_f16( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13136,7 +13136,7 @@ static void ggml_compute_forward_conv_transpose_2d( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -13178,7 +13178,7 @@ static void ggml_compute_forward_conv_transpose_2d( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13230,7 +13230,7 @@ static void ggml_compute_forward_pool_1d_sk_p0( assert(src->type == GGML_TYPE_F32); assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13299,7 +13299,7 @@ static void ggml_compute_forward_pool_2d( GGML_ASSERT(src->type == GGML_TYPE_F32); GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13372,7 +13372,7 @@ static void ggml_compute_forward_upscale_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13432,7 +13432,7 @@ static void ggml_compute_forward_pad_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13493,7 +13493,7 @@ static void 
ggml_compute_forward_argsort_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13519,8 +13519,8 @@ static void ggml_compute_forward_argsort_f32( // C doesn't have a functional sort, so we do a bubble sort instead for (int64_t j = 0; j < ne0; j++) { for (int64_t k = j + 1; k < ne0; k++) { - if ((order == GGML_SORT_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || - (order == GGML_SORT_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { + if ((order == GGML_SORT_ORDER_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || + (order == GGML_SORT_ORDER_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { int32_t tmp = dst_data[j]; dst_data[j] = dst_data[k]; dst_data[k] = tmp; @@ -13603,11 +13603,11 @@ static void ggml_compute_forward_flash_attn_f32( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13795,11 +13795,11 @@ static void ggml_compute_forward_flash_attn_f16( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14054,11 +14054,11 @@ static void ggml_compute_forward_flash_ff_f16( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14213,14 +14213,14 @@ static void ggml_compute_forward_flash_attn_back_f32( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith == 0) { memset(dst->data, 0, nb0*ne0*ne1*ne2*ne3); } return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14536,7 +14536,7 @@ static void ggml_compute_forward_win_part_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14602,7 +14602,7 @@ static void ggml_compute_forward_win_unpart_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14730,7 +14730,7 @@ static void ggml_compute_forward_get_rel_pos_f16( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14782,14 +14782,14 @@ static void ggml_compute_forward_add_rel_pos_f32( const struct ggml_tensor * src2 = dst->src[2]; const bool inplace = (bool) ((int32_t *) dst->op_params)[0]; - if (!inplace && params->type == GGML_TASK_INIT) { + if (!inplace && params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memcpy((char *) dst->data, (char *) src0->data, ggml_nbytes(dst)); return; } - if (params->type == 
GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14871,7 +14871,7 @@ static void ggml_compute_forward_map_unary_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14920,7 +14920,7 @@ static void ggml_compute_forward_map_binary_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14969,7 +14969,7 @@ static void ggml_compute_forward_map_custom1_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14988,7 +14988,7 @@ static void ggml_compute_forward_map_custom2_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15008,7 +15008,7 @@ static void ggml_compute_forward_map_custom3_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15023,7 +15023,7 @@ static void ggml_compute_forward_map_custom1( const struct ggml_tensor * a = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15041,7 +15041,7 @@ static void ggml_compute_forward_map_custom2( const struct ggml_tensor * a = dst->src[0]; const struct ggml_tensor * b = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15060,7 +15060,7 @@ static void ggml_compute_forward_map_custom3( const struct ggml_tensor * b = dst->src[1]; const struct ggml_tensor * c = dst->src[2]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15094,14 +15094,14 @@ static void ggml_compute_forward_cross_entropy_loss_f32( GGML_ASSERT(params->wsize >= sizeof(float) * (nth + nth * nc)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith == 0) { memset(sums, 0, sizeof(float) * (nth + nth * nc)); } return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { if (ith == 0) { float * dp = (float *) dst->data; ggml_vec_sum_f32(nth, dp, sums); @@ -15216,7 +15216,7 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( const int64_t ith = params->ith; const int64_t nth = params->nth; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15323,8 +15323,8 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm if (skip_cpu) { return; } - 
GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); - GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU); + GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU); #elif defined(GGML_USE_VULKAN) const bool skip_cpu = ggml_vk_compute_forward_cpu_assist(params, tensor); #ifdef GGML_VULKAN_CHECK_RESULTS @@ -15335,8 +15335,8 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm if (skip_cpu) { return; } - GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); - GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU); + GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU); #endif // GGML_USE_CUBLAS #ifdef GGML_USE_SYCL @@ -16882,7 +16882,7 @@ size_t ggml_graph_overhead(void) { struct ggml_cgraph * ggml_new_graph_custom(struct ggml_context * ctx, size_t size, bool grads) { const size_t obj_size = ggml_graph_nbytes(size, grads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, obj_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_GRAPH, obj_size); struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs); struct ggml_tensor ** data_start = (struct ggml_tensor **) (cgraph + 1); @@ -17429,7 +17429,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { set_numa_thread_affinity(state->ith); int node_n = -1; - int task_phase = GGML_TASK_FINALIZE; + int task_phase = GGML_TASK_TYPE_FINALIZE; while (true) { if (cplan->abort_callback && cplan->abort_callback(cplan->abort_callback_data)) { @@ -17441,7 +17441,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { // all other threads are finished and spinning // do finalize and init here so we don't have synchronize again struct ggml_compute_params params = { - /*.type =*/ GGML_TASK_FINALIZE, + /*.type =*/ GGML_TASK_TYPE_FINALIZE, /*.ith =*/ 0, /*.nth =*/ 0, /*.wsize =*/ cplan->work_size, @@ -17472,17 +17472,17 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { if (n_tasks == 1) { /* INIT */ if (GGML_OP_HAS_INIT[node->op]) { - params.type = GGML_TASK_INIT; + params.type = GGML_TASK_TYPE_INIT; ggml_compute_forward(¶ms, node); } // TODO: maybe push node_n to the atomic but if other threads see n_tasks is 1, // they do something more efficient than spinning (?) 
- params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; ggml_compute_forward(¶ms, node); if (GGML_OP_HAS_FINALIZE[node->op]) { - params.type = GGML_TASK_FINALIZE; + params.type = GGML_TASK_TYPE_FINALIZE; ggml_compute_forward(¶ms, node); } @@ -17496,7 +17496,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } } - task_phase = GGML_TASK_INIT; + task_phase = GGML_TASK_TYPE_INIT; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_n, node_n); atomic_store(&state->shared->node_task, task_phase); @@ -17513,7 +17513,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { const int n_tasks = ggml_get_n_tasks(node, n_threads); struct ggml_compute_params params = { - /*.type =*/ GGML_TASK_INIT, + /*.type =*/ GGML_TASK_TYPE_INIT, /*.ith =*/ state->ith, /*.nth =*/ n_tasks, /*.wsize =*/ cplan->work_size, @@ -17527,7 +17527,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { - task_phase = GGML_TASK_COMPUTE; + task_phase = GGML_TASK_TYPE_COMPUTE; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_task, task_phase); } @@ -17542,12 +17542,12 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } if (state->ith < n_tasks) { - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; ggml_compute_forward(¶ms, node); } if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { - task_phase = GGML_TASK_FINALIZE; + task_phase = GGML_TASK_TYPE_FINALIZE; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_task, task_phase); } @@ -17783,7 +17783,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { /*.n_threads =*/ n_threads, /*.n_active =*/ n_threads, /*.node_n =*/ -1, - /*.node_task =*/ GGML_TASK_FINALIZE, + /*.node_task =*/ GGML_TASK_TYPE_FINALIZE, /*.abort_callback =*/ NULL, /*.abort_callback_data =*/ NULL, }; @@ -17851,7 +17851,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { void ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { struct ggml_cplan cplan = ggml_graph_plan(cgraph, n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; @@ -18659,7 +18659,7 @@ static enum ggml_opt_result ggml_opt_adam( float * pf = params.past > 0 ? 
opt->adam.pf->data : NULL; // past function values struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; bool cancel = false; @@ -18671,7 +18671,7 @@ static enum ggml_opt_result ggml_opt_adam( if (callback) { callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -18762,7 +18762,7 @@ static enum ggml_opt_result ggml_opt_adam( if (callback) { callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL;; + return GGML_OPT_RESULT_CANCEL;; } } // ggml_graph_reset (gf); @@ -18779,7 +18779,7 @@ static enum ggml_opt_result ggml_opt_adam( if (fabsf(fx - fx_prev[0])/fx < params.adam.eps_f) { GGML_PRINT_DEBUG("converged\n"); - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } // delta-based convergence test @@ -18789,7 +18789,7 @@ static enum ggml_opt_result ggml_opt_adam( const float rate = (pf[(iter0 + t)%params.past] - fx)/fx; if (fabsf(rate) < params.delta) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } @@ -18805,7 +18805,7 @@ static enum ggml_opt_result ggml_opt_adam( ++n_no_improvement[0]; if (n_no_improvement[0] >= params.max_no_improvement) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } } @@ -18823,7 +18823,7 @@ static enum ggml_opt_result ggml_opt_adam( } } - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } // @@ -18904,7 +18904,7 @@ static enum ggml_opt_result linesearch_backtracking( float sched = 0; callback(callback_data, accum_step, &sched, cancel); if (*cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -18977,7 +18977,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( if (params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_WOLFE || params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE) { if (params.lbfgs.wolfe <= params.lbfgs.ftol || 1.f <= params.lbfgs.wolfe) { - return GGML_OPT_INVALID_WOLFE; + return GGML_OPT_RESULT_INVALID_WOLFE; } } @@ -19006,7 +19006,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( } struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; float * x = opt->lbfgs.x->data; // current parameters @@ -19047,7 +19047,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( float sched = 0; callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -19075,7 +19075,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( // already optimized if (gnorm/xnorm <= params.lbfgs.eps) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } if (opt->just_initialized) { @@ -19120,7 +19120,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( // way to test and don't want to break something with so many changes lined up ls = linesearch_backtracking(¶ms, nx, x, &fx, g, d, step, xp, f, gb, &cplan, np, ps, &cancel, callback, callback_data); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } if (ls < 0) { @@ -19143,7 
+19143,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( } if (gnorm/xnorm <= params.lbfgs.eps) { // converged - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } // delta-based convergence test @@ -19153,7 +19153,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( const float rate = (pf[k[0]%params.past] - fx)/fx; if (fabsf(rate) < params.delta) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } @@ -19169,14 +19169,14 @@ static enum ggml_opt_result ggml_opt_lbfgs( n_no_improvement[0]++; if (n_no_improvement[0] >= params.max_no_improvement) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } } if (params.lbfgs.n_iter != 0 && params.lbfgs.n_iter < it + 1) { // reached the maximum number of iterations - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } // update vectors s and y: @@ -19232,17 +19232,17 @@ static enum ggml_opt_result ggml_opt_lbfgs( GGML_ASSERT(false && "lbfgs failed"); - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { struct ggml_opt_params result; switch (type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { result = (struct ggml_opt_params) { - .type = GGML_OPT_ADAM, + .type = GGML_OPT_TYPE_ADAM, .graph_size = GGML_DEFAULT_GRAPH_SIZE, .n_threads = 1, // FIXME: GGML_DEFAULT_N_THREADS ? .past = 0, @@ -19270,10 +19270,10 @@ struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { }, }; } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { result = (struct ggml_opt_params) { - .type = GGML_OPT_LBFGS, + .type = GGML_OPT_TYPE_LBFGS, .graph_size = GGML_DEFAULT_GRAPH_SIZE, .n_threads = 1, .past = 0, @@ -19318,12 +19318,12 @@ GGML_API void ggml_opt_init( opt->just_initialized = true; if (opt->ctx == NULL) { struct ggml_init_params ctx_opt_params; - if (opt->params.type == GGML_OPT_ADAM) { + if (opt->params.type == GGML_OPT_TYPE_ADAM) { ctx_opt_params.mem_size = GGML_MEM_ALIGN*3 + ggml_tensor_overhead()*3 + ggml_type_size(GGML_TYPE_F32)*nx*3; if (opt->params.past > 0) { ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; } - } else if (opt->params.type == GGML_OPT_LBFGS) { + } else if (opt->params.type == GGML_OPT_TYPE_LBFGS) { ctx_opt_params.mem_size = GGML_MEM_ALIGN*9 + ggml_tensor_overhead()*9 + ggml_type_size(GGML_TYPE_F32)*(nx*5 + opt->params.lbfgs.m*2 + nx*opt->params.lbfgs.m*2); if (opt->params.past > 0) { ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; @@ -19335,7 +19335,7 @@ GGML_API void ggml_opt_init( opt->ctx = ggml_init(ctx_opt_params); } switch (opt->params.type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { opt->adam.g = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); opt->adam.m = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); @@ -19349,7 +19349,7 @@ GGML_API void ggml_opt_init( ggml_set_zero(opt->adam.pf); } } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { opt->lbfgs.x = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); opt->lbfgs.xp = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); @@ -19393,13 +19393,13 @@ enum ggml_opt_result ggml_opt( ctx = ggml_init(params_ctx); if (ctx == NULL) { - return GGML_OPT_NO_CONTEXT; + return GGML_OPT_RESULT_NO_CONTEXT; } free_ctx = true; } - enum ggml_opt_result result = GGML_OPT_OK; + enum ggml_opt_result result = GGML_OPT_RESULT_OK; struct ggml_opt_context * opt = (struct ggml_opt_context *) 
alloca(sizeof(struct ggml_opt_context)); @@ -19438,14 +19438,14 @@ enum ggml_opt_result ggml_opt_resume_g( void * callback_data) { // build forward + backward compute graphs - enum ggml_opt_result result = GGML_OPT_OK; + enum ggml_opt_result result = GGML_OPT_RESULT_OK; switch (opt->params.type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { result = ggml_opt_adam(ctx, opt, opt->params, f, gf, gb, callback, callback_data); } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { result = ggml_opt_lbfgs(ctx, opt, opt->params, f, gf, gb, callback, callback_data); } break; diff --git a/ggml.h b/ggml.h index a4166e1f7..75fd035a4 100644 --- a/ggml.h +++ b/ggml.h @@ -364,9 +364,9 @@ extern "C" { }; enum ggml_backend_type { - GGML_BACKEND_CPU = 0, - GGML_BACKEND_GPU = 10, - GGML_BACKEND_GPU_SPLIT = 20, + GGML_BACKEND_TYPE_CPU = 0, + GGML_BACKEND_TYPE_GPU = 10, + GGML_BACKEND_TYPE_GPU_SPLIT = 20, }; // model file types @@ -498,9 +498,9 @@ extern "C" { }; enum ggml_object_type { - GGML_OBJECT_TENSOR, - GGML_OBJECT_GRAPH, - GGML_OBJECT_WORK_BUFFER + GGML_OBJECT_TYPE_TENSOR, + GGML_OBJECT_TYPE_GRAPH, + GGML_OBJECT_TYPE_WORK_BUFFER }; enum ggml_log_level { @@ -642,9 +642,9 @@ extern "C" { // NOTE: the INIT or FINALIZE pass is not scheduled unless explicitly enabled. // This behavior was changed since https://github.com/ggerganov/llama.cpp/pull/1995. enum ggml_task_type { - GGML_TASK_INIT = 0, - GGML_TASK_COMPUTE, - GGML_TASK_FINALIZE, + GGML_TASK_TYPE_INIT = 0, + GGML_TASK_TYPE_COMPUTE, + GGML_TASK_TYPE_FINALIZE, }; struct ggml_compute_params { @@ -1649,8 +1649,8 @@ extern "C" { // sort rows enum ggml_sort_order { - GGML_SORT_ASC, - GGML_SORT_DESC, + GGML_SORT_ORDER_ASC, + GGML_SORT_ORDER_DESC, }; GGML_API struct ggml_tensor * ggml_argsort( @@ -1943,8 +1943,8 @@ extern "C" { // optimization methods enum ggml_opt_type { - GGML_OPT_ADAM, - GGML_OPT_LBFGS, + GGML_OPT_TYPE_ADAM, + GGML_OPT_TYPE_LBFGS, }; // linesearch methods @@ -1958,12 +1958,12 @@ extern "C" { // optimization return values enum ggml_opt_result { - GGML_OPT_OK = 0, - GGML_OPT_DID_NOT_CONVERGE, - GGML_OPT_NO_CONTEXT, - GGML_OPT_INVALID_WOLFE, - GGML_OPT_FAIL, - GGML_OPT_CANCEL, + GGML_OPT_RESULT_OK = 0, + GGML_OPT_RESULT_DID_NOT_CONVERGE, + GGML_OPT_RESULT_NO_CONTEXT, + GGML_OPT_RESULT_INVALID_WOLFE, + GGML_OPT_RESULT_FAIL, + GGML_OPT_RESULT_CANCEL, GGML_LINESEARCH_FAIL = -128, GGML_LINESEARCH_MINIMUM_STEP, diff --git a/llama.cpp b/llama.cpp index 1f6b6cff4..acd9be08a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -850,9 +850,9 @@ struct LLM_TN { // static std::map LLAMA_ROPE_SCALING_TYPES = { - { LLAMA_ROPE_SCALING_NONE, "none" }, - { LLAMA_ROPE_SCALING_LINEAR, "linear" }, - { LLAMA_ROPE_SCALING_YARN, "yarn" }, + { LLAMA_ROPE_SCALING_TYPE_NONE, "none" }, + { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" }, + { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" }, }; static int32_t llama_rope_scaling_type_from_string(const std::string & name) { @@ -862,7 +862,7 @@ static int32_t llama_rope_scaling_type_from_string(const std::string & name) { } } - return LLAMA_ROPE_SCALING_UNSPECIFIED; + return LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; } static std::string gguf_data_to_str(enum gguf_type type, const void * data, int i) { @@ -1580,7 +1580,7 @@ struct llama_hparams { bool causal_attn = true; bool need_kq_pos = false; - uint32_t pooling_type = LLAMA_POOLING_NONE; + uint32_t pooling_type = LLAMA_POOLING_TYPE_NONE; bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -2345,9 +2345,9 @@ namespace GGUFMeta 
{ static const char * override_type_to_str(const llama_model_kv_override_type ty) { switch (ty) { - case LLAMA_KV_OVERRIDE_BOOL: return "bool"; - case LLAMA_KV_OVERRIDE_INT: return "int"; - case LLAMA_KV_OVERRIDE_FLOAT: return "float"; + case LLAMA_KV_OVERRIDE_TYPE_BOOL: return "bool"; + case LLAMA_KV_OVERRIDE_TYPE_INT: return "int"; + case LLAMA_KV_OVERRIDE_TYPE_FLOAT: return "float"; } return "unknown"; } @@ -2358,13 +2358,13 @@ namespace GGUFMeta { LLAMA_LOG_INFO("%s: Using metadata override (%5s) '%s' = ", __func__, override_type_to_str(override->tag), override->key); switch (override->tag) { - case LLAMA_KV_OVERRIDE_BOOL: { + case LLAMA_KV_OVERRIDE_TYPE_BOOL: { LLAMA_LOG_INFO("%s\n", override->bool_value ? "true" : "false"); } break; - case LLAMA_KV_OVERRIDE_INT: { + case LLAMA_KV_OVERRIDE_TYPE_INT: { LLAMA_LOG_INFO("%" PRId64 "\n", override->int_value); } break; - case LLAMA_KV_OVERRIDE_FLOAT: { + case LLAMA_KV_OVERRIDE_TYPE_FLOAT: { LLAMA_LOG_INFO("%.6f\n", override->float_value); } break; default: @@ -2383,7 +2383,7 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_BOOL, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_BOOL, override)) { target = override->bool_value; return true; } @@ -2393,7 +2393,7 @@ namespace GGUFMeta { template static typename std::enable_if::value && std::is_integral::value, bool>::type try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_INT, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_INT, override)) { target = override->int_value; return true; } @@ -2403,7 +2403,7 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type try_override(T & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_FLOAT, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_FLOAT, override)) { target = override->float_value; return true; } @@ -2999,7 +2999,7 @@ static void llm_load_hparams( std::string rope_scaling("linear"); ml.get_key(LLM_KV_ROPE_SCALING_TYPE, rope_scaling, false); hparams.rope_scaling_type_train = llama_rope_scaling_type_from_string(rope_scaling); - GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_UNSPECIFIED); + GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED); // rope_freq_scale (inverse of the kv) is optional float ropescale = 0.0f; @@ -3643,7 +3643,7 @@ static bool llm_load_tensors( model.buft_layer[i] = llama_default_buffer_type_cpu(true); } - if (split_mode == LLAMA_SPLIT_LAYER) { + if (split_mode == LLAMA_SPLIT_MODE_LAYER) { // calculate the split points int device_count = llama_get_device_count(); bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; }); @@ -3682,10 +3682,10 @@ static bool llm_load_tensors( } } else { ggml_backend_buffer_type_t split_buft; - if (split_mode == LLAMA_SPLIT_ROW) { + if (split_mode == LLAMA_SPLIT_MODE_ROW) { split_buft = llama_default_buffer_type_split(main_gpu, tensor_split); } else { - // LLAMA_SPLIT_NONE or LLAMA_SPLIT_LAYER in backends where it is not supported + // LLAMA_SPLIT_MODE_NONE or LLAMA_SPLIT_MODE_LAYER in backends where it is not supported split_buft = llama_default_buffer_type_offload(main_gpu); } // assign the repeating layers @@ -5070,7 +5070,7 @@ struct 
llm_build_context { kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - pooling_type (cparams.do_pooling ? hparams.pooling_type : (uint32_t)LLAMA_POOLING_NONE), + pooling_type (cparams.do_pooling ? hparams.pooling_type : (uint32_t)LLAMA_POOLING_TYPE_NONE), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -6050,12 +6050,12 @@ struct llm_build_context { cur = inpL; // pooling layer - if (pooling_type == LLAMA_POOLING_MEAN) { + if (pooling_type == LLAMA_POOLING_TYPE_MEAN) { cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); - } else if (pooling_type == LLAMA_POOLING_CLS) { + } else if (pooling_type == LLAMA_POOLING_TYPE_CLS) { cur = ggml_get_rows(ctx0, cur, inp_cls); } else { - GGML_ASSERT(pooling_type == LLAMA_POOLING_NONE && "Invalid pooling type"); + GGML_ASSERT(pooling_type == LLAMA_POOLING_TYPE_NONE && "Invalid pooling type"); } cb(cur, "result_embd", -1); @@ -7754,7 +7754,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_MEAN) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_MEAN) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); @@ -7782,7 +7782,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_CLS) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_CLS) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); @@ -11351,7 +11351,7 @@ static int llama_apply_lora_from_file_internal( struct llama_model_params llama_model_default_params() { struct llama_model_params result = { /*.n_gpu_layers =*/ 0, - /*.split_mode =*/ LLAMA_SPLIT_LAYER, + /*.split_mode =*/ LLAMA_SPLIT_MODE_LAYER, /*.main_gpu =*/ 0, /*.tensor_split =*/ nullptr, /*.progress_callback =*/ nullptr, @@ -11377,7 +11377,7 @@ struct llama_context_params llama_context_default_params() { /*.n_batch =*/ 512, /*.n_threads =*/ GGML_DEFAULT_N_THREADS, // TODO: better default /*.n_threads_batch =*/ GGML_DEFAULT_N_THREADS, - /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_UNSPECIFIED, + /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED, /*.rope_freq_base =*/ 0.0f, /*.rope_freq_scale =*/ 0.0f, /*.yarn_ext_factor =*/ -1.0f, @@ -11565,16 +11565,16 @@ struct llama_context * llama_new_context_with_model( cparams.cb_eval_user_data = params.cb_eval_user_data; auto rope_scaling_type = params.rope_scaling_type; - if (rope_scaling_type == LLAMA_ROPE_SCALING_UNSPECIFIED) { + if (rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED) { rope_scaling_type = hparams.rope_scaling_type_train; } - if (rope_scaling_type == LLAMA_ROPE_SCALING_NONE) { + if (rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_NONE) { cparams.rope_freq_scale = 1.0f; // never scale if scaling type is none } if (cparams.yarn_ext_factor < 0.0f) { // negative indicates 'not set' - cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_YARN ? 1.0f : 0.0f; + cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_YARN ? 
1.0f : 0.0f; } if (params.seed == LLAMA_DEFAULT_SEED) { @@ -11608,8 +11608,8 @@ struct llama_context * llama_new_context_with_model( } #elif defined(GGML_USE_CUBLAS) if (model->n_gpu_layers > 0) { - // with split_mode LLAMA_SPLIT_NONE or LLAMA_SPLIT_ROW, only the main GPU backend is used - if (model->split_mode == LLAMA_SPLIT_NONE || model->split_mode == LLAMA_SPLIT_ROW) { + // with split_mode LLAMA_SPLIT_MODE_NONE or LLAMA_SPLIT_MODE_ROW, only the main GPU backend is used + if (model->split_mode == LLAMA_SPLIT_MODE_NONE || model->split_mode == LLAMA_SPLIT_MODE_ROW) { ggml_backend_t backend = ggml_backend_cuda_init(model->main_gpu); if (backend == nullptr) { LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, model->main_gpu); @@ -11618,7 +11618,7 @@ struct llama_context * llama_new_context_with_model( } ctx->backends.push_back(backend); } else { - // LLAMA_SPLIT_LAYER requires a backend for each GPU + // LLAMA_SPLIT_MODE_LAYER requires a backend for each GPU for (int device = 0; device < ggml_backend_cuda_get_device_count(); ++device) { ggml_backend_t backend = ggml_backend_cuda_init(device); if (backend == nullptr) { diff --git a/llama.h b/llama.h index 889edf4d9..947284ea2 100644 --- a/llama.h +++ b/llama.h @@ -109,23 +109,23 @@ extern "C" { }; enum llama_rope_scaling_type { - LLAMA_ROPE_SCALING_UNSPECIFIED = -1, - LLAMA_ROPE_SCALING_NONE = 0, - LLAMA_ROPE_SCALING_LINEAR = 1, - LLAMA_ROPE_SCALING_YARN = 2, - LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, + LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED = -1, + LLAMA_ROPE_SCALING_TYPE_NONE = 0, + LLAMA_ROPE_SCALING_TYPE_LINEAR = 1, + LLAMA_ROPE_SCALING_TYPE_YARN = 2, + LLAMA_ROPE_SCALING_TYPE_MAX_VALUE = LLAMA_ROPE_SCALING_TYPE_YARN, }; enum llama_pooling_type { - LLAMA_POOLING_NONE = 0, - LLAMA_POOLING_MEAN = 1, - LLAMA_POOLING_CLS = 2, + LLAMA_POOLING_TYPE_NONE = 0, + LLAMA_POOLING_TYPE_MEAN = 1, + LLAMA_POOLING_TYPE_CLS = 2, }; enum llama_split_mode { - LLAMA_SPLIT_NONE = 0, // single GPU - LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs - LLAMA_SPLIT_ROW = 2, // split rows across GPUs + LLAMA_SPLIT_MODE_NONE = 0, // single GPU + LLAMA_SPLIT_MODE_LAYER = 1, // split layers and KV across GPUs + LLAMA_SPLIT_MODE_ROW = 2, // split rows across GPUs }; typedef struct llama_token_data { @@ -173,9 +173,9 @@ extern "C" { } llama_batch; enum llama_model_kv_override_type { - LLAMA_KV_OVERRIDE_INT, - LLAMA_KV_OVERRIDE_FLOAT, - LLAMA_KV_OVERRIDE_BOOL, + LLAMA_KV_OVERRIDE_TYPE_INT, + LLAMA_KV_OVERRIDE_TYPE_FLOAT, + LLAMA_KV_OVERRIDE_TYPE_BOOL, }; struct llama_model_kv_override { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f8574588b..24d12ef14 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1264,7 +1264,7 @@ struct test_argsort : public test_case { test_argsort(ggml_type type = GGML_TYPE_F32, std::array ne = {16, 10, 10, 10}, - ggml_sort_order order = GGML_SORT_ASC) + ggml_sort_order order = GGML_SORT_ORDER_ASC) : type(type), ne(ne), order(order) {} ggml_tensor * build_graph(ggml_context * ctx) override { @@ -2116,7 +2116,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); - for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { + for (ggml_sort_order order : {GGML_SORT_ORDER_ASC, GGML_SORT_ORDER_DESC}) { test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); 
test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); } diff --git a/tests/test-opt.cpp b/tests/test-opt.cpp index 2c9997fca..546ca230b 100644 --- a/tests/test-opt.cpp +++ b/tests/test-opt.cpp @@ -118,7 +118,7 @@ int main(void) { const float fe = ggml_get_f32_1d(e, 0); printf("%s: e = %.4f\n", __func__, fe); - struct ggml_opt_params opt_params = ggml_opt_default_params(GGML_OPT_ADAM); + struct ggml_opt_params opt_params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); ggml_opt(ctx, opt_params, e); From 12894088170f62e4cad4f8d6a3043c185b414bab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Gryta?= Date: Sun, 25 Feb 2024 11:53:11 +0100 Subject: [PATCH 702/811] cmake : fix compilation for Android armeabi-v7a (#5702) --- CMakeLists.txt | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3c4629001..48880f720 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -936,10 +936,16 @@ if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STR list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access) endif() if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7") - # Raspberry Pi 2 - list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Android") + # Android armeabi-v7a + list(APPEND ARCH_FLAGS -mfpu=neon-vfpv4 -mno-unaligned-access -funsafe-math-optimizations) + else() + # Raspberry Pi 2 + list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + endif() endif() if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv8") + # Android arm64-v8a # Raspberry Pi 3, 4, Zero 2 (32-bit) list(APPEND ARCH_FLAGS -mno-unaligned-access) endif() From d52d7819b8ced70c642a88a59da8c78208dc58ec Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 13:49:43 +0100 Subject: [PATCH 703/811] server: concurrency fix + monitoring - add /metrics prometheus compatible endpoint (#5708) * server: monitoring - add /metrics prometheus compatible endpoint * server: concurrency issue, when 2 task are waiting for results, only one call thread is notified * server: metrics - move to a dedicated struct --- examples/server/README.md | 13 ++ examples/server/server.cpp | 150 +++++++++++++++++- examples/server/tests/features/environment.py | 2 + examples/server/tests/features/server.feature | 2 + examples/server/tests/features/steps/steps.py | 27 ++++ examples/server/tests/requirements.txt | 1 + examples/server/utils.hpp | 4 +- 7 files changed, 191 insertions(+), 8 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 0c43ac4c9..2129f7fb2 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -41,6 +41,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. +- `--metrics`: enable prometheus `/metrics` compatible endpoint (default: disabled) - `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). 
We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) ## Build @@ -457,6 +458,18 @@ Notice that each `probs` is an array of length `n_probs`. ] ``` +- **GET** `/metrics`: [Prometheus](https://prometheus.io/) compatible metrics exporter endpoint if `--metrics` is enabled: + +Available metrics: +- `llamacpp:prompt_tokens_total`: Number of prompt tokens processed. +- `llamacpp:tokens_predicted_total`: Number of generation tokens processed. +- `llamacpp:prompt_tokens_seconds`: Average prompt throughput in tokens/s. +- `llamacpp:predicted_tokens_seconds`: Average generation throughput in tokens/s. +- `llamacpp:kv_cache_usage_ratio`: KV-cache usage. 1 means 100 percent usage. +- `llamacpp:kv_cache_tokens`: KV-cache tokens. +- `llamacpp:requests_processing`: Number of request processing. +- `llamacpp:requests_deferred`: Number of request deferred. + ## More examples ### Change system prompt on runtime diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 780862ef6..811495915 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -43,6 +43,7 @@ struct server_params int32_t read_timeout = 600; int32_t write_timeout = 600; bool slots_endpoint = true; + bool metrics_endpoint = false; }; bool server_verbose = false; @@ -310,6 +311,39 @@ struct llama_client_slot } }; +struct llama_metrics { + uint64_t n_prompt_tokens_processed_total = 0; + uint64_t n_tokens_predicted_total = 0; + + uint64_t n_prompt_tokens_processed = 0; + uint64_t t_prompt_processing = 0; + + uint64_t n_tokens_predicted = 0; + uint64_t t_tokens_generation = 0; + + + void on_prompt_eval(const llama_client_slot &slot) { + n_prompt_tokens_processed_total += slot.num_prompt_tokens_processed; + + n_prompt_tokens_processed += slot.num_prompt_tokens_processed; + t_prompt_processing += slot.t_prompt_processing; + } + + void on_prediction(const llama_client_slot &slot) { + n_tokens_predicted_total += slot.n_decoded; + + n_tokens_predicted += slot.n_decoded; + t_tokens_generation += slot.t_token_generation; + } + + void reset_bucket() { + n_prompt_tokens_processed = 0; + t_prompt_processing = 0; + n_tokens_predicted = 0; + t_tokens_generation = 0; + } +}; + struct llama_server_context { llama_model *model = nullptr; @@ -344,6 +378,8 @@ struct llama_server_context llama_server_queue queue_tasks; llama_server_response queue_results; + llama_metrics metrics; + ~llama_server_context() { if (ctx) @@ -1404,7 +1440,7 @@ struct llama_server_context case TASK_TYPE_NEXT_RESPONSE: { // do nothing } break; - case TASK_TYPE_SLOTS_DATA: { + case TASK_TYPE_METRICS: { json slots_data = json::array(); int n_idle_slots = 0; int n_processing_slots = 0; @@ -1438,10 +1474,24 @@ struct llama_server_context res.stop = true; res.error = false; res.result_json = { - { "idle", n_idle_slots }, - { "processing", n_processing_slots }, - { "slots", slots_data } + { "idle", n_idle_slots }, + { "processing", n_processing_slots }, + { "deferred", queue_tasks.queue_tasks_deferred.size() }, + + { "n_prompt_tokens_processed_total", metrics.n_prompt_tokens_processed_total}, + { "n_tokens_predicted_total", metrics.n_tokens_predicted_total}, + + { "n_prompt_tokens_processed", metrics.n_prompt_tokens_processed}, + { "t_prompt_processing", metrics.t_prompt_processing}, + { "n_tokens_predicted", metrics.n_tokens_predicted}, + { "t_tokens_generation", metrics.t_tokens_generation}, + + { "kv_cache_tokens_count", llama_get_kv_cache_token_count(ctx)}, + { 
"kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, + + { "slots", slots_data }, }; + metrics.reset_bucket(); queue_results.send(res); } break; } @@ -1849,6 +1899,7 @@ struct llama_server_context { slot.t_start_genereration = ggml_time_us(); slot.t_prompt_processing = (slot.t_start_genereration - slot.t_start_process_prompt) / 1e3; + metrics.on_prompt_eval(slot); } llama_token_data_array cur_p = { slot.ctx_sampling->cur.data(), slot.ctx_sampling->cur.size(), false }; @@ -1871,6 +1922,7 @@ struct llama_server_context slot.release(); slot.print_timings(); send_final_response(slot); + metrics.on_prediction(slot); } slot.i_batch = -1; @@ -1955,6 +2007,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); + printf(" --metrics enable prometheus compatible metrics endpoint (default: %s).\n", sparams.metrics_endpoint ? "enabled" : "disabled"); printf("\n"); printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); @@ -2414,6 +2467,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, { sparams.slots_endpoint = false; } + else if (arg == "--metrics") + { + sparams.metrics_endpoint = true; + } else if (arg == "--chat-template") { if (++i >= argc) @@ -2621,7 +2678,7 @@ int main(int argc, char **argv) // request slots data using task queue task_server task; task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_SLOTS_DATA; + task.type = TASK_TYPE_METRICS; task.target_id = -1; llama.queue_results.add_waiting_task_id(task.id); @@ -2668,7 +2725,7 @@ int main(int argc, char **argv) // request slots data using task queue task_server task; task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_SLOTS_DATA; + task.type = TASK_TYPE_METRICS; task.target_id = -1; llama.queue_results.add_waiting_task_id(task.id); @@ -2683,6 +2740,87 @@ int main(int argc, char **argv) }); } + if (sparams.metrics_endpoint) { + svr.Get("/metrics", [&](const httplib::Request&, httplib::Response& res) { + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_METRICS; + task.target_id = -1; + + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + json data = result.result_json; + + uint64_t n_prompt_tokens_processed = data["n_prompt_tokens_processed"]; + uint64_t t_prompt_processing = data["t_prompt_processing"]; + + uint64_t n_tokens_predicted = data["n_tokens_predicted"]; + uint64_t t_tokens_generation = data["t_tokens_generation"]; + + int32_t kv_cache_used_cells = data["kv_cache_used_cells"]; + + // metrics definition: https://prometheus.io/docs/practices/naming/#metric-names + json all_metrics_def = json { + {"counter", {{ + {"name", "prompt_tokens_total"}, + {"help", "Number of prompt tokens processed."}, + {"value", data["n_prompt_tokens_processed_total"]} + }, { + {"name", "tokens_predicted_total"}, + {"help", "Number of generation tokens processed."}, + {"value", data["n_tokens_predicted_total"]} + }}}, + {"gauge", {{ + {"name", "prompt_tokens_seconds"}, + {"help", "Average prompt throughput in tokens/s."}, + {"value", 
n_prompt_tokens_processed ? 1e3 / t_prompt_processing * n_prompt_tokens_processed : 0} + },{ + {"name", "predicted_tokens_seconds"}, + {"help", "Average generation throughput in tokens/s."}, + {"value", n_tokens_predicted ? 1e3 / t_tokens_generation * n_tokens_predicted : 0} + },{ + {"name", "kv_cache_usage_ratio"}, + {"help", "KV-cache usage. 1 means 100 percent usage."}, + {"value", 1. * kv_cache_used_cells / params.n_ctx} + },{ + {"name", "kv_cache_tokens"}, + {"help", "KV-cache tokens."}, + {"value", data["kv_cache_tokens_count"]} + },{ + {"name", "requests_processing"}, + {"help", "Number of request processing."}, + {"value", data["processing"]} + },{ + {"name", "requests_deferred"}, + {"help", "Number of request deferred."}, + {"value", data["deferred"]} + }}} + }; + + std::stringstream prometheus; + for (const auto& el : all_metrics_def.items()) { + const auto& type = el.key(); + const auto& metrics_def = el.value(); + for (const auto& metric_def : metrics_def) { + std::string name = metric_def["name"]; + std::string help = metric_def["help"]; + prometheus << "# HELP llamacpp:" << name << " " << help << "\n" + << "# TYPE llamacpp:" << name << " " << type << "\n" + << "llamacpp:" << name << " " << metric_def["value"] << "\n"; + } + } + + res.set_content(prometheus.str(), "text/plain; version=0.0.4"); + res.status = 200; // HTTP OK + }); + } + svr.set_logger(log_server_request); svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) diff --git a/examples/server/tests/features/environment.py b/examples/server/tests/features/environment.py index 13cc84101..09e826747 100644 --- a/examples/server/tests/features/environment.py +++ b/examples/server/tests/features/environment.py @@ -16,6 +16,8 @@ def before_scenario(context, scenario): def after_scenario(context, scenario): + if context.server_process is None: + return if scenario.status == "failed": if 'GITHUB_ACTIONS' in os.environ: print(f"\x1b[33;101mSCENARIO FAILED: {scenario.name} server logs:\x1b[0m\n\n") diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index 5f81d256a..0139f89d8 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -13,6 +13,7 @@ Feature: llama.cpp server And 1 slots And embeddings extraction And 32 server max tokens to predict + And prometheus compatible metrics exposed Then the server is starting Then the server is healthy @@ -25,6 +26,7 @@ Feature: llama.cpp server And max tokens to predict And a completion request with no api error Then tokens are predicted matching + And prometheus metrics are exposed Examples: Prompts | prompt | n_predict | re_content | n_predicted | diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 9c825fdbc..051fd440c 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -13,6 +13,7 @@ import aiohttp import openai from behave import step from behave.api.async_step import async_run_until_complete +from prometheus_client import parser @step(u"a server listening on {server_fqdn}:{server_port}") @@ -34,6 +35,8 @@ def step_server_config(context, server_fqdn, server_port): context.server_api_key = None context.server_continuous_batching = False context.server_embeddings = False + context.server_metrics = False + context.server_process = None context.server_seed = None context.user_api_key = None @@ -82,6 +85,11 @@ 
def step_server_embeddings(context): context.server_embeddings = True +@step(u'prometheus compatible metrics exposed') +def step_server_metrics(context): + context.server_metrics = True + + @step(u"the server is starting") def step_start_server(context): start_server_background(context) @@ -424,6 +432,23 @@ def step_check_options_header_value(context, cors_header, cors_header_value): assert context.options_response.headers[cors_header] == cors_header_value +@step(u'prometheus metrics are exposed') +@async_run_until_complete +async def step_prometheus_metrics_exported(context): + async with aiohttp.ClientSession() as session: + async with await session.get(f'{context.base_url}/metrics') as metrics_response: + assert metrics_response.status == 200 + assert metrics_response.headers['Content-Type'] == "text/plain; version=0.0.4" + metrics_raw = await metrics_response.text() + metric_exported = False + for metric in parser.text_string_to_metric_families(metrics_raw): + match metric.name: + case "llamacpp:kv_cache_usage_ratio": + assert len(metric.samples) > 0 + metric_exported = True + assert metric_exported, "No metrics exported" + + async def concurrent_requests(context, f_completion, *args, **kwargs): n_prompts = len(context.prompts) if context.debug: @@ -753,6 +778,8 @@ def start_server_background(context): server_args.append('--cont-batching') if context.server_embeddings: server_args.append('--embedding') + if context.server_metrics: + server_args.append('--metrics') if context.model_alias is not None: server_args.extend(['--alias', context.model_alias]) if context.n_ctx is not None: diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt index 3e51b12dc..334fa4a70 100644 --- a/examples/server/tests/requirements.txt +++ b/examples/server/tests/requirements.txt @@ -1,3 +1,4 @@ aiohttp~=3.9.3 behave~=1.2.6 openai~=0.25.0 +prometheus-client~=0.20.0 diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 88545eb69..71cc5b0b8 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -50,7 +50,7 @@ enum task_type { TASK_TYPE_COMPLETION, TASK_TYPE_CANCEL, TASK_TYPE_NEXT_RESPONSE, - TASK_TYPE_SLOTS_DATA + TASK_TYPE_METRICS }; struct task_server { @@ -441,7 +441,7 @@ struct llama_server_response { { LOG_VERBOSE("queue_results.push_back", {}); queue_results.push_back(result); - condition_results.notify_one(); + condition_results.notify_all(); return; } } From 930b1780269a69948d106e2d1b838ab7661f679a Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 13:50:32 +0100 Subject: [PATCH 704/811] server: logs - unified format and --log-format option (#5700) * server: logs - always use JSON logger, add add thread_id in message, log task_id and slot_id * server : skip GH copilot requests from logging * server : change message format of server_log() * server : no need to repeat log in comment * server : log style consistency * server : fix compile warning * server : fix tests regex patterns on M2 Ultra * server: logs: PR feedback on log level * server: logs: allow to choose log format in json or plain text * server: tests: output server logs in text * server: logs switch init logs to server logs macro * server: logs ensure value json value does not raised error * server: logs reduce level VERBOSE to VERB to max 4 chars * server: logs lower case as other log messages * server: logs avoid static in general Co-authored-by: Georgi Gerganov * server: logs PR feedback: change text log format to: LEVEL [function_name] message | 
additional=data --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 4 +- examples/server/server.cpp | 218 ++++++++++++++---- examples/server/tests/README.md | 1 + examples/server/tests/features/server.feature | 6 +- examples/server/tests/features/steps/steps.py | 2 + examples/server/utils.hpp | 80 ++++--- 6 files changed, 231 insertions(+), 80 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 2129f7fb2..cb3fd6054 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -39,10 +39,12 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--mmproj MMPROJ_FILE`: Path to a multimodal projector file for LLaVA. - `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` -- `-n, --n-predict`: Set the maximum tokens to predict (default: -1) +- `-n N, --n-predict N`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. - `--metrics`: enable prometheus `/metrics` compatible endpoint (default: disabled) - `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) +- `--log-disable`: Output logs to stdout only, default: enabled. +- `--log-format FORMAT`: Define the log output to FORMAT: json or text (default: json) ## Build diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 811495915..d970202d2 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -47,6 +47,7 @@ struct server_params }; bool server_verbose = false; +bool server_log_json = true; static size_t common_part(const std::vector &a, const std::vector &b) { @@ -302,12 +303,43 @@ struct llama_client_slot } void print_timings() const { - LOG_TEE("\n"); - LOG_TEE("%s: prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n", - __func__, t_prompt_processing, num_prompt_tokens_processed, t_prompt_processing / num_prompt_tokens_processed, 1e3 / t_prompt_processing * num_prompt_tokens_processed); - LOG_TEE("%s: eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n", - __func__, t_token_generation, n_decoded,t_token_generation / n_decoded, 1e3 / t_token_generation * n_decoded); - LOG_TEE("%s: total time = %10.2f ms\n", __func__, t_prompt_processing + t_token_generation); + char buffer[512]; + double t_token = t_prompt_processing / num_prompt_tokens_processed; + double n_tokens_second = 1e3 / t_prompt_processing * num_prompt_tokens_processed; + sprintf(buffer, "prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)", + t_prompt_processing, num_prompt_tokens_processed, + t_token, n_tokens_second); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_prompt_processing", t_prompt_processing}, + {"num_prompt_tokens_processed", num_prompt_tokens_processed}, + {"t_token", t_token}, + {"n_tokens_second", n_tokens_second}, + }); + + t_token = t_token_generation / n_decoded; + n_tokens_second = 1e3 / 
t_token_generation * n_decoded; + sprintf(buffer, "generation eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)", + t_token_generation, n_decoded, + t_token, n_tokens_second); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_token_generation", t_token_generation}, + {"n_decoded", n_decoded}, + {"t_token", t_token}, + {"n_tokens_second", n_tokens_second}, + }); + + sprintf(buffer, " total time = %10.2f ms", t_prompt_processing + t_token_generation); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_prompt_processing", t_prompt_processing}, + {"t_token_generation", t_token_generation}, + {"t_total", t_prompt_processing + t_token_generation}, + }); } }; @@ -399,7 +431,7 @@ struct llama_server_context params = params_; if (!params.mmproj.empty()) { multimodal = true; - LOG_TEE("Multi Modal Mode Enabled"); + LOG_INFO("Multi Modal Mode Enabled", {}); clp_ctx = clip_model_load(params.mmproj.c_str(), /*verbosity=*/ 1); if(clp_ctx == nullptr) { LOG_ERROR("unable to load clip model", {{"model", params.mmproj}}); @@ -452,7 +484,7 @@ struct llama_server_context const int32_t n_ctx_slot = n_ctx / params.n_parallel; - LOG_TEE("Available slots:\n"); + LOG_INFO("initializing slots", {{"n_slots", params.n_parallel}}); for (int i = 0; i < params.n_parallel; i++) { llama_client_slot slot; @@ -461,7 +493,10 @@ struct llama_server_context slot.n_ctx = n_ctx_slot; slot.n_predict = params.n_predict; - LOG_TEE(" -> Slot %i - max context: %i\n", slot.id, n_ctx_slot); + LOG_INFO("new slot", { + {"slot_id", slot.id}, + {"n_ctx_slot", slot.n_ctx} + }); const int ga_n = params.grp_attn_n; const int ga_w = params.grp_attn_w; @@ -471,7 +506,12 @@ struct llama_server_context GGML_ASSERT(ga_w % ga_n == 0 && "ga_w must be a multiple of ga_n"); // NOLINT //GGML_ASSERT(n_ctx_train % ga_w == 0 && "n_ctx_train must be a multiple of ga_w"); // NOLINT //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * ga_n"); // NOLINT - LOG_TEE(" -> Slot %i - self-extend: ga_n = %d, ga_w = %d\n", slot.id, ga_n, ga_w); + + LOG_INFO("slot self-extend", { + {"slot_id", slot.id}, + {"ga_n", ga_n}, + {"ga_w", ga_w} + }); } slot.ga_i = 0; @@ -765,10 +805,16 @@ struct llama_server_context img_sl.img_data = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_buffer.data(), image_buffer.size(), img_sl.img_data)) { - LOG_TEE("slot %i - failed to load image [id: %i]\n", slot->id, img_sl.id); + LOG_ERROR("failed to load image", { + {"slot_id", slot->id}, + {"img_sl_id", img_sl.id} + }); return false; } - LOG_TEE("slot %i - loaded image\n", slot->id); + LOG_VERBOSE("image loaded", { + {"slot_id", slot->id}, + {"img_sl_id", img_sl.id} + }); img_sl.request_encode_image = true; slot->images.push_back(img_sl); } @@ -828,7 +874,10 @@ struct llama_server_context all_slots_are_idle = false; - LOG_TEE("slot %i is processing [task id: %i]\n", slot->id, slot->task_id); + LOG_INFO("slot is processing task", { + {"slot_id", slot->id}, + {"task_id", slot->task_id}, + }); return true; } @@ -1391,7 +1440,7 @@ struct llama_server_context if (slot == nullptr) { // if no slot is available, we defer this task for processing later - LOG_VERBOSE("no slot is available", {}); + LOG_VERBOSE("no slot is available", {{"task_id", task.id}}); queue_tasks.defer(task); break; } @@ -1467,7 +1516,17 @@ struct llama_server_context } slots_data.push_back(slot_data); } - LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); + 
LOG_INFO("slot data", { + {"task_id", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots} + }); + LOG_VERBOSE("slot data", { + {"task_id", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots}, + {"slots", slots_data} + }); task_result res; res.id = task.id; res.multitask_id = task.multitask_id; @@ -1519,7 +1578,7 @@ struct llama_server_context bool update_slots() { if (system_need_update) { - LOG_TEE("updating system prompt\n"); + LOG_INFO("updating system prompt", {}); update_system_prompt(); } @@ -1529,12 +1588,13 @@ struct llama_server_context { if (system_prompt.empty() && clean_kv_cache) { - LOG_TEE("all slots are idle and system prompt is empty, clear the KV cache\n"); + LOG_INFO("all slots are idle and system prompt is empty, clear the KV cache", {}); kv_cache_clear(); } return true; } + LOG_VERBOSE("posting NEXT_RESPONSE", {}); task_server task; task.type = TASK_TYPE_NEXT_RESPONSE; task.target_id = -1; @@ -1548,10 +1608,20 @@ struct llama_server_context { // Shift context const int n_keep = slot.params.n_keep + add_bos_token; - const int n_left = system_tokens.size() + slot.n_past - n_keep; + const int n_left = (int) system_tokens.size() + slot.n_past - n_keep; const int n_discard = n_left / 2; - LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, n_keep, n_left, n_discard); + LOG_INFO("slot context shift", { + {"slot_id", slot.id}, + {"task_id", slot.task_id}, + {"n_keep", n_keep}, + {"n_left", n_left}, + {"n_discard", n_discard}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()} + }); llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); llama_kv_cache_seq_shift(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); @@ -1565,17 +1635,12 @@ struct llama_server_context slot.n_past -= n_discard; slot.truncated = true; - - LOG_VERBOSE("context shift", { - { "n_ctx", n_ctx }, - { "n_keep", n_keep }, - { "n_left", n_left }, - }); } } } // decode any currently ongoing sequences + LOG_VERBOSE("decoding ongoing sequences", {}); for (auto & slot : slots) { // release the slot @@ -1585,7 +1650,15 @@ struct llama_server_context slot.command = NONE; slot.t_last_used = ggml_time_us(); - LOG_TEE("slot %d released (%d tokens in cache)\n", slot.id, (int) slot.cache_tokens.size()); + LOG_INFO("slot released", { + {"slot_id", slot.id}, + {"task_id", slot.task_id}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()}, + {"truncated", slot.truncated} + }); queue_tasks.notify_slot_changed(); continue; @@ -1733,7 +1806,12 @@ struct llama_server_context slot.ga_i = ga_i; } - LOG_TEE("slot %d : in cache: %i tokens | to process: %i tokens\n", slot.id, slot.n_past, slot.num_prompt_tokens_processed); + LOG_INFO("slot progression", { + { "slot_id", slot.id }, + { "task_id", slot.task_id }, + { "n_past", slot.n_past }, + { "num_prompt_tokens_processed", slot.num_prompt_tokens_processed } + }); } slot.cache_tokens = prompt_tokens; @@ -1741,7 +1819,10 @@ struct llama_server_context if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) { // we have to evaluate at least 1 token to generate logits. 
- LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id); + LOG_INFO("we have to evaluate at least 1 token to generate logits", { + { "slot_id", slot.id }, + { "task_id", slot.task_id } + }); slot.n_past--; if (slot.ga_i > 0) { @@ -1749,9 +1830,13 @@ struct llama_server_context } } - LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); - - llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); + int p0 = (int) system_tokens.size() + slot.n_past; + LOG_INFO("kv cache rm [p0, end)", { + { "slot_id", slot.id }, + { "task_id", slot.task_id }, + { "p0", p0 } + }); + llama_kv_cache_seq_rm(ctx, slot.id, p0, -1); LOG_VERBOSE("prompt ingested", { {"n_past", slot.n_past}, @@ -1786,7 +1871,13 @@ struct llama_server_context if (has_images && !ingest_images(slot, n_batch)) { - LOG_TEE("failed processing images\n"); + LOG_ERROR("failed processing images", { + "slot_id", slot.id, + "task_id", slot.task_id, + }); + // FIXME @phymbert: to be properly tested + // early returning without changing the slot state will block the slot for ever + // no one at the moment is checking the return value return false; } @@ -1928,6 +2019,8 @@ struct llama_server_context slot.i_batch = -1; } } + + LOG_VERBOSE("slots updated", {}); return true; } @@ -2005,6 +2098,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -ctv TYPE, --cache-type-v TYPE\n"); printf(" KV cache data type for V (default: f16)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); + printf(" --log-format log output format: json or text (default: json)\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); printf(" --metrics enable prometheus compatible metrics endpoint (default: %s).\n", sparams.metrics_endpoint ? 
"enabled" : "disabled"); @@ -2458,6 +2552,27 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.mmproj = argv[i]; } + else if (arg == "--log-format") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + if (std::strcmp(argv[i], "json") == 0) + { + server_log_json = true; + } + else if (std::strcmp(argv[i], "text") == 0) + { + server_log_json = false; + } + else + { + invalid_param = true; + break; + } + } else if (arg == "--log-disable") { log_set_target(stdout); @@ -2571,32 +2686,40 @@ static json format_partial_response( static json format_tokenizer_response(const std::vector &tokens) { - return json{ - {"tokens", tokens}}; + return json { + {"tokens", tokens} + }; } static json format_detokenized_response(std::string content) { - return json{ - {"content", content}}; + return json { + {"content", content} + }; } static void log_server_request(const httplib::Request &req, const httplib::Response &res) { + // skip GH copilot requests when using default port + if (req.path == "/v1/health" || req.path == "/v1/completions") + { + return; + } + LOG_INFO("request", { - {"remote_addr", req.remote_addr}, - {"remote_port", req.remote_port}, - {"status", res.status}, - {"method", req.method}, - {"path", req.path}, - {"params", req.params}, - }); + {"remote_addr", req.remote_addr}, + {"remote_port", req.remote_port}, + {"status", res.status}, + {"method", req.method}, + {"path", req.path}, + {"params", req.params}, + }); LOG_VERBOSE("request", { - {"request", req.body}, - {"response", res.body}, - }); + {"request", req.body}, + {"response", res.body}, + }); } struct token_translator @@ -2873,9 +2996,6 @@ int main(int argc, char **argv) // Set the base directory for serving static files svr.set_base_dir(sparams.public_path); - // to make it ctrl+clickable: - LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - std::unordered_map log_data; log_data["hostname"] = sparams.hostname; log_data["port"] = std::to_string(sparams.port); diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index e44c5c286..0b9fdc4e7 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -32,6 +32,7 @@ It's possible to override some scenario steps values with environment variables: - `PORT` -> `context.server_port` to set the listening port of the server during scenario, default: `8080` - `LLAMA_SERVER_BIN_PATH` -> to change the server binary path, default: `../../../build/bin/server` - `DEBUG` -> "ON" to enable steps and server verbose mode `--verbose` + - `SERVER_LOG_FORMAT_JSON` -> if set switch server logs to json format ### Run @bug, @wip or @wrong_usage annotated scenario diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index 0139f89d8..b571582a7 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -29,9 +29,9 @@ Feature: llama.cpp server And prometheus metrics are exposed Examples: Prompts - | prompt | n_predict | re_content | n_predicted | - | I believe the meaning of life is | 8 | read | 8 | - | Write a joke about AI | 64 | (parkfriendsscared)+ | 32 | + | prompt | n_predict | re_content | n_predicted | + | I believe the meaning of life is | 8 | (readgoing)+ | 8 | + | Write a joke about AI | 64 | (parkfriendsscaredalways)+ | 32 | Scenario Outline: OAI Compatibility Given a model diff --git a/examples/server/tests/features/steps/steps.py 
b/examples/server/tests/features/steps/steps.py index 051fd440c..8e4babf20 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -792,6 +792,8 @@ def start_server_background(context): server_args.extend(['--api-key', context.server_api_key]) if context.debug: server_args.append('--verbose') + if 'SERVER_LOG_FORMAT_JSON' not in os.environ: + server_args.extend(['--log-format', "text"]) print(f"starting server with: {context.server_path}", *server_args) context.server_process = subprocess.Popen( [str(arg) for arg in [context.server_path, *server_args]], diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 71cc5b0b8..d7abd7cbb 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -14,6 +14,7 @@ using json = nlohmann::json; extern bool server_verbose; +extern bool server_log_json; #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 @@ -27,14 +28,14 @@ extern bool server_verbose; { \ if (server_verbose) \ { \ - server_log("VERBOSE", __func__, __LINE__, MSG, __VA_ARGS__); \ + server_log("VERB", __func__, __LINE__, MSG, __VA_ARGS__); \ } \ } while (0) #endif -#define LOG_ERROR( MSG, ...) server_log("ERROR", __func__, __LINE__, MSG, __VA_ARGS__) -#define LOG_WARNING(MSG, ...) server_log("WARNING", __func__, __LINE__, MSG, __VA_ARGS__) -#define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_ERROR( MSG, ...) server_log("ERR", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_WARNING(MSG, ...) server_log("WARN", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) // // parallel @@ -133,26 +134,48 @@ struct completion_token_output std::string text_to_send; }; -static inline void server_log(const char *level, const char *function, int line, - const char *message, const nlohmann::ordered_json &extra) +static inline void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) { - nlohmann::ordered_json log - { + std::stringstream ss_tid; + ss_tid << std::this_thread::get_id(); + json log = nlohmann::ordered_json{ + {"tid", ss_tid.str()}, {"timestamp", time(nullptr)}, - {"level", level}, - {"function", function}, - {"line", line}, - {"message", message}, }; - if (!extra.empty()) - { - log.merge_patch(extra); - } + if (server_log_json) { + log.merge_patch( + { + {"level", level}, + {"function", function}, + {"line", line}, + {"msg", message}, + }); + if (!extra.empty()) { + log.merge_patch(extra); + } - const std::string str = log.dump(-1, ' ', false, json::error_handler_t::replace); - printf("%.*s\n", (int)str.size(), str.data()); - fflush(stdout); + std::cout << log.dump(-1, ' ', false, json::error_handler_t::replace) << "\n" << std::flush; + } else { + char buf[1024]; + snprintf(buf, 1024, "%4s [%24s] %s", level, function, message); + + if (!extra.empty()) { + log.merge_patch(extra); + } + std::stringstream ss; + ss << buf << " |"; + for (const auto& el : log.items()) + { + const std::string value = el.value().dump(-1, ' ', false, json::error_handler_t::replace); + snprintf(buf, 1024, " %s=%s", el.key().c_str(), value.c_str()); + ss << buf; + } + + const std::string str = ss.str(); + printf("%.*s\n", (int)str.size(), str.data()); + fflush(stdout); + } } // @@ -234,6 +257,7 @@ struct llama_server_queue { std::unique_lock lock(mutex_tasks); if (task.id == -1) { task.id = id++; + LOG_VERBOSE("new task id", {{"new_id", task.id}}); } 
queue_tasks.push_back(std::move(task)); condition_tasks.notify_one(); @@ -249,7 +273,9 @@ struct llama_server_queue { // Get the next id for creating anew task int get_new_id() { std::unique_lock lock(mutex_tasks); - return id++; + int new_id = id++; + LOG_VERBOSE("new task id", {{"new_id", new_id}}); + return new_id; } // Register function to process a new task @@ -290,8 +316,7 @@ struct llama_server_queue { void start_loop() { running = true; while (true) { - // new task arrived - LOG_VERBOSE("have new task", {}); + LOG_VERBOSE("new task may arrive", {}); { while (true) { @@ -303,7 +328,7 @@ struct llama_server_queue { task_server task = queue_tasks.front(); queue_tasks.erase(queue_tasks.begin()); lock.unlock(); - LOG_VERBOSE("callback_new_task", {}); + LOG_VERBOSE("callback_new_task", {{"task_id", task.id}}); callback_new_task(task); } LOG_VERBOSE("callback_all_task_finished", {}); @@ -384,11 +409,13 @@ struct llama_server_response { std::condition_variable condition_results; void add_waiting_task_id(int task_id) { + LOG_VERBOSE("waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); waiting_task_ids.insert(task_id); } void remove_waiting_task_id(int task_id) { + LOG_VERBOSE("remove waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); waiting_task_ids.erase(task_id); } @@ -401,7 +428,6 @@ struct llama_server_response { condition_results.wait(lock, [&]{ return !queue_results.empty(); }); - LOG_VERBOSE("condition_results unblock", {}); for (int i = 0; i < (int) queue_results.size(); i++) { @@ -426,20 +452,20 @@ struct llama_server_response { // Send a new result to a waiting task_id void send(task_result result) { std::unique_lock lock(mutex_results); - LOG_VERBOSE("send new result", {}); + LOG_VERBOSE("send new result", {{"task_id", result.id}}); for (auto& task_id : waiting_task_ids) { // LOG_TEE("waiting task id %i \n", task_id); // for now, tasks that have associated parent multitasks just get erased once multitask picks up the result if (result.multitask_id == task_id) { - LOG_VERBOSE("callback_update_multitask", {}); + LOG_VERBOSE("callback_update_multitask", {{"task_id", task_id}}); callback_update_multitask(task_id, result.id, result); continue; } if (result.id == task_id) { - LOG_VERBOSE("queue_results.push_back", {}); + LOG_VERBOSE("queue_results.push_back", {{"task_id", task_id}}); queue_results.push_back(result); condition_results.notify_all(); return; From 7d548a1827f6fc6aece6db74c9d112da42c40d68 Mon Sep 17 00:00:00 2001 From: Ashok Gelal <401055+ashokgelal@users.noreply.github.com> Date: Sun, 25 Feb 2024 10:57:34 -0500 Subject: [PATCH 705/811] readme : add Msty to UI list (#5618) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3bc512af0..d61f9171b 100644 --- a/README.md +++ b/README.md @@ -155,6 +155,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [semperai/amica](https://github.com/semperai/amica) - [withcatai/catai](https://github.com/withcatai/catai) - [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) +- [Msty](https://msty.app) (proprietary) --- From f1a98c52546d009f742bdec2154c2a314ea950a6 Mon Sep 17 00:00:00 2001 From: kwin1412 <42286931+kwin1412@users.noreply.github.com> Date: Mon, 26 Feb 2024 00:46:49 +0800 Subject: [PATCH 706/811] make : fix nvcc version is empty (#5713) fix nvcc version is empty --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/Makefile b/Makefile index f03faf6ed..068f6ed02 100644 --- a/Makefile +++ b/Makefile @@ -597,7 +597,7 @@ $(info I CC: $(shell $(CC) --version | head -n 1)) $(info I CXX: $(shell $(CXX) --version | head -n 1)) ifdef LLAMA_CUBLAS $(info I NVCC: $(shell $(NVCC) --version | tail -n 1)) -CUDA_VERSION := $(shell nvcc --version | grep -oP 'release (\K[0-9]+\.[0-9])') +CUDA_VERSION := $(shell $(NVCC) --version | grep -oP 'release (\K[0-9]+\.[0-9])') ifeq ($(shell awk -v "v=$(CUDA_VERSION)" 'BEGIN { print (v < 11.7) }'),1) ifndef CUDA_DOCKER_ARCH ifndef CUDA_POWER_ARCH From abbabc5e51d0d4656b438aec10b7fae9479ef37d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Gryta?= Date: Sun, 25 Feb 2024 19:43:00 +0100 Subject: [PATCH 707/811] ggml-quants : provide ggml_vqtbl1q_u8 for 64bit compatibility (#5711) * [ggml-quants] Provide ggml_vqtbl1q_u8 for 64bit compatibility vqtbl1q_u8 is not part of arm v7 neon library * [android-example] Remove abi filter after arm v7a fix * [github-workflows] Do not skip Android armeabi-v7a build --- .github/workflows/build.yml | 3 +- examples/llama.android/app/build.gradle.kts | 8 ++--- ggml-quants.c | 33 ++++++++++++++++++--- 3 files changed, 32 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 03d76d455..66ad85938 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -669,8 +669,7 @@ jobs: run: | cd examples/llama.android - # Skip armeabi-v7a for now (https://github.com/llvm/llvm-project/issues/65820). - ./gradlew build --no-daemon -Pskip-armeabi-v7a + ./gradlew build --no-daemon # freeBSD-latest: # runs-on: macos-12 diff --git a/examples/llama.android/app/build.gradle.kts b/examples/llama.android/app/build.gradle.kts index aadbe22c9..d42140efe 100644 --- a/examples/llama.android/app/build.gradle.kts +++ b/examples/llama.android/app/build.gradle.kts @@ -21,12 +21,8 @@ android { useSupportLibrary = true } ndk { - // Workaround for https://github.com/llvm/llvm-project/issues/65820 - // affecting armeabi-v7a. Skip armeabi-v7a when invoked with - // -Pskip-armeabi-v7a (e.g., ./gradlew build -Pskip-armeabi-v7a). - if (project.hasProperty("skip-armeabi-v7a")) { - abiFilters += listOf("arm64-v8a", "x86_64", "x86") - } + // Add NDK properties if wanted, e.g. 
+ // abiFilters += listOf("arm64-v8a") } externalNativeBuild { cmake { diff --git a/ggml-quants.c b/ggml-quants.c index 5c5f2ce1b..3d94d166d 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -462,6 +462,30 @@ inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { return res; } +// NOTE: not tested +inline static int8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { + int8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + #else #define ggml_int16x8x2_t int16x8x2_t @@ -476,6 +500,7 @@ inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { #define ggml_vld1q_s8_x2 vld1q_s8_x2 #define ggml_vld1q_s8_x4 vld1q_s8_x4 #define ggml_vqtbl1q_s8 vqtbl1q_s8 +#define ggml_vqtbl1q_u8 vqtbl1q_u8 #endif @@ -9488,8 +9513,8 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v qs += 16; vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); - vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); - vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); vs.val[0] = vceqq_u8(vs.val[0], mask2); vs.val[1] = vceqq_u8(vs.val[1], mask2); @@ -9497,8 +9522,8 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v q3s.val[1] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_1))), vreinterpretq_s8_u8(vs.val[1])); vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); - vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); - vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); vs.val[0] = vceqq_u8(vs.val[0], mask2); vs.val[1] = vceqq_u8(vs.val[1], mask2); From f7625019c51ca437a5840576d92362cfa710e4a2 Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Sun, 25 Feb 2024 13:43:50 -0500 Subject: [PATCH 708/811] server : fix crash when system prompt is bigger than batch size (#5714) The system prompt is now decoded in batches. * server : fix off-by-one n_past when start of prompt matches whole cache The tokens right after the matching part would otherwise skip a pos value. 
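For reference while reading the hunks below: the crash fix amounts to slicing the already-filled `llama_batch` into `n_batch`-sized views and calling `llama_decode` once per view, instead of submitting the whole system prompt in a single call. Here is a minimal standalone sketch of that idea, assuming the llama.cpp C API of this period (`llama_batch`, `llama_decode`); the helper name `decode_in_chunks` is illustrative and not part of this patch:

```cpp
#include <algorithm>
#include <cstdio>

#include "llama.h"

// Sketch (assumption: llama_batch layout as in llama.h of this era).
// Decode a pre-filled batch in n_batch-sized chunks so that a prompt longer
// than the configured batch size does not overflow a single llama_decode call.
static bool decode_in_chunks(llama_context * ctx, const llama_batch & batch, int32_t n_batch) {
    for (int32_t i = 0; i < batch.n_tokens; i += n_batch) {
        // size of this chunk: a full n_batch, or whatever remains at the tail
        const int32_t n_tokens = std::min(n_batch, batch.n_tokens - i);

        // non-owning view into the original batch, offset by i tokens
        llama_batch batch_view = {
            n_tokens,
            batch.token    + i,
            nullptr,            // no embeddings input
            batch.pos      + i,
            batch.n_seq_id + i,
            batch.seq_id   + i,
            batch.logits   + i,
            0, 0, 0,            // all_pos_0, all_pos_1, all_seq_id (unused here)
        };

        if (llama_decode(ctx, batch_view) != 0) {
            fprintf(stderr, "llama_decode() failed on chunk starting at token %d\n", i);
            return false;
        }
    }
    return true;
}
```

The off-by-one part is separate: as the comment added in the second hunk notes, when the cached tokens cover the entire incoming prompt, the last cached token has been sampled but not yet passed through `llama_decode`, so `n_past` is reduced by one to re-submit it and keep positions contiguous.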
--- examples/server/server.cpp | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d970202d2..c1eb61678 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -902,10 +902,24 @@ struct llama_server_context llama_batch_add(batch, system_tokens[i], i, { 0 }, false); } - if (llama_decode(ctx, batch) != 0) + for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += params.n_batch) { - LOG_TEE("%s: llama_decode() failed\n", __func__); - return; + const int32_t n_tokens = std::min(params.n_batch, (int32_t) (batch.n_tokens - i)); + llama_batch batch_view = { + n_tokens, + batch.token + i, + nullptr, + batch.pos + i, + batch.n_seq_id + i, + batch.seq_id + i, + batch.logits + i, + 0, 0, 0, // unused + }; + if (llama_decode(ctx, batch_view) != 0) + { + LOG_TEE("%s: llama_decode() failed\n", __func__); + return; + } } // assign the system KV cache to all parallel sequences @@ -1785,6 +1799,14 @@ struct llama_server_context } slot.n_past = common_part(slot.cache_tokens, prompt_tokens); + + // the last token of the cache is not in the KV cache until the next call to llama_decode + // (it was sampled, pushed into the "cache_tokens", but not yet put in the context) + if (slot.n_past > 0 && slot.n_past == (int32_t) slot.cache_tokens.size()) + { + slot.n_past -= 1; + } + slot.num_prompt_tokens_processed = slot.num_prompt_tokens - slot.n_past; if (slot.ga_n != 1) From bf08e00643fd529f748f0a858fd79f3061e3fa18 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 25 Feb 2024 22:12:24 +0200 Subject: [PATCH 709/811] llama : refactor k-shift implementation + KV defragmentation (#5691) * llama : refactor k-shift implementation ggml-ci * llama : rename llama_kv_cache_seq_shift to llama_kv_cache_seq_add * llama : cont k-shift refactoring + normalize type names ggml-ci * minor : fix MPI builds * llama : reuse n_rot from the build context ggml-ci * llama : revert enum name changes from this PR ggml-ci * llama : update llama_rope_type * llama : add comment about rope values * llama : fix build * passkey : apply kv cache updates explicitly ggml-ci * llama : change name to llama_kv_cache_update() * llama : add llama_kv_cache_seq_pos_max() * passkey : fix llama_kv_cache_seq_pos_max() usage * llama : some llama_kv_cell simplifications * llama : add llama_kv_cache_compress (EXPERIMENTAL) * llama : add alternative KV cache merging (EXPERIMENTAL) * llama : add llama_kv_cache_defrag * llama : comments * llama : remove llama_kv_cache_compress will add in a separate PR ggml-ci * llama : defragment via non-overlapping moves * llama : ggml_graph based defrag implementation ggml-ci * llama : switch the loop order in build_defrag * llama : add comments --- examples/infill/infill.cpp | 4 +- examples/main/main.cpp | 10 +- examples/passkey/passkey.cpp | 25 +- examples/server/server.cpp | 8 +- llama.cpp | 869 ++++++++++++++++++++++++----------- llama.h | 34 +- 6 files changed, 646 insertions(+), 304 deletions(-) diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 92c67b7cf..d4b8729dd 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -447,8 +447,8 @@ int main(int argc, char ** argv) { LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", n_past, n_left, n_ctx, params.n_keep, n_discard); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep 
+ 1 + n_discard, n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); + llama_kv_cache_seq_add(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); n_past -= n_discard; diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 7555dffe4..34e84d0d4 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -548,8 +548,8 @@ int main(int argc, char ** argv) { LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", n_past, n_left, n_ctx, params.n_keep, n_discard); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep , params.n_keep + n_discard); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + n_discard, n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, params.n_keep , params.n_keep + n_discard); + llama_kv_cache_seq_add(ctx, 0, params.n_keep + n_discard, n_past, -n_discard); n_past -= n_discard; @@ -576,9 +576,9 @@ int main(int argc, char ** argv) { LOG("div: [%6d, %6d] / %6d -> [%6d, %6d]\n", ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n, (ga_i + ib*bd)/ga_n, (ga_i + ib*bd + ga_w)/ga_n); LOG("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", ga_i + ib*bd + ga_w, n_past + ib*bd, dd, ga_i + ib*bd + ga_w + dd, n_past + ib*bd + dd); - llama_kv_cache_seq_shift(ctx, 0, ga_i, n_past, ib*bd); - llama_kv_cache_seq_div (ctx, 0, ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n); - llama_kv_cache_seq_shift(ctx, 0, ga_i + ib*bd + ga_w, n_past + ib*bd, dd); + llama_kv_cache_seq_add(ctx, 0, ga_i, n_past, ib*bd); + llama_kv_cache_seq_div(ctx, 0, ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n); + llama_kv_cache_seq_add(ctx, 0, ga_i + ib*bd + ga_w, n_past + ib*bd, dd); n_past -= bd; diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp index e12a1cdf1..47de67a93 100644 --- a/examples/passkey/passkey.cpp +++ b/examples/passkey/passkey.cpp @@ -126,7 +126,7 @@ int main(int argc, char ** argv) { const int n_batch = ctx_params.n_batch; const int n_batch_grp = ctx_params.n_batch/n_grp; - LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_kv_req = %d, n_grp = %d, n_batch = %d\n", __func__, n_len, n_ctx, n_kv_req, n_grp, n_batch); + LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_kv_req = %d, n_grp = %d, n_batch = %d, n_junk = %d, i_pos = %d\n", __func__, n_len, n_ctx, n_kv_req, n_grp, n_batch, n_junk, i_pos); // print the prompt token-by-token @@ -146,10 +146,11 @@ int main(int argc, char ** argv) { const int ib = i/n_batch - 1; const int bd = n_batch_grp*(n_grp - 1); - llama_kv_cache_seq_shift(ctx, 0, n_past - n_batch, n_past, ib*bd); - llama_kv_cache_seq_div (ctx, 0, n_past - n_batch + ib*bd, n_past + ib*bd, n_grp); + llama_kv_cache_seq_add (ctx, 0, n_past - n_batch, n_past, ib*bd); + llama_kv_cache_seq_div (ctx, 0, n_past - n_batch + ib*bd, n_past + ib*bd, n_grp); + llama_kv_cache_update (ctx); - n_past -= bd; + n_past = llama_kv_cache_seq_pos_max(ctx, 0) + 1; } llama_batch_clear(batch); @@ -179,10 +180,12 @@ int main(int argc, char ** argv) { LOG_TEE("%s: shifting KV cache with %d\n", __func__, n_discard); - llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); - llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); + llama_kv_cache_seq_add(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + llama_kv_cache_defrag (ctx); + llama_kv_cache_update (ctx); - n_past -= n_discard; + n_past = llama_kv_cache_seq_pos_max(ctx, 0) + 1; llama_batch_clear(batch); @@ -208,10 +211,12 @@ int main(int argc, char ** argv) { if (n_discard > 0) { 
LOG_TEE("%s: shifting KV cache with %d to free space for the answer\n", __func__, n_discard); - llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); - llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); + llama_kv_cache_seq_add(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); + llama_kv_cache_defrag (ctx); + llama_kv_cache_update (ctx); - n_past -= n_discard; + n_past = llama_kv_cache_seq_pos_max(ctx, 0) + 1; } } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c1eb61678..8aadc95a9 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1636,8 +1636,8 @@ struct llama_server_context {"n_system_tokens", system_tokens.size()}, {"n_cache_tokens", slot.cache_tokens.size()} }); - llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); - llama_kv_cache_seq_shift(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); + llama_kv_cache_seq_add(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); for (size_t i = n_keep + n_discard; i < slot.cache_tokens.size(); i++) { @@ -1941,9 +1941,9 @@ struct llama_server_context LOG_TEE("div: [%6d, %6d] / %6d -> [%6d, %6d]\n", slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w, slot.ga_n, (slot.ga_i + ib * bd) / slot.ga_n, (slot.ga_i + ib * bd + slot.ga_w) / slot.ga_n); LOG_TEE("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd, slot.ga_i + ib * bd + slot.ga_w + dd, slot.n_past_se + ib * bd + dd); - llama_kv_cache_seq_shift(ctx, slot.id, slot.ga_i, slot.n_past_se, ib * bd); + llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i, slot.n_past_se, ib * bd); llama_kv_cache_seq_div(ctx, slot.id, slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w,slot.ga_n); - llama_kv_cache_seq_shift(ctx, slot.id, slot.ga_i + ib * bd + slot.ga_w,slot.n_past_se + ib * bd, dd); + llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i + ib * bd + slot.ga_w,slot.n_past_se + ib * bd, dd); slot.n_past_se -= bd; diff --git a/llama.cpp b/llama.cpp index acd9be08a..3424b1999 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1550,8 +1550,9 @@ static const size_t MiB = 1024*kiB; static const size_t GiB = 1024*MiB; struct llama_hparams { - bool vocab_only; - bool rope_finetuned; + bool vocab_only; + bool rope_finetuned; + uint32_t n_vocab; uint32_t n_ctx_train; // context size the model was trained on uint32_t n_embd; @@ -1580,7 +1581,8 @@ struct llama_hparams { bool causal_attn = true; bool need_kq_pos = false; - uint32_t pooling_type = LLAMA_POOLING_TYPE_NONE; + enum llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_NONE; + enum llama_rope_type rope_type = LLAMA_ROPE_TYPE_NONE; bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1707,11 +1709,20 @@ struct llama_kv_cell { bool has_seq_id(const llama_seq_id & id) const { return seq_id.find(id) != seq_id.end(); } + + bool is_empty() const { + return seq_id.empty(); + } + + bool is_same_seq(const llama_kv_cell & other) const { + return seq_id == other.seq_id; + } }; // ring-buffer of cached KV data struct llama_kv_cache { bool has_shift = false; + bool do_defrag = false; // Note: The value of head isn't only used to optimize searching // for a free KV slot. 
llama_decode_internal also uses it, so it @@ -1723,6 +1734,9 @@ struct llama_kv_cache { // computed before each graph build uint32_t n = 0; + ggml_type type_k = GGML_TYPE_F16; + ggml_type type_v = GGML_TYPE_F16; + std::vector cells; std::vector k_l; // per layer @@ -1958,8 +1972,8 @@ struct llama_context { static bool llama_kv_cache_init( struct llama_kv_cache & cache, const llama_model & model, - ggml_type ktype, - ggml_type vtype, + ggml_type type_k, + ggml_type type_v, uint32_t n_ctx, bool offload) { const struct llama_hparams & hparams = model.hparams; @@ -1974,6 +1988,9 @@ static bool llama_kv_cache_init( cache.size = n_ctx; cache.used = 0; + cache.type_k = type_k; + cache.type_v = type_v; + cache.cells.clear(); cache.cells.resize(n_ctx); @@ -2014,8 +2031,8 @@ static bool llama_kv_cache_init( for (int i = 0; i < (int) n_layer; i++) { struct ggml_context * ctx = offload ? ctx_map.at(model.buft_layer[i].buft) : cache.ctxs.front(); - ggml_tensor * k = ggml_new_tensor_1d(ctx, ktype, n_embd_k_gqa*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(ctx, vtype, n_embd_v_gqa*n_ctx); + ggml_tensor * k = ggml_new_tensor_1d(ctx, type_k, n_embd_k_gqa*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(ctx, type_v, n_embd_v_gqa*n_ctx); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); @@ -2099,7 +2116,7 @@ static bool llama_kv_cache_find_slot( // find how many cells are currently in use static int32_t llama_kv_cache_cell_max(const struct llama_kv_cache & cache) { for (uint32_t i = cache.size - 1; i > 0; --i) { - if (cache.cells[i].pos >= 0 && !cache.cells[i].seq_id.empty()) { + if (cache.cells[i].pos >= 0 && !cache.cells[i].is_empty()) { return i + 1; } } @@ -2135,7 +2152,7 @@ static void llama_kv_cache_seq_rm( } else { continue; } - if (cache.cells[i].seq_id.empty()) { + if (cache.cells[i].is_empty()) { // keep count of the number of used cells if (cache.cells[i].pos >= 0) cache.used--; @@ -2186,7 +2203,7 @@ static void llama_kv_cache_seq_keep(struct llama_kv_cache & cache, llama_seq_id if (new_head != cache.size && new_head < cache.head) cache.head = new_head; } -static void llama_kv_cache_seq_shift( +static void llama_kv_cache_seq_add( struct llama_kv_cache & cache, llama_seq_id seq_id, llama_pos p0, @@ -2204,10 +2221,14 @@ static void llama_kv_cache_seq_shift( cache.cells[i].delta += delta; if (cache.cells[i].pos < 0) { - if (!cache.cells[i].seq_id.empty()) cache.used--; + if (!cache.cells[i].is_empty()) { + cache.used--; + } cache.cells[i].pos = -1; cache.cells[i].seq_id.clear(); - if (new_head == cache.size) new_head = i; + if (new_head == cache.size) { + new_head = i; + } } } } @@ -2239,6 +2260,22 @@ static void llama_kv_cache_seq_div( } } +static llama_pos llama_kv_cache_seq_pos_max(struct llama_kv_cache & cache, llama_seq_id seq_id) { + llama_pos result = 0; + + for (uint32_t i = 0; i < cache.size; ++i) { + if (cache.cells[i].has_seq_id(seq_id)) { + result = std::max(result, cache.cells[i].pos); + } + } + + return result; +} + +static void llama_kv_cache_defrag(struct llama_kv_cache & cache) { + cache.do_defrag = true; +} + // // model loading and saving // @@ -2310,7 +2347,7 @@ namespace GGUFMeta { } }; - struct ArrayInfo{ + struct ArrayInfo { const gguf_type gt; const size_t length; const void * data; @@ -2329,7 +2366,7 @@ namespace GGUFMeta { }; template - class GKV: public GKV_Base { + class GKV : public GKV_Base { GKV() = delete; public: @@ -2352,39 +2389,39 @@ namespace GGUFMeta { return "unknown"; } - static bool validate_override(const 
llama_model_kv_override_type expected_type, const struct llama_model_kv_override *override) { - if (!override) { return false; } - if (override->tag == expected_type) { + static bool validate_override(const llama_model_kv_override_type expected_type, const struct llama_model_kv_override * ovrd) { + if (!ovrd) { return false; } + if (ovrd->tag == expected_type) { LLAMA_LOG_INFO("%s: Using metadata override (%5s) '%s' = ", - __func__, override_type_to_str(override->tag), override->key); - switch (override->tag) { + __func__, override_type_to_str(ovrd->tag), ovrd->key); + switch (ovrd->tag) { case LLAMA_KV_OVERRIDE_TYPE_BOOL: { - LLAMA_LOG_INFO("%s\n", override->bool_value ? "true" : "false"); + LLAMA_LOG_INFO("%s\n", ovrd->bool_value ? "true" : "false"); } break; case LLAMA_KV_OVERRIDE_TYPE_INT: { - LLAMA_LOG_INFO("%" PRId64 "\n", override->int_value); + LLAMA_LOG_INFO("%" PRId64 "\n", ovrd->int_value); } break; case LLAMA_KV_OVERRIDE_TYPE_FLOAT: { - LLAMA_LOG_INFO("%.6f\n", override->float_value); + LLAMA_LOG_INFO("%.6f\n", ovrd->float_value); } break; default: // Shouldn't be possible to end up here, but just in case... throw std::runtime_error( format("Unsupported attempt to override %s type for metadata key %s\n", - override_type_to_str(override->tag), override->key)); + override_type_to_str(ovrd->tag), ovrd->key)); } return true; } LLAMA_LOG_WARN("%s: Warning: Bad metadata override type for key '%s', expected %s but got %s\n", - __func__, override->key, override_type_to_str(expected_type), override_type_to_str(override->tag)); + __func__, ovrd->key, override_type_to_str(expected_type), override_type_to_str(ovrd->tag)); return false; } template static typename std::enable_if::value, bool>::type - try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_TYPE_BOOL, override)) { - target = override->bool_value; + try_override(OT & target, const struct llama_model_kv_override * ovrd) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_BOOL, ovrd)) { + target = ovrd->bool_value; return true; } return false; @@ -2392,9 +2429,9 @@ namespace GGUFMeta { template static typename std::enable_if::value && std::is_integral::value, bool>::type - try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_TYPE_INT, override)) { - target = override->int_value; + try_override(OT & target, const struct llama_model_kv_override * ovrd) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_INT, ovrd)) { + target = ovrd->int_value; return true; } return false; @@ -2402,9 +2439,9 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type - try_override(T & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_TYPE_FLOAT, override)) { - target = override->float_value; + try_override(T & target, const struct llama_model_kv_override * ovrd) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_FLOAT, ovrd)) { + target = ovrd->float_value; return true; } return false; @@ -2412,17 +2449,17 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type - try_override(T & target, const struct llama_model_kv_override *override) { + try_override(T & target, const struct llama_model_kv_override * ovrd) { (void)target; - (void)override; - if (!override) { return false; } + (void)ovrd; + if (!ovrd) { return false; } // Currently, we should never end up here so it would be a bug if we do. 
throw std::runtime_error(format("Unsupported attempt to override string type for metadata key %s\n", - override ? override->key : "NULL")); + ovrd ? ovrd->key : "NULL")); } - static bool set(const gguf_context * ctx, const int k, T & target, const struct llama_model_kv_override *override = nullptr) { - if (try_override(target, override)) { + static bool set(const gguf_context * ctx, const int k, T & target, const struct llama_model_kv_override * ovrd = nullptr) { + if (try_override(target, ovrd)) { return true; } if (k < 0) { return false; } @@ -2430,12 +2467,12 @@ namespace GGUFMeta { return true; } - static bool set(const gguf_context * ctx, const char * key, T & target, const struct llama_model_kv_override *override = nullptr) { - return set(ctx, gguf_find_key(ctx, key), target, override); + static bool set(const gguf_context * ctx, const char * key, T & target, const struct llama_model_kv_override * ovrd = nullptr) { + return set(ctx, gguf_find_key(ctx, key), target, ovrd); } - static bool set(const gguf_context * ctx, const std::string & key, T & target, const struct llama_model_kv_override *override = nullptr) { - return set(ctx, key.c_str(), target, override); + static bool set(const gguf_context * ctx, const std::string & key, T & target, const struct llama_model_kv_override * ovrd = nullptr) { + return set(ctx, key.c_str(), target, ovrd); } }; } @@ -2846,6 +2883,15 @@ struct llama_model_loader { } }; +template<> +bool llama_model_loader::get_key(const enum llm_kv kid, enum llama_pooling_type & result, const bool required) { + uint32_t tmp; + const bool found = get_key(kid, tmp, required); + result = (enum llama_pooling_type) tmp; + return found; +} + + // // load LLaMA models // @@ -2926,16 +2972,16 @@ static const char * llama_model_type_name(e_model type) { default: return "?B"; } } + static const char * llama_model_vocab_type_name(enum llama_vocab_type type){ switch (type) { - case LLAMA_VOCAB_TYPE_SPM: return "SPM"; - case LLAMA_VOCAB_TYPE_BPE: return "BPE"; - case LLAMA_VOCAB_TYPE_WPM: return "WPM"; - default: return "unknown"; + case LLAMA_VOCAB_TYPE_SPM: return "SPM"; + case LLAMA_VOCAB_TYPE_BPE: return "BPE"; + case LLAMA_VOCAB_TYPE_WPM: return "WPM"; + default: return "unknown"; } } - static void llm_load_arch(llama_model_loader & ml, llama_model & model) { model.arch = ml.get_arch(); if (model.arch == LLM_ARCH_UNKNOWN) { @@ -3112,10 +3158,10 @@ static void llm_load_hparams( } break; case LLM_ARCH_BERT: { - ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); - ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); switch (hparams.n_layer) { case 3: @@ -3133,10 +3179,10 @@ static void llm_load_hparams( } break; case LLM_ARCH_NOMIC_BERT: { - ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); - ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); if (hparams.n_layer == 12 && hparams.n_embd == 768) { model.type = e_model::MODEL_137M; @@ 
-3275,6 +3321,8 @@ static void llm_load_hparams( if (hparams.f_max_alibi_bias > 0.0f) { hparams.need_kq_pos = true; } + + hparams.rope_type = llama_rope_type(&model); } // TODO: This should probably be in llama.h @@ -3577,6 +3625,8 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, hparams.n_ff); LLAMA_LOG_INFO("%s: n_expert = %u\n", __func__, hparams.n_expert); LLAMA_LOG_INFO("%s: n_expert_used = %u\n", __func__, hparams.n_expert_used); + LLAMA_LOG_INFO("%s: pooling type = %d\n", __func__, hparams.pooling_type); + LLAMA_LOG_INFO("%s: rope type = %d\n", __func__, hparams.rope_type); LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type); LLAMA_LOG_INFO("%s: freq_base_train = %.1f\n", __func__, hparams.rope_freq_base_train); LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); @@ -4598,12 +4648,6 @@ static int llama_model_load(const std::string & fname, llama_model & model, llam using llm_build_cb = std::function; -enum llm_rope_type { - LLM_ROPE, - LLM_ROPE_NEOX, - LLM_ROPE_GLM, -}; - enum llm_ffn_op_type { LLM_FFN_SILU, LLM_FFN_GELU, @@ -4649,55 +4693,6 @@ static struct ggml_tensor * llm_build_inp_embd( return inpL; } -// Persimmon: n_rot = n_embd_head_k/2 -// Other: n_rot = n_embd_head_k -static void llm_build_k_shift( - struct ggml_context * ctx, - const llama_hparams & hparams, - const llama_cparams & cparams, - const llama_kv_cache & kv, - struct ggml_cgraph * graph, - struct ggml_tensor * K_shift, - llm_rope_type type, - int64_t n_ctx, - float freq_base, - float freq_scale, - const llm_build_cb & cb) { - const int64_t n_layer = hparams.n_layer; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head_k = hparams.n_embd_head_k; - const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); - const int32_t n_rot = hparams.n_rot; - const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; - const float ext_factor = cparams.yarn_ext_factor; - const float attn_factor = cparams.yarn_attn_factor; - const float beta_fast = cparams.yarn_beta_fast; - const float beta_slow = cparams.yarn_beta_slow; - - int rope_type = 0; - - switch (type) { - case LLM_ROPE: rope_type = 0; break; - case LLM_ROPE_NEOX: rope_type = 2; break; - case LLM_ROPE_GLM: rope_type = 4; break; - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * tmp = - // we rotate only the first n_rot dimensions - ggml_rope_custom_inplace(ctx, - ggml_view_3d(ctx, kv.k_l[il], - n_embd_head_k, n_head_kv, n_ctx, - ggml_row_size(kv.k_l[il]->type, n_embd_head_k), - ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa), - 0), - K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow); - cb(tmp, "K_shifted", il); - ggml_build_forward_expand(graph, tmp); - } -} - static void llm_build_kv_store( struct ggml_context * ctx, const llama_hparams & hparams, @@ -5001,6 +4996,7 @@ struct llm_build_context { const int64_t n_embd; const int64_t n_layer; + const int64_t n_rot; const int64_t n_ctx; // user-specified context size (can be different from n_ctx_train) const int64_t n_head; const int64_t n_head_kv; @@ -5025,8 +5021,8 @@ struct llm_build_context { const int32_t kv_head; // index of where we store new KV data in the cache const int32_t n_orig_ctx; - const bool do_rope_shift; - const uint32_t pooling_type; + const enum llama_pooling_type pooling_type; + const enum llama_rope_type rope_type; const llm_build_cb & cb; @@ -5048,6 +5044,7 @@ struct 
llm_build_context { kv_self (lctx.kv_self), n_embd (hparams.n_embd), n_layer (hparams.n_layer), + n_rot (hparams.n_rot), n_ctx (cparams.n_ctx), n_head (hparams.n_head), n_head_kv (hparams.n_head_kv), @@ -5069,8 +5066,8 @@ struct llm_build_context { n_kv (worst_case ? n_ctx : kv_self.n), kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), - do_rope_shift (worst_case || kv_self.has_shift), - pooling_type (cparams.do_pooling ? hparams.pooling_type : (uint32_t)LLAMA_POOLING_TYPE_NONE), + pooling_type (cparams.do_pooling ? hparams.pooling_type : LLAMA_POOLING_TYPE_NONE), + rope_type (hparams.rope_type), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5093,6 +5090,74 @@ struct llm_build_context { } } + struct ggml_cgraph * build_k_shift() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * tmp = + // we rotate only the first n_rot dimensions + ggml_rope_custom_inplace(ctx0, + ggml_view_3d(ctx0, kv_self.k_l[il], + n_embd_head_k, n_head_kv, n_ctx, + ggml_row_size(kv_self.k_l[il]->type, n_embd_head_k), + ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa), + 0), + lctx.inp_K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(tmp, "K_shifted", il); + ggml_build_forward_expand(gf, tmp); + } + + return gf; + } + + struct ggml_cgraph * build_defrag(const std::vector & ids) { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + for (int i = 0; i < n_kv; ++i) { + const int id = ids[i]; + + if (i == id || id == n_kv) { + continue; + } + + int nm = 1; + + while (i + nm < n_kv && (int) ids[i + nm] == id + nm) { + nm++; + } + + for (int il = 0; il < n_layer; ++il) { + ggml_tensor * view_k_src = ggml_view_2d(ctx0, kv_self.k_l[il], + n_embd_k_gqa, nm, + ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa), + ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*i)); + + ggml_tensor * view_k_dst = ggml_view_2d(ctx0, kv_self.k_l[il], + n_embd_k_gqa, nm, + ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa), + ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*id)); + + ggml_tensor * view_v_src = ggml_view_2d(ctx0, kv_self.v_l[il], + nm, n_embd_v_gqa, + ggml_row_size(kv_self.v_l[il]->type, kv_self.size), + ggml_row_size(kv_self.v_l[il]->type, i)); + + ggml_tensor * view_v_dst = ggml_view_2d(ctx0, kv_self.v_l[il], + nm, n_embd_v_gqa, + ggml_row_size(kv_self.v_l[il]->type, kv_self.size), + ggml_row_size(kv_self.v_l[il]->type, id)); + + ggml_build_forward_expand(gf, ggml_cpy(ctx0, view_k_src, view_k_dst)); + ggml_build_forward_expand(gf, ggml_cpy(ctx0, view_v_src, view_v_dst)); + } + + i += nm - 1; + } + + return gf; + } + struct ggml_cgraph * build_llama() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -5114,11 +5179,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -5154,14 +5214,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, 
n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5302,11 +5362,6 @@ struct llm_build_context { struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); cb(KQ_pos, "KQ_pos", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -5330,12 +5385,12 @@ struct llm_build_context { case MODEL_7B: Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); break; @@ -5420,11 +5475,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * attn_norm; @@ -5463,13 +5513,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5639,10 +5689,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * residual = inpL; @@ -5700,7 +5746,7 @@ struct llm_build_context { // RoPE the first n_rot of q/k, pass the other half, and concat. 
struct ggml_tensor * qrot = ggml_view_3d( - ctx0, tmpq, hparams.n_rot, n_head, n_tokens, + ctx0, tmpq, n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, 0 @@ -5708,7 +5754,7 @@ struct llm_build_context { cb(qrot, "qrot", il); struct ggml_tensor * krot = ggml_view_3d( - ctx0, tmpk, hparams.n_rot, n_head, n_tokens, + ctx0, tmpk, n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, 0 @@ -5717,29 +5763,29 @@ struct llm_build_context { // get the second half of tmpq, e.g tmpq[n_rot:, :, :] struct ggml_tensor * qpass = ggml_view_3d( - ctx0, tmpq, hparams.n_rot, n_head, n_tokens, + ctx0, tmpq, n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, - ggml_element_size(tmpq) * hparams.n_rot + ggml_element_size(tmpq) * n_rot ); cb(qpass, "qpass", il); struct ggml_tensor * kpass = ggml_view_3d( - ctx0, tmpk, hparams.n_rot, n_head, n_tokens, + ctx0, tmpk, n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, - ggml_element_size(tmpk) * hparams.n_rot + ggml_element_size(tmpk) * n_rot ); cb(kpass, "kpass", il); struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, qrot, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(qrotated, "qrotated", il); struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, krot, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(krotated, "krotated", il); @@ -5991,14 +6037,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -6287,11 +6333,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -6328,14 +6369,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -6410,11 +6451,6 @@ struct 
llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -6444,13 +6480,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -6524,11 +6560,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -6564,14 +6595,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -6645,11 +6676,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { attn_norm_output = llm_build_norm(ctx0, inpL, hparams, model.layers[il].attn_norm, @@ -6687,7 +6713,7 @@ struct llm_build_context { Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); @@ -6698,7 +6724,7 @@ struct llm_build_context { cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -6767,11 +6793,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { 
- llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { // norm @@ -6795,14 +6816,14 @@ struct llm_build_context { cb(Vcur, "Vcur", il); Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, hparams.n_rot, n_head, n_tokens), inp_pos, - n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ctx0, ggml_reshape_3d(ctx0, Qcur, n_rot, n_head, n_tokens), inp_pos, + n_embd_head, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Kcur, hparams.n_rot, n_head_kv, n_tokens), inp_pos, - n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ctx0, ggml_reshape_3d(ctx0, Kcur, n_rot, n_head_kv, n_tokens), inp_pos, + n_embd_head, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); @@ -6972,11 +6993,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { cur = llm_build_norm(ctx0, inpL, hparams, model.layers[il].attn_norm, @@ -7002,14 +7018,14 @@ struct llm_build_context { struct ggml_tensor * Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); struct ggml_tensor * Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, tmpk, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -7080,11 +7096,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -7120,14 +7131,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -7199,11 +7210,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - 
llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -7239,14 +7245,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -7331,11 +7337,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -7371,14 +7372,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -7467,11 +7468,6 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - for (int il = 0; il < n_layer; ++il) { // norm @@ -7494,7 +7490,7 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head_k, n_head, n_tokens), inp_pos, - n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_embd_head_k, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Qcur, "Qcur", il); @@ -7503,7 +7499,7 @@ struct llm_build_context { Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head_k, n_head_kv, n_tokens), inp_pos, - n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + n_embd_head_k, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); @@ -7556,6 +7552,40 @@ struct llm_build_context { } }; +static struct ggml_cgraph * llama_build_graph_defrag(llama_context & lctx, const std::vector & ids) { + llama_batch dummy; + dummy.n_tokens = 0; + + llm_build_cb cb = [&](struct ggml_tensor * , const char * , int ) { }; + + struct llm_build_context llm(lctx, dummy, cb, false); + + llm.init(); + + struct ggml_cgraph * result = llm.build_defrag(ids); + + llm.free(); + + return result; +} + +static struct ggml_cgraph * 
llama_build_graph_k_shift(llama_context & lctx) { + llama_batch dummy; + dummy.n_tokens = 0; + + llm_build_cb cb = [&](struct ggml_tensor * , const char * , int ) { }; + + struct llm_build_context llm(lctx, dummy, cb, false); + + llm.init(); + + struct ggml_cgraph * result = llm.build_k_shift(); + + llm.free(); + + return result; +} + static struct ggml_cgraph * llama_build_graph( llama_context & lctx, const llama_batch & batch, @@ -7675,6 +7705,20 @@ static struct ggml_cgraph * llama_build_graph( return result; } +static void llama_set_k_shift(llama_context & lctx) { + const auto & cparams = lctx.cparams; + + const int64_t n_ctx = cparams.n_ctx; + + assert(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); + + int32_t * data = (int32_t *) lctx.inp_K_shift->data; + + for (int i = 0; i < n_ctx; ++i) { + data[i] = lctx.kv_self.cells[i].delta; + } +} + static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { // // set input data @@ -7742,18 +7786,6 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (kv_self.has_shift) { - const int64_t n_ctx = cparams.n_ctx; - - assert(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); - - int32_t * data = (int32_t *) lctx.inp_K_shift->data; - - for (int i = 0; i < n_ctx; ++i) { - data[i] = lctx.kv_self.cells[i].delta; - } - } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_MEAN) { const int64_t n_tokens = batch.n_tokens; @@ -7798,6 +7830,34 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } +static void llama_graph_compute( + llama_context & lctx, + ggml_cgraph * gf, + int n_threads) { +#ifdef GGML_USE_MPI + const int64_t n_layer = lctx.model.hparams.n_layer; + ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); +#endif + +#ifdef GGML_USE_METAL + if (ggml_backend_is_metal(lctx.backend_metal)) { + ggml_backend_metal_set_n_cb(lctx.backend_metal, n_threads); + } +#endif + + if (lctx.backend_cpu != nullptr) { + ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); + } + + ggml_backend_sched_graph_compute(lctx.sched, gf); + + // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); + +#ifdef GGML_USE_MPI + ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); +#endif +} + // decode a batch of tokens by evaluating the transformer // // - lctx: llama context @@ -7893,14 +7953,17 @@ static int llama_decode_internal( //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); + llama_kv_cache_update(&lctx); + ggml_backend_sched_reset(lctx.sched); ggml_backend_sched_set_eval_callback(lctx.sched, lctx.cparams.cb_eval, lctx.cparams.cb_eval_user_data); ggml_cgraph * gf = llama_build_graph(lctx, batch, false); // the output is always the last tensor in the graph - struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; + if (strcmp(res->name, "result_output") == 0) { // the embeddings could be the second to last tensor, or the third to last tensor if (strcmp(embeddings->name, "result_norm") != 0) { @@ -7927,40 +7990,12 @@ static int llama_decode_internal( n_threads = std::min(4, n_threads); } -#ifdef GGML_USE_MPI - const int64_t n_layer = hparams.n_layer; - ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); -#endif - -#ifdef GGML_USE_METAL - if (ggml_backend_is_metal(lctx.backend_metal)) { - 
ggml_backend_metal_set_n_cb(lctx.backend_metal, n_threads); - } -#endif - - if (lctx.backend_cpu != nullptr) { - ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); - } - llama_set_inputs(lctx, batch); - ggml_backend_sched_graph_compute(lctx.sched, gf); - - // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); - -#ifdef GGML_USE_MPI - ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); -#endif + llama_graph_compute(lctx, gf, n_threads); // update the kv ring buffer { - if (kv_self.has_shift) { - kv_self.has_shift = false; - for (uint32_t i = 0; i < kv_self.size; ++i) { - kv_self.cells[i].delta = 0; - } - } - kv_self.head += n_tokens; // Ensure kv cache head points to a valid index. @@ -8056,6 +8091,221 @@ static int llama_decode_internal( return 0; } +// find holes from the beginning of the KV cache and fill them by moving data from the end of the cache +static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { + auto & kv_self = lctx.kv_self; + + const uint32_t n_kv = llama_kv_cache_cell_max(kv_self); + const uint32_t n_used = kv_self.used; + + assert(n_used <= n_kv); + + const int64_t t_start = ggml_time_us(); + + // number of cells moved + uint32_t n_moves = 0; + + // determine which KV cells to move where + // + // cell i moves to ids[i] + // + // if ids[i] == i || ids[i] == n_kv, then cell i is not moved + // + std::vector ids(n_kv, n_kv); + + for (uint32_t i0 = 0; i0 < n_used; ++i0) { + const auto & cell0 = kv_self.cells[i0]; + + if (!cell0.is_empty()) { + ids[i0] = i0; + + continue; + } + + // found a hole - fill it with data from the end of the cache + + // determine the size of the hole + uint32_t nh = 1; + while (i0 + nh < n_used && kv_self.cells[i0 + nh].is_empty()) { + nh++; + } + + // starting from the end, find nh non-empty cells + uint32_t nf = 0; + uint32_t is = n_kv - 1; + for (; is > i0; --is) { + const auto & cell1 = kv_self.cells[is]; + + if (cell1.is_empty() || ids[is] != n_kv) { + continue; + } + + // non-empty cell which is not yet moved + nf++; + + if (nf == nh) { + break; + } + } + + // this can only happen if `n_used` is not accurate, which would be a bug + GGML_ASSERT(nf == nh && "KV defrag bug: nf != nh"); + + nf = 0; + + // go back and move the nf cells to the hole + for (uint32_t i1 = is; i1 < n_kv; ++i1) { + const auto & cell1 = kv_self.cells[i1]; + + if (cell1.is_empty() || ids[i1] != n_kv) { + continue; + } + + // this cell goes to (i0 + nf) + ids[i1] = i0 + nf; + + // move the cell meta data + kv_self.cells[i0 + nf] = cell1; + + n_moves++; + nf++; + } + + LLAMA_LOG_INFO("(tmp log) KV defrag: move [%u, %u) to [%u, %u)\n", is, n_kv, i0, i0 + nh); + + i0 += nh - 1; + } + + if (n_moves == 0) { + return; + } + + LLAMA_LOG_INFO("(tmp log) KV defrag cell moves: %u\n", n_moves); + + kv_self.head = n_used; + kv_self.used = n_used; + + // zero the rest of the cells + for (uint32_t i = n_used; i < n_kv; ++i) { + kv_self.cells[i] = llama_kv_cell(); + } + +#if 0 + // CPU defrag + // + // TODO: optimizations are possible: + // - multiple threads + // - avoid copying to the host memory when already there + // + // likely not worth the effort, as we have ggml_graph based defrag + // + + const auto & hparams = lctx.model.hparams; + + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + + const uint32_t kv_size = kv_self.size; + + std::vector buf_k; + std::vector buf_v; + + for (uint32_t il = 0; il < n_layer; ++il) { 
+ const size_t k_size_row = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa); + const size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_size); + + const size_t v_size_el = ggml_type_size(kv_self.v_l[il]->type); + const size_t v_size = ggml_row_size (kv_self.v_l[il]->type, n_embd_v_gqa*kv_size); + + buf_k.resize(k_size); + buf_v.resize(v_size); + + ggml_backend_tensor_get(kv_self.k_l[il], buf_k.data(), 0, buf_k.size()); + ggml_backend_tensor_get(kv_self.v_l[il], buf_v.data(), 0, buf_v.size()); + + // batch move [i, i+nm) to [id, id+nm) + // note: cells can move only to a lower index + for (uint32_t i = 0; i < n_kv; ++i) { + const uint32_t id = ids[i]; + + if (i == id || id == n_kv) { + continue; + } + + uint32_t nm = 1; + + while (i + nm < n_kv && ids[i + nm] == id + nm) { + nm++; + } + + // move keys + { + const int64_t os = i*k_size_row; + const int64_t od = id*k_size_row; + + memcpy(buf_k.data() + od, buf_k.data() + os, nm*k_size_row); + } + + // move values (note: they are transposed) + { + const int64_t os = i; + const int64_t od = id; + + for (uint32_t j = 0; j < n_embd_v_gqa; ++j) { + memcpy(buf_v.data() + (od + j*kv_size)*v_size_el, buf_v.data() + (os + j*kv_size)*v_size_el, nm*v_size_el); + } + } + + i += nm - 1; + } + + ggml_backend_tensor_set(kv_self.k_l[il], buf_k.data(), 0, buf_k.size()); + ggml_backend_tensor_set(kv_self.v_l[il], buf_v.data(), 0, buf_v.size()); + } +#else + // ggml_graph defrag + + ggml_cgraph * gf = llama_build_graph_defrag(lctx, ids); + + llama_graph_compute(lctx, gf, lctx.cparams.n_threads); +#endif + + const int64_t t_end = ggml_time_us(); + + LLAMA_LOG_INFO("(tmp log) KV defrag time: %.3f ms\n", (t_end - t_start)/1000.0); +} + +static void llama_kv_cache_update_internal(struct llama_context & lctx) { + // apply K-shift if needed + if (lctx.model.hparams.rope_type != LLAMA_ROPE_TYPE_NONE && lctx.kv_self.has_shift) { + llama_set_k_shift(lctx); + + { + ggml_cgraph * gf = llama_build_graph_k_shift(lctx); + + llama_graph_compute(lctx, gf, lctx.cparams.n_threads); + } + + { + auto & kv_self = lctx.kv_self; + + kv_self.has_shift = false; + + for (uint32_t i = 0; i < kv_self.size; ++i) { + kv_self.cells[i].delta = 0; + } + } + } + + // defragment the KV cache if needed + if (lctx.kv_self.do_defrag) { + llama_kv_cache_defrag_internal(lctx); + + lctx.kv_self.do_defrag = false; + } +} + // // tokenizer // @@ -11671,8 +11921,7 @@ struct llama_context * llama_new_context_with_model( } ctx->backends.push_back(ctx->backend_cpu); - if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, - cparams.n_ctx, cparams.offload_kqv)) { + if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, cparams.n_ctx, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; @@ -11820,6 +12069,49 @@ enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } +enum llama_rope_type llama_rope_type(const struct llama_model * model) { + switch (model->arch) { + // these models do not use RoPE + case LLM_ARCH_GPT2: + case LLM_ARCH_GPTJ: + case LLM_ARCH_GPTNEOX: + case LLM_ARCH_MPT: + case LLM_ARCH_REFACT: + case LLM_ARCH_BLOOM: + return LLAMA_ROPE_TYPE_NONE; + + // use what we call a normal RoPE, operating on pairs of consecutive head values + case LLM_ARCH_LLAMA: + case LLM_ARCH_BAICHUAN: + case LLM_ARCH_STARCODER: + case LLM_ARCH_PLAMO: + case LLM_ARCH_CODESHELL: + case LLM_ARCH_ORION: + case LLM_ARCH_INTERNLM2: + case 
LLM_ARCH_MINICPM: + case LLM_ARCH_GEMMA: + return LLAMA_ROPE_TYPE_NORM; + + // the pairs of head values are offset by n_rot/2 + case LLM_ARCH_FALCON: + case LLM_ARCH_PERSIMMON: + case LLM_ARCH_BERT: + case LLM_ARCH_NOMIC_BERT: + case LLM_ARCH_STABLELM: + case LLM_ARCH_QWEN: + case LLM_ARCH_QWEN2: + case LLM_ARCH_PHI2: + return LLAMA_ROPE_TYPE_NEOX; + + // all model arches should be listed explicitly here + case LLM_ARCH_UNKNOWN: + GGML_ASSERT(false && "unknown architecture"); + break; + } + + return LLAMA_ROPE_TYPE_NONE; +} + int32_t llama_n_vocab(const struct llama_model * model) { return model->vocab.id_to_token.size(); } @@ -12062,12 +12354,12 @@ void llama_kv_cache_seq_keep(struct llama_context * ctx, llama_seq_id seq_id) { llama_kv_cache_seq_keep(ctx->kv_self, seq_id); } -void llama_kv_cache_seq_shift(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, llama_pos delta) { +void llama_kv_cache_seq_add(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, llama_pos delta) { if (delta == 0) { return; } - llama_kv_cache_seq_shift(ctx->kv_self, seq_id, p0, p1, delta); + llama_kv_cache_seq_add(ctx->kv_self, seq_id, p0, p1, delta); } void llama_kv_cache_seq_div(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, int d) { @@ -12078,6 +12370,19 @@ void llama_kv_cache_seq_div(struct llama_context * ctx, llama_seq_id seq_id, lla llama_kv_cache_seq_div(ctx->kv_self, seq_id, p0, p1, d); } +llama_pos llama_kv_cache_seq_pos_max(struct llama_context * ctx, llama_seq_id seq_id) { + return llama_kv_cache_seq_pos_max(ctx->kv_self, seq_id); +} + +void llama_kv_cache_defrag(struct llama_context * ctx) { + llama_kv_cache_defrag(ctx->kv_self); +} + +void llama_kv_cache_update(struct llama_context * ctx) { + llama_kv_cache_update_internal(*ctx); +} + + // Returns the *maximum* size of the state size_t llama_get_state_size(const struct llama_context * ctx) { // we don't know size of rng until we actually serialize it. so reserve more than enough memory for its serialized state. 
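For illustration only, not part of the patch: a minimal sketch of how the new KV cache calls above (llama_kv_cache_seq_add, llama_kv_cache_defrag, llama_kv_cache_update) could be combined from application code, assuming the first n_discard positions of sequence seq have already been cleared elsewhere; the helper name and its arguments are placeholders.

    #include "llama.h"

    // shift the remaining tokens of `seq` back by n_discard positions and force the
    // pending KV cache updates to be applied immediately
    static void shift_and_compact(struct llama_context * ctx, llama_seq_id seq, llama_pos n_discard) {
        // move tokens of `seq` at positions >= n_discard back by n_discard
        // (p1 < 0 means the range extends to infinity, as documented in llama.h)
        llama_kv_cache_seq_add(ctx, seq, n_discard, -1, -n_discard);

        // optionally also request a defragmentation pass of the KV cache
        llama_kv_cache_defrag(ctx);

        // apply the K-shift/defrag now instead of waiting for the next llama_decode()
        llama_kv_cache_update(ctx);
    }

Without the explicit llama_kv_cache_update() call, the same work is applied lazily on the next llama_decode(), as the llama.h comments below point out.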
@@ -12204,10 +12509,10 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const auto n_layer = hparams.n_layer; - const auto n_embd_k_gqa = hparams.n_embd_k_gqa(); - const auto n_embd_v_gqa = hparams.n_embd_v_gqa(); - const auto n_ctx = cparams.n_ctx; + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_ctx = cparams.n_ctx; const size_t kv_buf_size = kv_self.total_size(); const uint32_t kv_head = kv_self.head; @@ -12222,14 +12527,16 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat if (kv_buf_size) { std::vector tmp_buf; for (int il = 0; il < (int) n_layer; ++il) { - size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + const size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + tmp_buf.resize(k_size); ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); // v is not contiguous, copy row by row - size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); - size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + tmp_buf.resize(v_row_size); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*v_row_stride, tmp_buf.size()); @@ -12316,10 +12623,10 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const int n_layer = hparams.n_layer; - const int n_embd_k_gqa = hparams.n_embd_k_gqa(); - const int n_embd_v_gqa = hparams.n_embd_v_gqa(); - const int n_ctx = cparams.n_ctx; + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_ctx = cparams.n_ctx; size_t kv_buf_size; uint32_t kv_head; @@ -12335,13 +12642,15 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { GGML_ASSERT(kv_self.total_size() == kv_buf_size); for (int il = 0; il < (int) n_layer; ++il) { - size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + const size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); inp += k_size; // v is not contiguous, copy row by row - size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); - size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*v_row_stride, v_row_size); inp += v_row_size; diff --git a/llama.h b/llama.h index 947284ea2..ff131996d 100644 --- a/llama.h +++ b/llama.h @@ -64,6 +64,15 @@ extern "C" { LLAMA_VOCAB_TYPE_WPM = 2, // WordPiece }; + // note: these values should be synchronized with ggml_rope + // TODO: maybe move this enum to ggml.h (ggml_rope_type) + enum llama_rope_type { + LLAMA_ROPE_TYPE_NONE = -1, + LLAMA_ROPE_TYPE_NORM = 0, + LLAMA_ROPE_TYPE_NEOX = 2, + 
LLAMA_ROPE_TYPE_GLM = 4, + }; + enum llama_token_type { LLAMA_TOKEN_TYPE_UNDEFINED = 0, LLAMA_TOKEN_TYPE_NORMAL = 1, @@ -360,6 +369,7 @@ extern "C" { LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); LLAMA_API enum llama_vocab_type llama_vocab_type(const struct llama_model * model); + LLAMA_API enum llama_rope_type llama_rope_type (const struct llama_model * model); LLAMA_API int32_t llama_n_vocab (const struct llama_model * model); LLAMA_API int32_t llama_n_ctx_train(const struct llama_model * model); @@ -514,10 +524,12 @@ extern "C" { llama_seq_id seq_id); // Adds relative position "delta" to all tokens that belong to the specified sequence and have positions in [p0, p1) - // If the KV cache is RoPEd, the KV data is updated accordingly + // If the KV cache is RoPEd, the KV data is updated accordingly: + // - lazily on next llama_decode() + // - explicitly with llama_kv_cache_update() // p0 < 0 : [0, p1] // p1 < 0 : [p0, inf) - LLAMA_API void llama_kv_cache_seq_shift( + LLAMA_API void llama_kv_cache_seq_add( struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, @@ -525,7 +537,9 @@ extern "C" { llama_pos delta); // Integer division of the positions by factor of `d > 1` - // If the KV cache is RoPEd, the KV data is updated accordingly + // If the KV cache is RoPEd, the KV data is updated accordingly: + // - lazily on next llama_decode() + // - explicitly with llama_kv_cache_update() // p0 < 0 : [0, p1] // p1 < 0 : [p0, inf) LLAMA_API void llama_kv_cache_seq_div( @@ -535,6 +549,20 @@ extern "C" { llama_pos p1, int d); + // Returns the largest position present in the KV cache for the specified sequence + LLAMA_API llama_pos llama_kv_cache_seq_pos_max( + struct llama_context * ctx, + llama_seq_id seq_id); + + // Defragment the KV cache + // This will be applied: + // - lazily on next llama_decode() + // - explicitly with llama_kv_cache_update() + LLAMA_API void llama_kv_cache_defrag(struct llama_context * ctx); + + // Apply the KV cache updates (such as K-shifts, defragmentation, etc.) + LLAMA_API void llama_kv_cache_update(struct llama_context * ctx); + // // State / sessions // From 8b350356b28f782deab63d8b0e9ae103ceb25fcd Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 21:46:29 +0100 Subject: [PATCH 710/811] server: docs - refresh and tease a little bit more the http server (#5718) * server: docs - refresh and tease a little bit more the http server * Rephrase README.md server doc Co-authored-by: Georgi Gerganov * Update examples/server/README.md Co-authored-by: Georgi Gerganov * Update examples/server/README.md Co-authored-by: Georgi Gerganov * Update README.md --------- Co-authored-by: Georgi Gerganov --- README.md | 3 +++ examples/server/README.md | 18 +++++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d61f9171b..d0af5d0b9 100644 --- a/README.md +++ b/README.md @@ -114,6 +114,9 @@ Typically finetunes of the base models below are supported as well. - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) - [x] [Yi-VL](https://huggingface.co/models?search=Yi-VL) +**HTTP server** + +[llama.cpp web server](./examples/server) is a lightweight [OpenAI API](https://github.com/openai/openai-openapi) compatible HTTP server that can be used to serve local models and easily connect them to existing clients. 
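As a purely illustrative companion to the paragraph above (not part of the patch), the sketch below posts an OpenAI-style chat completion request to the server using libcurl; the host/port (localhost:8080) and the /v1/chat/completions route are assumptions about the server's defaults and should be checked against the server README.

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void) {
        // assumed defaults: adjust the host, port and route to match how the server was started
        const char * url  = "http://localhost:8080/v1/chat/completions";
        const char * body = "{\"messages\":[{\"role\":\"user\",\"content\":\"Hello!\"}]}";

        curl_global_init(CURL_GLOBAL_DEFAULT);
        CURL * curl = curl_easy_init();
        if (curl == NULL) {
            return 1;
        }

        struct curl_slist * hdrs = curl_slist_append(NULL, "Content-Type: application/json");

        curl_easy_setopt(curl, CURLOPT_URL,        url);
        curl_easy_setopt(curl, CURLOPT_HTTPHEADER, hdrs);
        curl_easy_setopt(curl, CURLOPT_POSTFIELDS, body);

        // libcurl writes the JSON response to stdout by default
        const CURLcode res = curl_easy_perform(curl);
        if (res != CURLE_OK) {
            fprintf(stderr, "request failed: %s\n", curl_easy_strerror(res));
        }

        curl_slist_free_all(hdrs);
        curl_easy_cleanup(curl);
        curl_global_cleanup();

        return res == CURLE_OK ? 0 : 1;
    }

Assuming those defaults hold, the same request can be issued by any existing OpenAI-compatible client instead of hand-written C; the sketch builds with `cc client.c -lcurl`.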
**Bindings:** diff --git a/examples/server/README.md b/examples/server/README.md index cb3fd6054..0e9bd7fd4 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -1,8 +1,20 @@ -# llama.cpp/example/server +# LLaMA.cpp HTTP Server -This example demonstrates a simple HTTP API server and a simple web front end to interact with llama.cpp. +Fast, lightweight, pure C/C++ HTTP server based on [httplib](https://github.com/yhirose/cpp-httplib), [nlohmann::json](https://github.com/nlohmann/json) and **llama.cpp**. -Command line options: +Set of LLM REST APIs and a simple web front end to interact with llama.cpp. + +**Features:** + * LLM inference of F16 and quantum models on GPU and CPU + * [OpenAI API](https://github.com/openai/openai-openapi) compatible chat completions and embeddings routes + * Parallel decoding with multi-user support + * Continuous batching + * Multimodal (wip) + * Monitoring endpoints + +The project is under active development, and we are [looking for feedback and contributors](https://github.com/ggerganov/llama.cpp/issues/4216). + +**Command line options:** - `--threads N`, `-t N`: Set the number of threads to use during generation. - `-tb N, --threads-batch N`: Set the number of threads to use during batch and prompt processing. If not specified, the number of threads will be set to the number of threads used for generation. From e3965cf35aac00d4e24998c8a3d0093ae1d98bd3 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 22:48:33 +0100 Subject: [PATCH 711/811] server: tests - slow inference causes timeout on the CI (#5715) * server: tests - longer inference timeout for CI --- common/sampling.cpp | 2 +- examples/server/tests/features/steps/steps.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index de4331a11..e67096bea 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -266,7 +266,7 @@ static llama_token llama_sampling_sample_impl( // } //} - LOG("sampled token: %5d: '%s'\n", id, llama_token_to_piece(ctx_main, id).c_str()); + //LOG("sampled token: %5d: '%s'\n", id, llama_token_to_piece(ctx_main, id).c_str()); } } diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 8e4babf20..ad87fcb82 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -699,6 +699,8 @@ async def wait_for_health_status(context, if context.debug: print(f"Starting checking for health for expected_health_status={expected_health_status}") timeout = 3 # seconds + if expected_health_status == 'ok': + timeout = 10 # CI slow inference interval = 0.5 counter = 0 async with aiohttp.ClientSession() as session: @@ -736,7 +738,7 @@ async def wait_for_health_status(context, if n_completions > 0: return - assert False, 'timeout exceeded' + assert False, f'{expected_health_status} timeout exceeded {counter}s>={timeout}' def assert_embeddings(embeddings): From c39373398803c669056304090050fe3f44b41bf9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 25 Feb 2024 00:17:11 +0000 Subject: [PATCH 712/811] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/5863c27340ba4de8f83e7e3c023b9599c3cb3c80' (2024-02-16) → 'github:NixOS/nixpkgs/cbc4211f0afffe6dfd2478a62615dd5175a13f9a' (2024-02-23) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/flake.lock b/flake.lock index 47d6448b5..9f659ba8f 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1708118438, - "narHash": "sha256-kk9/0nuVgA220FcqH/D2xaN6uGyHp/zoxPNUmPCMmEE=", + "lastModified": 1708655239, + "narHash": "sha256-ZrP/yACUvDB+zbqYJsln4iwotbH6CTZiTkANJ0AgDv4=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "5863c27340ba4de8f83e7e3c023b9599c3cb3c80", + "rev": "cbc4211f0afffe6dfd2478a62615dd5175a13f9a", "type": "github" }, "original": { From 269de86ba073b5dc9ce687c11a3bc4d7d873b962 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 26 Feb 2024 08:30:17 +0200 Subject: [PATCH 713/811] llama : fix Gemma rope type (#5691) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 3424b1999..28430254f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12089,7 +12089,6 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { case LLM_ARCH_ORION: case LLM_ARCH_INTERNLM2: case LLM_ARCH_MINICPM: - case LLM_ARCH_GEMMA: return LLAMA_ROPE_TYPE_NORM; // the pairs of head values are offset by n_rot/2 @@ -12101,6 +12100,7 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { case LLM_ARCH_QWEN: case LLM_ARCH_QWEN2: case LLM_ARCH_PHI2: + case LLM_ARCH_GEMMA: return LLAMA_ROPE_TYPE_NEOX; // all model arches should be listed explicitly here From 8a533f0d9078396ebaee9ba213038a1322976dee Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Mon, 26 Feb 2024 09:56:10 +0100 Subject: [PATCH 714/811] server: CI tests reduce build matrix (#5725) --- .github/workflows/server.yml | 78 ++++++++---------------------------- 1 file changed, 17 insertions(+), 61 deletions(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index ed27dc528..1211ba128 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -6,11 +6,10 @@ on: push: branches: - master - - test/server-add-ci-test # FIXME remove - paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/tests/**.*'] pull_request: types: [opened, synchronize, reopened] - paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/tests/**.*'] jobs: server: @@ -18,45 +17,21 @@ jobs: strategy: matrix: - build: [noavx, avx2, avx, avx512, cublas, clblast, openblas, kompute, vulkan] sanitizer: [ADDRESS, THREAD, UNDEFINED] build_type: [Debug, Release] include: - - build: 'noavx' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX=OFF -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF' - image: ubuntu:latest - - build: 'avx2' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON' - image: ubuntu:latest - - build: 'avx' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX2=OFF' - image: ubuntu:latest - - build: 'avx512' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX512=ON' - image: ubuntu:latest - experimental: true - - build: 'cublas' - defines: '-DLLAMA_NATIVE=OFF 
-DLLAMA_BUILD_SERVER=ON -DLLAMA_CUBLAS=ON' - image: nvidia/cuda:12.3.1-devel-ubuntu22.04 - arch_not_available: true # require nvidia docker engine - - build: 'clblast' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_CLBLAST=ON' - image: ubuntu:latest - arch_not_available: true - - build: 'openblas' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS' - image: ubuntu:latest - - build: 'kompute' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON' - image: ubuntu:latest - arch_not_available: true - - build: 'vulkan' - defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_VULKAN=ON' - image: ubuntu:latest - arch_not_available: true + - build_type: Release + sanitizer: "" + exclude: + - build_type: Release + sanitizer: ADDRESS + - build_type: Release + sanitizer: THREAD + - build_type: Release + sanitizer: UNDEFINED container: - image: ${{ matrix.image }} + image: ubuntu:latest ports: - 8888 options: --cpus 4 @@ -72,40 +47,22 @@ jobs: apt-get update apt-get -y install \ build-essential \ - pkg-config \ git \ cmake \ python3-pip \ wget \ psmisc - - name: Download CLBlast - id: get_clblast - if: ${{ matrix.build == 'clblast' }} - run: | - apt install -y libclblast-dev - - - name: Download OpenBLAS - id: get_openblas - if: ${{ matrix.build == 'openblas' }} - run: | - apt-get -y install libopenblas-dev - - - name: Install Vulkan SDK - id: get_vulkan - if: ${{ matrix.build == 'kompute' || matrix.build == 'vulkan' }} - run: | - wget -qO- https://packages.lunarg.com/lunarg-signing-key-pub.asc | tee /etc/apt/trusted.gpg.d/lunarg.asc - wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list http://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list - apt-get update - apt-get -y install vulkan-sdk - - name: Build id: cmake_build run: | mkdir build cd build - cmake .. -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} ${{ matrix.defines }} + cmake .. \ + -DLLAMA_NATIVE=OFF \ + -DLLAMA_BUILD_SERVER=ON \ + -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \ + -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; cmake --build . --config ${{ matrix.build_type }} -j $(nproc) --target server - name: Tests dependencies @@ -121,7 +78,6 @@ jobs: - name: Tests id: server_integration_test - continue-on-error: ${{ matrix.experimental || matrix.arch_not_available }} run: | cd examples/server/tests PORT=8888 ./tests.sh From 4804215cb833841ffb15a710a16b77ca0a29eb4b Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Mon, 26 Feb 2024 11:41:34 +0100 Subject: [PATCH 715/811] server: CI fix trailing space (#5728) --- .github/workflows/server.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 1211ba128..0b6f6669b 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -62,7 +62,7 @@ jobs: -DLLAMA_NATIVE=OFF \ -DLLAMA_BUILD_SERVER=ON \ -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \ - -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; + -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON ; cmake --build . 
--config ${{ matrix.build_type }} -j $(nproc) --target server - name: Tests dependencies From 67fd33132fab93e6c2087bd6fa656a8a57419efa Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 26 Feb 2024 14:02:12 +0200 Subject: [PATCH 716/811] unicode : reuse iterator (#5726) --- unicode.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unicode.h b/unicode.h index 263260702..10a5dab01 100644 --- a/unicode.h +++ b/unicode.h @@ -404,7 +404,8 @@ static std::unordered_map codepoint_type_map() { static int codepoint_type(uint32_t cp) { static std::unordered_map codepoint_types = codepoint_type_map(); - return codepoint_types.find(cp) == codepoint_types.end() ? CODEPOINT_TYPE_UNIDENTIFIED : codepoint_types.at(cp); + const auto it = codepoint_types.find(cp); + return it == codepoint_types.end() ? CODEPOINT_TYPE_UNIDENTIFIED : it->second; } static int codepoint_type(const std::string & utf8) { From e849078c6e09e72fdd2c95ba61f5fba9a7b2d9ef Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:02:11 +0000 Subject: [PATCH 717/811] [SYCL] Add support for soft_max ALiBi (#5639) * Add support for bias * Update pre-processor * rm commented code * fix format * fix CI --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 248 +++++++++++++++++++++++++++++++++----------------- llama.cpp | 4 +- 2 files changed, 168 insertions(+), 84 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index c6c3c6e6f..835967fb6 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -8126,23 +8126,51 @@ static void diag_mask_inf_f32(const float * x, float * dst, const int ncols, con dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } -static void soft_max_f32(const float * x, const float * y, float * dst, const int ncols, const int nrows_y, const float scale, - const sycl::nd_item<3> &item_ct1, float *buf) { + +template +static void soft_max_f32(const float * x, const float * mask, const float *pos, float * dst, const int ncols_par, + const int nrows_y, const float scale, const float max_bias, const float m0, + const float m1, uint32_t n_head_log2, const sycl::nd_item<3> &item_ct1, float *buf) { + const int ncols = ncols_template == 0 ? ncols_par : ncols_template; + const int tid = item_ct1.get_local_id(2); const int rowx = item_ct1.get_group(2); const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension - const int block_size = item_ct1.get_local_range(2); + const int block_size = block_size_template == 0 ? item_ct1.get_local_range(2) : block_size_template; const int warp_id = item_ct1.get_local_id(2) / WARP_SIZE; const int lane_id = item_ct1.get_local_id(2) % WARP_SIZE; + float slope = 0.0f; + + // ALiBi + if (max_bias > 0.0f) { + const uint32_t h = rowx/nrows_y; // head index + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = sycl::pow(base, float(exp)); + } + + float * vals = vals_smem ? buf + WARP_SIZE : dst + rowx*ncols; float max_val = -INFINITY; - for (int col = tid; col < ncols; col += block_size) { + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + break; + } + const int ix = rowx*ncols + col; const int iy = rowy*ncols + col; - max_val = sycl::max(max_val, x[ix] * scale + (y ? y[iy] : 0.0f)); + + const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + (pos ? 
slope*pos[col] : 0.0f); + + vals[col] = val; + max_val = sycl::max(max_val, val); } // find the max value in the block @@ -8151,30 +8179,12 @@ static void soft_max_f32(const float * x, const float * y, float * dst, const in if (warp_id == 0) { buf[lane_id] = -INFINITY; } - /* - DPCT1118:12: SYCL group functions and algorithms must be encountered in - converged control flow. You may need to adjust the code. - */ - /* - DPCT1065:60: Consider replacing sycl::nd_item::barrier() with - sycl::nd_item::barrier(sycl::access::fence_space::local_space) for - better performance if there is no access to global memory. - */ - item_ct1.barrier(); + item_ct1.barrier(sycl::access::fence_space::local_space); if (lane_id == 0) { buf[warp_id] = max_val; } - /* - DPCT1118:13: SYCL group functions and algorithms must be encountered in - converged control flow. You may need to adjust the code. - */ - /* - DPCT1065:61: Consider replacing sycl::nd_item::barrier() with - sycl::nd_item::barrier(sycl::access::fence_space::local_space) for - better performance if there is no access to global memory. - */ - item_ct1.barrier(); + item_ct1.barrier(sycl::access::fence_space::local_space); max_val = buf[lane_id]; max_val = warp_reduce_max(max_val, item_ct1); @@ -8182,13 +8192,16 @@ static void soft_max_f32(const float * x, const float * y, float * dst, const in float tmp = 0.f; - for (int col = tid; col < ncols; col += block_size) { - const int ix = rowx*ncols + col; - const int iy = rowy*ncols + col; - const float val = - sycl::native::exp((x[ix] * scale + (y ? y[iy] : 0.0f)) - max_val); +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + if (ncols_template == 0 && col >= ncols) { + break; + } + + const float val = sycl::native::exp(vals[col] - max_val); tmp += val; - dst[ix] = val; + vals[col] = val; } // find the sum of exps in the block @@ -8197,40 +8210,29 @@ static void soft_max_f32(const float * x, const float * y, float * dst, const in if (warp_id == 0) { buf[lane_id] = 0.f; } - /* - DPCT1118:14: SYCL group functions and algorithms must be encountered in - converged control flow. You may need to adjust the code. - */ - /* - DPCT1065:62: Consider replacing sycl::nd_item::barrier() with - sycl::nd_item::barrier(sycl::access::fence_space::local_space) for - better performance if there is no access to global memory. - */ - item_ct1.barrier(); + item_ct1.barrier(sycl::access::fence_space::local_space); if (lane_id == 0) { buf[warp_id] = tmp; } - /* - DPCT1118:15: SYCL group functions and algorithms must be encountered in - converged control flow. You may need to adjust the code. - */ - /* - DPCT1065:63: Consider replacing sycl::nd_item::barrier() with - sycl::nd_item::barrier(sycl::access::fence_space::local_space) for - better performance if there is no access to global memory. 
- */ - item_ct1.barrier(); + item_ct1.barrier(sycl::access::fence_space::local_space); tmp = buf[lane_id]; tmp = warp_reduce_sum(tmp, item_ct1); } - const float inv_tmp = 1.f / tmp; + const float inv_sum = 1.f / tmp; - for (int col = tid; col < ncols; col += block_size) { - const int i = rowx*ncols + col; - dst[i] *= inv_tmp; +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + return; + } + + const int idst = rowx*ncols + col; + dst[idst] = vals[col] * inv_sum; } } @@ -10867,35 +10869,96 @@ static void diag_mask_inf_f32_sycl(const float *x, float *dst, }); } -static void soft_max_f32_sycl(const float *x, const float *y, float *dst, - const int ncols_x, const int nrows_x, - const int nrows_y, const float scale, +template +static void soft_max_f32_submitter(const float * x, const float * mask, const float *pos, float * dst, const int ncols_par, + const int nrows_y, const float scale, const float max_bias, const float m0, + const float m1, uint32_t n_head_log2, sycl::range<3> block_nums, sycl::range<3> block_dims, + const size_t n_local_scratch, dpct::queue_ptr stream) { + stream->submit([&](sycl::handler &cgh) { + sycl::local_accessor local_buf_acc(n_local_scratch, cgh); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { + soft_max_f32(x, mask, pos, dst, ncols_par, + nrows_y, scale, max_bias, m0, + m1, n_head_log2, item_ct1, + local_buf_acc.get_pointer()); + }); + }); +} + +static void soft_max_f32_sycl(const float * x, const float * mask, const float * pos, + float * dst, const int ncols_x, const int nrows_x, + const int nrows_y, const float scale, const float max_bias, dpct::queue_ptr stream) { int nth = WARP_SIZE; while (nth < ncols_x && nth < SYCL_SOFT_MAX_BLOCK_SIZE) nth *= 2; const sycl::range<3> block_dims(1, 1, nth); const sycl::range<3> block_nums(1, 1, nrows_x); - /* - DPCT1049:46: The work-group size passed to the SYCL kernel may exceed the - limit. To get the device limit, query info::device::max_work_group_size. - Adjust the work-group size if needed. - */ - stream->submit([&](sycl::handler &cgh) { - /* - DPCT1101:96: 'SYCL_SOFT_MAX_BLOCK_SIZE/WARP_SIZE' expression was - replaced with a value. Modify the code to use the original expression, - provided in comments, if it is correct. 
- */ - sycl::local_accessor buf_acc_ct1( - sycl::range<1>(32 /*SYCL_SOFT_MAX_BLOCK_SIZE/WARP_SIZE*/), cgh); + const size_t n_local_scratch = (GGML_PAD(ncols_x, WARP_SIZE) + WARP_SIZE); + static_assert(SYCL_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); - cgh.parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - soft_max_f32(x, y, dst, ncols_x, nrows_y, scale, item_ct1, - buf_acc_ct1.get_pointer()); - }); - }); + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + const size_t local_mem_size = stream->get_device().get_info(); + if (n_local_scratch*sizeof(float) < local_mem_size) { + switch (ncols_x) { + case 32: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 64: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 128: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 256: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 512: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 1024: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 2048: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + case 4096: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + default: + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, n_local_scratch, stream); + break; + } + } else { + soft_max_f32_submitter(x, mask, pos, dst, ncols_x, nrows_y, scale, + max_bias, m0, m1, n_head_log2, block_nums, + block_dims, WARP_SIZE, stream); + } } template @@ -12435,14 +12498,35 @@ inline void ggml_sycl_op_soft_max(const ggml_tensor *src0, const int64_t ne00 = src0->ne[0]; const int64_t nrows_x = ggml_nrows(src0); - const int64_t nrows_y = src1 ? ggml_nrows(src1) : 1; + const int64_t nrows_y = src0->ne[1]; float scale = 1.0f; - memcpy(&scale, dst->op_params, sizeof(float)); + float max_bias = 0.0f; - soft_max_f32_sycl(src0_dd, src1 ? 
src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + memcpy(&scale, dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, dst->op_params + 1, sizeof(float)); - (void) dst; + // positions tensor + float * src2_dd = nullptr; + sycl_pool_alloc src2_f; + + ggml_tensor * src2 = dst->src[2]; + const bool use_src2 = src2 != nullptr; + + if (use_src2) { + const bool src2_on_device = src2->backend == GGML_BACKEND_TYPE_GPU; + + if (src2_on_device) { + ggml_tensor_extra_gpu * src2_extra = (ggml_tensor_extra_gpu *) src2->extra; + src2_dd = (float *) src2_extra->data_device[g_main_device]; + } else { + src2_dd = src2_f.alloc(ggml_nelements(src2)); + SYCL_CHECK(ggml_sycl_cpy_tensor_2d(src2_dd, src2, 0, 0, 0, 1, main_stream)); + } + } + + soft_max_f32_sycl(src0_dd, src1 ? src1_dd : nullptr, src2_dd, dst_dd, ne00, + nrows_x, nrows_y, scale, max_bias, main_stream); } inline void ggml_sycl_op_scale(const ggml_tensor *src0, const ggml_tensor *src1, diff --git a/llama.cpp b/llama.cpp index 28430254f..f549e7d04 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4894,8 +4894,8 @@ static struct ggml_tensor * llm_build_kqv( ggml_mul_mat_set_prec(kq, GGML_PREC_F32); } -#if defined(GGML_USE_VULKAN) || defined(GGML_USE_KOMPUTE) || defined(GGML_USE_SYCL) -#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Vulkan, Kompute, and SYCL") +#if defined(GGML_USE_VULKAN) || defined(GGML_USE_KOMPUTE) +#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Vulkan, and Kompute") #pragma message(" Falling back to ggml_alibi(). Will become an error in Mar 2024") #pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5488") if (hparams.f_max_alibi_bias > 0.0f) { From c4d7f8178608440506e5489bae0109e4ca12e44a Mon Sep 17 00:00:00 2001 From: Artem Date: Mon, 26 Feb 2024 17:15:28 +0300 Subject: [PATCH 718/811] readme : update ui list (#5731) * Add LLMFarm (ui for iOS) to list --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d0af5d0b9..507a2888b 100644 --- a/README.md +++ b/README.md @@ -159,6 +159,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [withcatai/catai](https://github.com/withcatai/catai) - [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) - [Msty](https://msty.app) (proprietary) +- [LLMFarm](https://github.com/guinmoon/LLMFarm?tab=readme-ov-file) (MIT) --- From 47bb7b48c7cec9d8f57d56812ce811ec130b89a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Mon, 26 Feb 2024 15:36:38 +0100 Subject: [PATCH 719/811] CUDA: fix DEBUG_CUDA_MALLOC (#5729) --- ggml-cuda.cu | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index fb6d4f7d2..15322fb59 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8079,8 +8079,8 @@ static void * ggml_cuda_pool_malloc_leg(int device, size_t size, size_t * actual *actual_size = look_ahead_size; g_cuda_pool_size[device] += look_ahead_size; #ifdef DEBUG_CUDA_MALLOC - fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, - (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); + fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, device, nnz, + (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[device]/1024/1024), (uint32_t)(size/1024/1024)); #endif return 
ptr; } @@ -8166,7 +8166,7 @@ static void * ggml_cuda_pool_malloc_vmm(int device, size_t size, size_t * actual g_cuda_pool_used[device] += size; #ifdef DEBUG_CUDA_MALLOC - printf("cuda pool[%d]: allocated %llu bytes at %llx [%s]\n", id, (unsigned long long) size, ptr); + printf("cuda pool[%d]: allocated %llu bytes at %llx\n", device, (unsigned long long) size, ptr); #endif return ptr; @@ -8176,7 +8176,7 @@ static void ggml_cuda_pool_free_vmm(int device, void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); #ifdef DEBUG_CUDA_MALLOC - printf("cuda pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); + printf("cuda pool[%d]: freed %llu bytes at %llx\n", device, (unsigned long long) size, ptr); #endif g_cuda_pool_used[device] -= size; From a33e6a0d2a66104ea9a906bdbf8a94d050189d91 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 26 Feb 2024 18:28:38 +0200 Subject: [PATCH 720/811] Adding IQ2_S and IQ2_M to complete coverage of the 2-3 bit quantization range (#5721) * Adding IQ2_S and IQ2_M as a single cumulative commit * Update examples/quantize/quantize.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Iwan Kawrakow Co-authored-by: Georgi Gerganov --- examples/quantize/quantize.cpp | 7 +- ggml-cuda.cu | 358 ++++++++++++++- ggml-metal.m | 37 +- ggml-metal.metal | 487 +++++++++++++++++++++ ggml-quants.c | 775 ++++++++++++++++++++++++++++++++- ggml-quants.h | 14 + ggml.c | 31 ++ ggml.h | 2 + llama.cpp | 71 ++- llama.h | 4 +- tests/test-backend-ops.cpp | 2 +- tests/test-quantize-fns.cpp | 4 +- 12 files changed, 1754 insertions(+), 38 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index ab7e72aaf..2d187823f 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -23,14 +23,16 @@ static const std::vector QUANT_OPTIONS = { { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, { "IQ2_XXS",LLAMA_FTYPE_MOSTLY_IQ2_XXS," 2.06 bpw quantization", }, { "IQ2_XS", LLAMA_FTYPE_MOSTLY_IQ2_XS, " 2.31 bpw quantization", }, + { "IQ2_S", LLAMA_FTYPE_MOSTLY_IQ2_S, " 2.5 bpw quantization", }, + { "IQ2_M", LLAMA_FTYPE_MOSTLY_IQ2_M, " 2.7 bpw quantization", }, { "IQ1_S", LLAMA_FTYPE_MOSTLY_IQ1_S, " 1.56 bpw quantization", }, { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "IQ3_XXS",LLAMA_FTYPE_MOSTLY_IQ3_XXS," 3.06 bpw quantization", }, { "IQ3_S", LLAMA_FTYPE_MOSTLY_IQ3_S, " 3.44 bpw quantization", }, - { "IQ3_M", LLAMA_FTYPE_MOSTLY_IQ3_M, " 3.66 bpw quantization mix", }, + { "IQ3_M", LLAMA_FTYPE_MOSTLY_IQ3_M, " 3.66 bpw quantization mix", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, - { "Q3_K_XS",LLAMA_FTYPE_MOSTLY_Q3_K_XS,"3-bit extra small quantization" , }, + { "IQ3_XS", LLAMA_FTYPE_MOSTLY_IQ3_XS, " 3.3 bpw quantization" , }, { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1764 ppl @ LLaMA-v1-7B", }, @@ -292,6 +294,7 @@ int main(int argc, char ** argv) { } if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || + params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || params.ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) && imatrix_data.empty()) { fprintf(stderr, 
"\n===============================================================================================\n"); fprintf(stderr, "Please do not use IQ1_S, IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 15322fb59..964fb7351 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -523,6 +523,17 @@ typedef struct { } block_iq2_xs; static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); +// 2.5625 bpw quants +#define QR2_S 8 +#define QI2_S (QK_K / (4*QR2_S)) +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t scales[QK_K/32]; +} block_iq2_s; +static_assert(sizeof(block_iq2_s) == sizeof(ggml_fp16_t) + QK_K/4 + QK_K/16, "wrong iq2_s block size/padding"); + #define QR3_XXS 8 #define QI3_XXS (QK_K / (4*QR3_XXS)) typedef struct { @@ -1689,6 +1700,265 @@ static const __device__ uint64_t iq2xs_grid[512] = { 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, }; +static const __device__ uint64_t iq2s_grid[1024] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x08080808192b192b, + 0x08080808192b2b19, 0x080808082b080808, 0x080808082b08082b, 0x080808082b081919, + 0x080808082b082b08, 0x080808082b190819, 0x080808082b191908, 0x080808082b2b0808, + 0x080808082b2b1919, 0x080808082b2b2b2b, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, 0x080808190819082b, + 0x0808081908191919, 0x0808081908192b08, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, 0x0808081919082b08, + 0x0808081919190819, 0x0808081919191908, 0x080808191919192b, 0x0808081919192b19, + 0x08080819192b0808, 0x08080819192b1919, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 0x080808192b190808, 0x080808192b19082b, 0x080808192b191919, + 0x080808192b2b0819, 0x080808192b2b1908, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b08081919, 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, + 0x0808082b082b0808, 0x0808082b082b2b2b, 0x0808082b19080819, 0x0808082b19081908, + 0x0808082b1908192b, 0x0808082b19082b19, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b081919, 0x0808082b2b082b2b, 0x0808082b2b191908, + 0x0808082b2b2b082b, 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, 0x0808190808191919, + 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, 0x08081908082b192b, + 0x08081908082b2b19, 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, + 0x0808190819082b08, 0x0808190819082b2b, 0x0808190819190819, 0x0808190819191908, + 0x080819081919192b, 0x0808190819192b19, 0x08081908192b0808, 0x08081908192b082b, + 0x08081908192b1919, 0x080819082b080819, 0x080819082b081908, 0x080819082b08192b, + 0x080819082b082b19, 0x080819082b190808, 0x080819082b191919, 0x080819082b192b08, + 0x080819082b2b0819, 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, + 0x0808191908081919, 0x0808191908082b08, 0x0808191908082b2b, 
0x0808191908190819, + 0x0808191908191908, 0x080819190819192b, 0x0808191908192b19, 0x08081919082b0808, + 0x08081919082b1919, 0x08081919082b2b08, 0x0808191919080819, 0x0808191919081908, + 0x080819191908192b, 0x0808191919082b19, 0x0808191919190808, 0x080819191919082b, + 0x0808191919191919, 0x0808191919192b08, 0x08081919192b0819, 0x08081919192b1908, + 0x080819192b080808, 0x080819192b08082b, 0x080819192b081919, 0x080819192b082b08, + 0x080819192b190819, 0x080819192b191908, 0x080819192b2b0808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b0808192b, 0x0808192b08082b19, 0x0808192b08190808, + 0x0808192b08191919, 0x0808192b19080808, 0x0808192b19081919, 0x0808192b19082b08, + 0x0808192b19190819, 0x0808192b19191908, 0x0808192b192b0808, 0x0808192b2b080819, + 0x0808192b2b081908, 0x0808192b2b190808, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808190819, 0x08082b0808191908, + 0x08082b080819192b, 0x08082b0808192b19, 0x08082b08082b0808, 0x08082b08082b1919, + 0x08082b08082b2b2b, 0x08082b0819080819, 0x08082b0819081908, 0x08082b081908192b, + 0x08082b0819082b19, 0x08082b0819190808, 0x08082b081919082b, 0x08082b0819191919, + 0x08082b0819192b08, 0x08082b08192b0819, 0x08082b08192b1908, 0x08082b082b080808, + 0x08082b082b081919, 0x08082b082b191908, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b190819082b, 0x08082b1908191919, + 0x08082b1908192b08, 0x08082b19082b0819, 0x08082b1919080808, 0x08082b1919081919, + 0x08082b1919082b08, 0x08082b1919190819, 0x08082b1919191908, 0x08082b19192b0808, + 0x08082b192b080819, 0x08082b192b190808, 0x08082b2b08080808, 0x08082b2b08190819, + 0x08082b2b08191908, 0x08082b2b082b082b, 0x08082b2b082b2b08, 0x08082b2b082b2b2b, + 0x08082b2b19190808, 0x08082b2b2b192b19, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, + 0x0819080808191919, 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, + 0x08190808082b192b, 0x0819080819080808, 0x081908081908082b, 0x0819080819081919, + 0x0819080819082b08, 0x0819080819190819, 0x0819080819191908, 0x081908081919192b, + 0x0819080819192b19, 0x08190808192b0808, 0x08190808192b082b, 0x08190808192b1919, + 0x08190808192b2b08, 0x081908082b080819, 0x081908082b081908, 0x081908082b08192b, + 0x081908082b190808, 0x081908082b191919, 0x081908082b192b08, 0x081908082b2b0819, + 0x081908082b2b1908, 0x0819081908080808, 0x081908190808082b, 0x0819081908081919, + 0x0819081908082b08, 0x0819081908082b2b, 0x0819081908190819, 0x0819081908191908, + 0x081908190819192b, 0x0819081908192b19, 0x08190819082b0808, 0x08190819082b082b, + 0x08190819082b1919, 0x08190819082b2b08, 0x0819081919080819, 0x0819081919081908, + 0x081908191908192b, 0x0819081919082b19, 0x0819081919190808, 0x081908191919082b, + 0x0819081919191919, 0x0819081919192b08, 0x08190819192b0819, 0x08190819192b1908, + 0x081908192b080808, 0x081908192b08082b, 0x081908192b081919, 0x081908192b082b08, + 0x081908192b190819, 0x081908192b191908, 0x0819082b08080819, 0x0819082b08081908, + 0x0819082b08082b19, 0x0819082b08190808, 0x0819082b08191919, 0x0819082b082b0819, + 0x0819082b082b1908, 0x0819082b19080808, 0x0819082b19081919, 0x0819082b19190819, + 0x0819082b19191908, 0x0819082b2b080819, 0x0819082b2b081908, 0x0819082b2b190808, + 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 0x0819190808191908, 0x081919080819192b, 0x0819190808192b19, + 0x08191908082b0808, 0x08191908082b1919, 0x08191908082b2b08, 0x0819190819080819, + 
0x0819190819081908, 0x081919081908192b, 0x0819190819082b19, 0x0819190819190808, + 0x081919081919082b, 0x0819190819191919, 0x0819190819192b08, 0x08191908192b0819, + 0x08191908192b1908, 0x081919082b080808, 0x081919082b08082b, 0x081919082b081919, + 0x081919082b082b08, 0x081919082b190819, 0x081919082b191908, 0x081919082b2b0808, + 0x0819191908080819, 0x0819191908081908, 0x081919190808192b, 0x0819191908082b19, + 0x0819191908190808, 0x081919190819082b, 0x0819191908191919, 0x0819191908192b08, + 0x08191919082b0819, 0x08191919082b1908, 0x0819191919080808, 0x081919191908082b, + 0x0819191919081919, 0x0819191919082b08, 0x0819191919190819, 0x0819191919191908, + 0x08191919192b0808, 0x081919192b080819, 0x081919192b081908, 0x081919192b190808, + 0x0819192b08080808, 0x0819192b08081919, 0x0819192b08082b08, 0x0819192b08190819, + 0x0819192b08191908, 0x0819192b082b0808, 0x0819192b19080819, 0x0819192b19081908, + 0x0819192b19190808, 0x0819192b2b080808, 0x0819192b2b2b2b2b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b080808192b, 0x08192b0808082b19, 0x08192b0808190808, + 0x08192b0808191919, 0x08192b0808192b08, 0x08192b08082b0819, 0x08192b0819080808, + 0x08192b081908082b, 0x08192b0819081919, 0x08192b0819082b08, 0x08192b0819190819, + 0x08192b0819191908, 0x08192b08192b0808, 0x08192b082b080819, 0x08192b082b081908, + 0x08192b1908080808, 0x08192b190808082b, 0x08192b1908081919, 0x08192b1908082b08, + 0x08192b1908190819, 0x08192b1908191908, 0x08192b19082b0808, 0x08192b1919080819, + 0x08192b1919081908, 0x08192b1919190808, 0x08192b19192b2b19, 0x08192b192b2b082b, + 0x08192b2b08081908, 0x08192b2b08190808, 0x08192b2b19080808, 0x08192b2b1919192b, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, 0x082b080808082b08, + 0x082b080808190819, 0x082b080808191908, 0x082b08080819192b, 0x082b080808192b19, + 0x082b0808082b0808, 0x082b0808082b1919, 0x082b0808082b2b2b, 0x082b080819080819, + 0x082b080819081908, 0x082b080819190808, 0x082b08081919082b, 0x082b080819191919, + 0x082b0808192b1908, 0x082b08082b080808, 0x082b08082b082b2b, 0x082b08082b191908, + 0x082b08082b2b2b2b, 0x082b081908080819, 0x082b081908081908, 0x082b081908190808, + 0x082b08190819082b, 0x082b081908191919, 0x082b0819082b0819, 0x082b081919080808, + 0x082b08191908082b, 0x082b081919081919, 0x082b081919190819, 0x082b081919191908, + 0x082b0819192b0808, 0x082b08192b080819, 0x082b08192b081908, 0x082b08192b190808, + 0x082b082b08080808, 0x082b082b08082b2b, 0x082b082b082b082b, 0x082b082b082b2b08, + 0x082b082b082b2b2b, 0x082b082b19081908, 0x082b082b19190808, 0x082b082b2b082b08, + 0x082b082b2b082b2b, 0x082b082b2b2b2b08, 0x082b190808080819, 0x082b190808081908, + 0x082b19080808192b, 0x082b190808082b19, 0x082b190808190808, 0x082b190808191919, + 0x082b190808192b08, 0x082b1908082b0819, 0x082b1908082b1908, 0x082b190819080808, + 0x082b19081908082b, 0x082b190819081919, 0x082b190819082b08, 0x082b190819190819, + 0x082b190819191908, 0x082b1908192b0808, 0x082b19082b080819, 0x082b19082b081908, + 0x082b19082b190808, 0x082b191908080808, 0x082b191908081919, 0x082b191908082b08, + 0x082b191908190819, 0x082b191908191908, 0x082b1919082b0808, 0x082b191919080819, + 0x082b191919081908, 0x082b191919190808, 0x082b1919192b192b, 0x082b19192b080808, + 0x082b192b08080819, 0x082b192b08081908, 0x082b192b08190808, 0x082b192b19080808, + 0x082b192b19192b19, 0x082b2b0808080808, 0x082b2b0808081919, 0x082b2b0808190819, + 0x082b2b0808191908, 0x082b2b0819080819, 0x082b2b0819081908, 0x082b2b0819190808, + 0x082b2b082b082b2b, 0x082b2b082b2b2b2b, 0x082b2b1908080819, 0x082b2b1908081908, + 0x082b2b1908190808, 
0x082b2b192b191919, 0x082b2b2b08082b2b, 0x082b2b2b082b082b, + 0x082b2b2b192b1908, 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, + 0x190808080819082b, 0x1908080808191919, 0x1908080808192b08, 0x1908080808192b2b, + 0x19080808082b0819, 0x19080808082b1908, 0x19080808082b192b, 0x1908080819080808, + 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, 0x1908080819082b2b, + 0x1908080819190819, 0x1908080819191908, 0x190808081919192b, 0x1908080819192b19, + 0x19080808192b0808, 0x19080808192b082b, 0x19080808192b1919, 0x190808082b080819, + 0x190808082b081908, 0x190808082b190808, 0x190808082b191919, 0x190808082b192b08, + 0x190808082b2b0819, 0x190808082b2b1908, 0x1908081908080808, 0x190808190808082b, + 0x1908081908081919, 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x190808190819192b, 0x1908081908192b19, 0x19080819082b0808, 0x19080819082b082b, + 0x19080819082b1919, 0x1908081919080819, 0x1908081919081908, 0x190808191908192b, + 0x1908081919082b19, 0x1908081919190808, 0x190808191919082b, 0x1908081919191919, + 0x1908081919192b08, 0x19080819192b0819, 0x19080819192b1908, 0x190808192b080808, + 0x190808192b08082b, 0x190808192b081919, 0x190808192b082b08, 0x190808192b190819, + 0x190808192b191908, 0x190808192b2b0808, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b08191919, 0x1908082b08192b08, + 0x1908082b082b1908, 0x1908082b19080808, 0x1908082b19081919, 0x1908082b19082b08, + 0x1908082b19190819, 0x1908082b19191908, 0x1908082b192b0808, 0x1908082b2b080819, + 0x1908082b2b081908, 0x1908190808080808, 0x190819080808082b, 0x1908190808081919, + 0x1908190808082b08, 0x1908190808082b2b, 0x1908190808190819, 0x1908190808191908, + 0x190819080819192b, 0x1908190808192b19, 0x19081908082b0808, 0x19081908082b082b, + 0x19081908082b1919, 0x19081908082b2b08, 0x1908190819080819, 0x1908190819081908, + 0x190819081908192b, 0x1908190819082b19, 0x1908190819190808, 0x190819081919082b, + 0x1908190819191919, 0x1908190819192b08, 0x19081908192b0819, 0x19081908192b1908, + 0x190819082b080808, 0x190819082b08082b, 0x190819082b081919, 0x190819082b082b08, + 0x190819082b190819, 0x190819082b191908, 0x190819082b2b0808, 0x1908191908080819, + 0x1908191908081908, 0x190819190808192b, 0x1908191908082b19, 0x1908191908190808, + 0x190819190819082b, 0x1908191908191919, 0x1908191908192b08, 0x19081919082b0819, + 0x19081919082b1908, 0x1908191919080808, 0x190819191908082b, 0x1908191919081919, + 0x1908191919082b08, 0x1908191919190819, 0x1908191919191908, 0x19081919192b0808, + 0x19081919192b2b2b, 0x190819192b080819, 0x190819192b081908, 0x190819192b190808, + 0x1908192b08080808, 0x1908192b0808082b, 0x1908192b08081919, 0x1908192b08082b08, + 0x1908192b08190819, 0x1908192b08191908, 0x1908192b082b0808, 0x1908192b19080819, + 0x1908192b19081908, 0x1908192b19190808, 0x1908192b2b080808, 0x1908192b2b2b1919, + 0x19082b0808080819, 0x19082b0808081908, 0x19082b0808082b19, 0x19082b0808190808, + 0x19082b080819082b, 0x19082b0808191919, 0x19082b0808192b08, 0x19082b08082b0819, + 0x19082b08082b1908, 0x19082b0819080808, 0x19082b081908082b, 0x19082b0819081919, + 0x19082b0819082b08, 0x19082b0819190819, 0x19082b0819191908, 0x19082b08192b0808, + 0x19082b082b081908, 0x19082b082b190808, 0x19082b1908080808, 0x19082b190808082b, + 0x19082b1908081919, 0x19082b1908082b08, 0x19082b1908190819, 0x19082b1908191908, + 0x19082b19082b0808, 0x19082b1919080819, 0x19082b1919081908, 0x19082b1919190808, + 0x19082b192b080808, 0x19082b192b19192b, 
0x19082b2b08080819, 0x19082b2b08081908, + 0x19082b2b08190808, 0x19082b2b19080808, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, 0x1919080808191908, + 0x191908080819192b, 0x1919080808192b19, 0x19190808082b0808, 0x19190808082b082b, + 0x19190808082b1919, 0x19190808082b2b08, 0x1919080819080819, 0x1919080819081908, + 0x191908081908192b, 0x1919080819082b19, 0x1919080819190808, 0x191908081919082b, + 0x1919080819191919, 0x1919080819192b08, 0x19190808192b0819, 0x19190808192b1908, + 0x191908082b080808, 0x191908082b08082b, 0x191908082b081919, 0x191908082b082b08, + 0x191908082b190819, 0x191908082b191908, 0x1919081908080819, 0x1919081908081908, + 0x191908190808192b, 0x1919081908082b19, 0x1919081908190808, 0x191908190819082b, + 0x1919081908191919, 0x1919081908192b08, 0x19190819082b0819, 0x19190819082b1908, + 0x1919081919080808, 0x191908191908082b, 0x1919081919081919, 0x1919081919082b08, + 0x1919081919190819, 0x1919081919191908, 0x19190819192b0808, 0x191908192b080819, + 0x191908192b081908, 0x191908192b190808, 0x1919082b08080808, 0x1919082b08081919, + 0x1919082b08082b08, 0x1919082b08190819, 0x1919082b08191908, 0x1919082b082b0808, + 0x1919082b19080819, 0x1919082b19081908, 0x1919082b19190808, 0x1919082b192b2b19, + 0x1919082b2b080808, 0x1919190808080819, 0x1919190808081908, 0x191919080808192b, + 0x1919190808082b19, 0x1919190808190808, 0x191919080819082b, 0x1919190808191919, + 0x1919190808192b08, 0x19191908082b0819, 0x19191908082b1908, 0x1919190819080808, + 0x191919081908082b, 0x1919190819081919, 0x1919190819082b08, 0x1919190819190819, + 0x1919190819191908, 0x19191908192b0808, 0x191919082b080819, 0x191919082b081908, + 0x191919082b190808, 0x1919191908080808, 0x191919190808082b, 0x1919191908081919, + 0x1919191908082b08, 0x1919191908190819, 0x1919191908191908, 0x19191919082b0808, + 0x1919191919080819, 0x1919191919081908, 0x1919191919190808, 0x191919192b080808, + 0x1919192b08080819, 0x1919192b08081908, 0x1919192b08190808, 0x1919192b082b192b, + 0x1919192b19080808, 0x19192b0808080808, 0x19192b080808082b, 0x19192b0808081919, + 0x19192b0808082b08, 0x19192b0808190819, 0x19192b0808191908, 0x19192b08082b0808, + 0x19192b0819080819, 0x19192b0819081908, 0x19192b0819190808, 0x19192b0819192b2b, + 0x19192b082b080808, 0x19192b1908080819, 0x19192b1908081908, 0x19192b1908190808, + 0x19192b1919080808, 0x19192b2b08080808, 0x19192b2b08192b19, 0x19192b2b2b081919, + 0x19192b2b2b2b2b08, 0x192b080808080819, 0x192b080808081908, 0x192b08080808192b, + 0x192b080808190808, 0x192b08080819082b, 0x192b080808191919, 0x192b080808192b08, + 0x192b0808082b0819, 0x192b0808082b1908, 0x192b080819080808, 0x192b080819081919, + 0x192b080819082b08, 0x192b080819190819, 0x192b080819191908, 0x192b0808192b0808, + 0x192b08082b081908, 0x192b08082b190808, 0x192b081908080808, 0x192b08190808082b, + 0x192b081908081919, 0x192b081908082b08, 0x192b081908190819, 0x192b081908191908, + 0x192b0819082b0808, 0x192b081919080819, 0x192b081919081908, 0x192b081919190808, + 0x192b08192b080808, 0x192b08192b192b19, 0x192b082b08081908, 0x192b082b08190808, + 0x192b082b19080808, 0x192b082b1919192b, 0x192b082b2b2b0819, 0x192b190808080808, + 0x192b190808081919, 0x192b190808082b08, 0x192b190808190819, 0x192b190808191908, + 0x192b1908082b0808, 0x192b190819080819, 0x192b190819081908, 0x192b190819190808, + 0x192b19082b080808, 0x192b191908080819, 0x192b191908081908, 0x192b191908190808, + 0x192b191919080808, 0x192b191919082b2b, 0x192b1919192b2b08, 0x192b19192b19082b, + 0x192b192b08080808, 0x192b192b2b191908, 0x192b2b0808080819, 
0x192b2b0808081908, + 0x192b2b0808190808, 0x192b2b08192b1919, 0x192b2b082b192b08, 0x192b2b1908080808, + 0x192b2b19082b2b2b, 0x192b2b2b1908082b, 0x192b2b2b2b2b0819, 0x2b08080808080808, + 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b08080808192b19, 0x2b080808082b0808, 0x2b080808082b1919, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808081919082b, + 0x2b08080819191919, 0x2b08080819192b08, 0x2b080808192b0819, 0x2b0808082b080808, + 0x2b0808082b081919, 0x2b0808082b190819, 0x2b0808082b191908, 0x2b08081908080819, + 0x2b08081908081908, 0x2b08081908082b19, 0x2b08081908190808, 0x2b0808190819082b, + 0x2b08081908191919, 0x2b08081908192b08, 0x2b080819082b0819, 0x2b080819082b1908, + 0x2b08081919080808, 0x2b0808191908082b, 0x2b08081919081919, 0x2b08081919082b08, + 0x2b08081919190819, 0x2b08081919191908, 0x2b0808192b080819, 0x2b0808192b081908, + 0x2b0808192b190808, 0x2b0808192b2b2b19, 0x2b08082b08080808, 0x2b08082b08081919, + 0x2b08082b08082b2b, 0x2b08082b08190819, 0x2b08082b08191908, 0x2b08082b19080819, + 0x2b08082b19081908, 0x2b08082b19190808, 0x2b08190808080819, 0x2b08190808081908, + 0x2b0819080808192b, 0x2b08190808082b19, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190808192b08, 0x2b081908082b0819, 0x2b08190819080808, + 0x2b0819081908082b, 0x2b08190819081919, 0x2b08190819082b08, 0x2b08190819190819, + 0x2b08190819191908, 0x2b081908192b0808, 0x2b0819082b080819, 0x2b0819082b081908, + 0x2b0819082b190808, 0x2b08191908080808, 0x2b0819190808082b, 0x2b08191908081919, + 0x2b08191908082b08, 0x2b08191908190819, 0x2b08191908191908, 0x2b081919082b0808, + 0x2b08191919080819, 0x2b08191919081908, 0x2b08191919190808, 0x2b0819192b080808, + 0x2b0819192b082b2b, 0x2b08192b08080819, 0x2b08192b08081908, 0x2b08192b08190808, + 0x2b08192b082b2b19, 0x2b08192b19080808, 0x2b082b0808080808, 0x2b082b0808081919, + 0x2b082b0808190819, 0x2b082b0808191908, 0x2b082b0819080819, 0x2b082b0819081908, + 0x2b082b0819190808, 0x2b082b082b2b082b, 0x2b082b1908080819, 0x2b082b1908081908, + 0x2b082b1919080808, 0x2b082b19192b1919, 0x2b082b2b082b082b, 0x2b082b2b19192b08, + 0x2b082b2b19192b2b, 0x2b082b2b2b08082b, 0x2b082b2b2b2b082b, 0x2b19080808080819, + 0x2b19080808081908, 0x2b19080808082b19, 0x2b19080808190808, 0x2b1908080819082b, + 0x2b19080808191919, 0x2b19080808192b08, 0x2b190808082b1908, 0x2b19080819080808, + 0x2b1908081908082b, 0x2b19080819081919, 0x2b19080819082b08, 0x2b19080819190819, + 0x2b19080819191908, 0x2b190808192b0808, 0x2b1908082b080819, 0x2b1908082b081908, + 0x2b1908082b190808, 0x2b19081908080808, 0x2b19081908081919, 0x2b19081908190819, + 0x2b19081908191908, 0x2b19081919080819, 0x2b19081919081908, 0x2b19081919190808, + 0x2b19081919192b2b, 0x2b19082b08080819, 0x2b19082b08081908, 0x2b19082b08190808, + 0x2b19082b19080808, 0x2b19082b2b2b192b, 0x2b19190808080808, 0x2b1919080808082b, + 0x2b19190808081919, 0x2b19190808082b08, 0x2b19190808190819, 0x2b19190808191908, + 0x2b191908082b0808, 0x2b19190819080819, 0x2b19190819081908, 0x2b19190819190808, + 0x2b1919082b080808, 0x2b1919082b19192b, 0x2b19191908080819, 0x2b19191908081908, + 0x2b19191908190808, 0x2b19191919080808, 0x2b1919192b192b08, 0x2b1919192b2b0819, + 0x2b19192b08080808, 0x2b19192b1908192b, 0x2b19192b192b1908, 0x2b192b0808080819, + 0x2b192b0808081908, 0x2b192b0808190808, 0x2b192b08082b192b, 0x2b192b0819080808, + 0x2b192b082b2b2b19, 0x2b192b1908080808, 0x2b192b1919082b19, 0x2b192b191919082b, + 0x2b192b2b2b190808, 0x2b2b080808080808, 0x2b2b080808081919, 0x2b2b080808082b2b, + 
0x2b2b080808191908, 0x2b2b0808082b082b, 0x2b2b0808082b2b2b, 0x2b2b080819080819, + 0x2b2b080819081908, 0x2b2b080819190808, 0x2b2b08082b2b082b, 0x2b2b08082b2b2b2b, + 0x2b2b081919080808, 0x2b2b0819192b1919, 0x2b2b082b0808082b, 0x2b2b082b08082b2b, + 0x2b2b082b082b082b, 0x2b2b082b082b2b08, 0x2b2b082b082b2b2b, 0x2b2b082b2b08082b, + 0x2b2b082b2b082b08, 0x2b2b082b2b082b2b, 0x2b2b082b2b2b2b08, 0x2b2b190808080819, + 0x2b2b190808081908, 0x2b2b190808190808, 0x2b2b190819080808, 0x2b2b19082b082b19, + 0x2b2b19082b2b1908, 0x2b2b191908080808, 0x2b2b191908192b19, 0x2b2b192b19190819, + 0x2b2b2b0808082b2b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b082b, 0x2b2b2b1919191908, + 0x2b2b2b192b08192b, 0x2b2b2b2b08082b08, 0x2b2b2b2b08082b2b, 0x2b2b2b2b082b0808, + 0x2b2b2b2b082b082b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b2b082b08, 0x2b2b2b2b2b2b2b2b, +}; + static const __device__ uint32_t iq3xxs_grid[256] = { 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, 0x04041404, 0x04041414, 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, @@ -2037,6 +2307,27 @@ static __global__ void dequantize_block_iq2_xs(const void * __restrict__ vx, dst } +template +static __global__ void dequantize_block_iq2_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq2_s * x = (const block_iq2_s *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint8_t * grid = (const uint8_t *)(iq2s_grid + (x[i].qs[4*ib+il] | ((x[i].qh[ib] << (8-2*il)) & 0x300))); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib] >> 4*(il/2)) & 0xf)) * 0.25f; + const uint8_t signs = x[i].qs[QK_K/8+4*ib+il]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); +#else + assert(false); +#endif + +} + template static __global__ void dequantize_block_iq3_xxs(const void * __restrict__ vx, dst_t * __restrict__ yy) { @@ -4800,6 +5091,54 @@ static __device__ __forceinline__ float vec_dot_iq2_xs_q8_1( #endif } +// TODO +static __device__ __forceinline__ float vec_dot_iq2_s_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +#if QK_K == 256 + const block_iq2_s * bq2 = (const block_iq2_s *) vbq; + + const int ib32 = iqs; + const int8_t * q8 = bq8_1[ib32].qs; + const uint8_t * signs = bq2->qs + QK_K/8 + 4*ib32; + const uint8_t ls1 = bq2->scales[ib32] & 0xf; + const uint8_t ls2 = bq2->scales[ib32] >> 4; + int sumi1 = 0; + for (int l = 0; l < 2; ++l) { + const uint32_t * grid = (const uint32_t *)(iq2s_grid + (bq2->qs[4*ib32+l] | ((bq2->qh[ib32] << (8-2*l)) & 0x300))); + const uint32_t signs0 = __vcmpeq4(((signs[l] & 0xf) * 0x01010101) & 0x08040201, 0x08040201); + const uint32_t signs1 = __vcmpeq4(((signs[l] >> 4) * 0x01010101) & 0x08040201, 0x08040201); + const int grid_l = __vsub4(grid[0] ^ signs0, signs0); + const int grid_h = __vsub4(grid[1] ^ signs1, signs1); + sumi1 = __dp4a(grid_l, *((const int *)q8 + 0), sumi1); + sumi1 = __dp4a(grid_h, *((const int *)q8 + 1), sumi1); + q8 += 8; + } + int sumi2 = 0; + for (int l = 2; l < 4; ++l) { + const uint32_t * grid = (const uint32_t *)(iq2s_grid + (bq2->qs[4*ib32+l] | ((bq2->qh[ib32] << (8-2*l)) & 0x300))); + const uint32_t signs0 = __vcmpeq4(((signs[l] & 0xf) * 0x01010101) & 0x08040201, 0x08040201); + const uint32_t signs1 = __vcmpeq4(((signs[l] >> 4) * 0x01010101) & 0x08040201, 0x08040201); + const int grid_l = __vsub4(grid[0] ^ signs0, signs0); + const int grid_h = __vsub4(grid[1] ^ signs1, signs1); + sumi2 = __dp4a(grid_l, *((const int *)q8 + 0), sumi2); + sumi2 = __dp4a(grid_h, *((const int *)q8 + 1), sumi2); + q8 += 8; + } + const float d = (float)bq2->d * __low2float(bq8_1[ib32].ds) * 0.25f; + return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); +#else + (void) ksigns64; + assert(false); + return 0.f; +#endif +#else + (void) ksigns64; + assert(false); + return 0.f; +#endif +} + static __device__ __forceinline__ float vec_dot_iq3_xxs_q8_1( const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { #if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics @@ -6996,6 +7335,12 @@ static void dequantize_row_iq2_xs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq2_xs<<>>(vx, y); } +template +static void dequantize_row_iq2_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq2_s<<>>(vx, y); +} + template static void dequantize_row_iq3_xxs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -7057,6 +7402,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_IQ2_XS: return dequantize_row_iq2_xs_cuda; + case GGML_TYPE_IQ2_S: + return dequantize_row_iq2_s_cuda; case GGML_TYPE_IQ3_XXS: return dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: @@ -7098,6 +7445,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_IQ2_XS: return dequantize_row_iq2_xs_cuda; + case GGML_TYPE_IQ2_S: + return dequantize_row_iq2_s_cuda; case GGML_TYPE_IQ3_XXS: return 
dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: @@ -8848,6 +9197,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ2_S: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; case GGML_TYPE_IQ3_XXS: mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); @@ -11710,7 +12065,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons } ggml_type a_type = a->type; if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || - a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S) { + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S || + a_type == GGML_TYPE_IQ2_S) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 3d6b01263..251d04fb0 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -62,6 +62,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, @@ -87,6 +88,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, @@ -108,6 +110,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, @@ -126,6 +129,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, @@ -144,6 +148,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, @@ -458,6 +463,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, get_rows_iq2_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, 
get_rows_i32, true); @@ -483,6 +489,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, mul_mv_iq2_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); @@ -504,6 +511,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, mul_mv_id_iq2_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); @@ -522,6 +530,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, mul_mm_iq2_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); @@ -540,6 +549,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, mul_mm_id_iq2_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, 
rope_f32, true); @@ -1358,6 +1368,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32 ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); @@ -1500,6 +1511,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32].pipeline; } break; + case GGML_TYPE_IQ2_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1544,9 +1561,9 @@ static bool ggml_metal_graph_compute( [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; - if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || - src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - src0t == GGML_TYPE_Q2_K || src0t == GGML_TYPE_IQ1_S) { // || src0t == GGML_TYPE_Q4_K) { + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || + src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || + src0t == GGML_TYPE_Q2_K || src0t == GGML_TYPE_IQ1_S || src0t == GGML_TYPE_IQ2_S) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { @@ -1658,6 +1675,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32 ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); @@ -1803,6 +1821,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32].pipeline; } break; + case GGML_TYPE_IQ2_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1863,9 +1887,9 @@ static bool ggml_metal_graph_compute( [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; } - if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || - src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - src2t == GGML_TYPE_Q2_K || src2t == GGML_TYPE_IQ1_S) { // || src2t == GGML_TYPE_Q4_K) { + if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || + src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + 
src2t == GGML_TYPE_Q2_K || src2t == GGML_TYPE_IQ1_S || src2t == GGML_TYPE_IQ2_S) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { @@ -1925,6 +1949,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; diff --git a/ggml-metal.metal b/ggml-metal.metal index b3bf40539..47354e952 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2519,6 +2519,14 @@ typedef struct { } block_iq2_xs; // 74 bytes / block for QK_K = 256, so 2.3125 bpw +// 2.5625 bpw quants +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t scales[QK_K/32]; +} block_iq2_s; + typedef struct { half d; uint8_t qs[3*QK_K/8]; @@ -3774,6 +3782,265 @@ constexpr constant static uint64_t iq2xs_grid[512] = { 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, }; +constexpr constant static uint64_t iq2s_grid[1024] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x08080808192b192b, + 0x08080808192b2b19, 0x080808082b080808, 0x080808082b08082b, 0x080808082b081919, + 0x080808082b082b08, 0x080808082b190819, 0x080808082b191908, 0x080808082b2b0808, + 0x080808082b2b1919, 0x080808082b2b2b2b, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, 0x080808190819082b, + 0x0808081908191919, 0x0808081908192b08, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, 0x0808081919082b08, + 0x0808081919190819, 0x0808081919191908, 0x080808191919192b, 0x0808081919192b19, + 0x08080819192b0808, 0x08080819192b1919, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 0x080808192b190808, 0x080808192b19082b, 0x080808192b191919, + 0x080808192b2b0819, 0x080808192b2b1908, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b08081919, 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, + 0x0808082b082b0808, 0x0808082b082b2b2b, 0x0808082b19080819, 0x0808082b19081908, + 0x0808082b1908192b, 0x0808082b19082b19, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b081919, 0x0808082b2b082b2b, 0x0808082b2b191908, + 0x0808082b2b2b082b, 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, 0x0808190808191919, + 0x0808190808192b08, 0x08081908082b0819, 
0x08081908082b1908, 0x08081908082b192b, + 0x08081908082b2b19, 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, + 0x0808190819082b08, 0x0808190819082b2b, 0x0808190819190819, 0x0808190819191908, + 0x080819081919192b, 0x0808190819192b19, 0x08081908192b0808, 0x08081908192b082b, + 0x08081908192b1919, 0x080819082b080819, 0x080819082b081908, 0x080819082b08192b, + 0x080819082b082b19, 0x080819082b190808, 0x080819082b191919, 0x080819082b192b08, + 0x080819082b2b0819, 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, + 0x0808191908081919, 0x0808191908082b08, 0x0808191908082b2b, 0x0808191908190819, + 0x0808191908191908, 0x080819190819192b, 0x0808191908192b19, 0x08081919082b0808, + 0x08081919082b1919, 0x08081919082b2b08, 0x0808191919080819, 0x0808191919081908, + 0x080819191908192b, 0x0808191919082b19, 0x0808191919190808, 0x080819191919082b, + 0x0808191919191919, 0x0808191919192b08, 0x08081919192b0819, 0x08081919192b1908, + 0x080819192b080808, 0x080819192b08082b, 0x080819192b081919, 0x080819192b082b08, + 0x080819192b190819, 0x080819192b191908, 0x080819192b2b0808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b0808192b, 0x0808192b08082b19, 0x0808192b08190808, + 0x0808192b08191919, 0x0808192b19080808, 0x0808192b19081919, 0x0808192b19082b08, + 0x0808192b19190819, 0x0808192b19191908, 0x0808192b192b0808, 0x0808192b2b080819, + 0x0808192b2b081908, 0x0808192b2b190808, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808190819, 0x08082b0808191908, + 0x08082b080819192b, 0x08082b0808192b19, 0x08082b08082b0808, 0x08082b08082b1919, + 0x08082b08082b2b2b, 0x08082b0819080819, 0x08082b0819081908, 0x08082b081908192b, + 0x08082b0819082b19, 0x08082b0819190808, 0x08082b081919082b, 0x08082b0819191919, + 0x08082b0819192b08, 0x08082b08192b0819, 0x08082b08192b1908, 0x08082b082b080808, + 0x08082b082b081919, 0x08082b082b191908, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b190819082b, 0x08082b1908191919, + 0x08082b1908192b08, 0x08082b19082b0819, 0x08082b1919080808, 0x08082b1919081919, + 0x08082b1919082b08, 0x08082b1919190819, 0x08082b1919191908, 0x08082b19192b0808, + 0x08082b192b080819, 0x08082b192b190808, 0x08082b2b08080808, 0x08082b2b08190819, + 0x08082b2b08191908, 0x08082b2b082b082b, 0x08082b2b082b2b08, 0x08082b2b082b2b2b, + 0x08082b2b19190808, 0x08082b2b2b192b19, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, + 0x0819080808191919, 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, + 0x08190808082b192b, 0x0819080819080808, 0x081908081908082b, 0x0819080819081919, + 0x0819080819082b08, 0x0819080819190819, 0x0819080819191908, 0x081908081919192b, + 0x0819080819192b19, 0x08190808192b0808, 0x08190808192b082b, 0x08190808192b1919, + 0x08190808192b2b08, 0x081908082b080819, 0x081908082b081908, 0x081908082b08192b, + 0x081908082b190808, 0x081908082b191919, 0x081908082b192b08, 0x081908082b2b0819, + 0x081908082b2b1908, 0x0819081908080808, 0x081908190808082b, 0x0819081908081919, + 0x0819081908082b08, 0x0819081908082b2b, 0x0819081908190819, 0x0819081908191908, + 0x081908190819192b, 0x0819081908192b19, 0x08190819082b0808, 0x08190819082b082b, + 0x08190819082b1919, 0x08190819082b2b08, 0x0819081919080819, 0x0819081919081908, + 0x081908191908192b, 0x0819081919082b19, 0x0819081919190808, 0x081908191919082b, + 0x0819081919191919, 0x0819081919192b08, 0x08190819192b0819, 0x08190819192b1908, + 0x081908192b080808, 0x081908192b08082b, 0x081908192b081919, 
0x081908192b082b08, + 0x081908192b190819, 0x081908192b191908, 0x0819082b08080819, 0x0819082b08081908, + 0x0819082b08082b19, 0x0819082b08190808, 0x0819082b08191919, 0x0819082b082b0819, + 0x0819082b082b1908, 0x0819082b19080808, 0x0819082b19081919, 0x0819082b19190819, + 0x0819082b19191908, 0x0819082b2b080819, 0x0819082b2b081908, 0x0819082b2b190808, + 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 0x0819190808191908, 0x081919080819192b, 0x0819190808192b19, + 0x08191908082b0808, 0x08191908082b1919, 0x08191908082b2b08, 0x0819190819080819, + 0x0819190819081908, 0x081919081908192b, 0x0819190819082b19, 0x0819190819190808, + 0x081919081919082b, 0x0819190819191919, 0x0819190819192b08, 0x08191908192b0819, + 0x08191908192b1908, 0x081919082b080808, 0x081919082b08082b, 0x081919082b081919, + 0x081919082b082b08, 0x081919082b190819, 0x081919082b191908, 0x081919082b2b0808, + 0x0819191908080819, 0x0819191908081908, 0x081919190808192b, 0x0819191908082b19, + 0x0819191908190808, 0x081919190819082b, 0x0819191908191919, 0x0819191908192b08, + 0x08191919082b0819, 0x08191919082b1908, 0x0819191919080808, 0x081919191908082b, + 0x0819191919081919, 0x0819191919082b08, 0x0819191919190819, 0x0819191919191908, + 0x08191919192b0808, 0x081919192b080819, 0x081919192b081908, 0x081919192b190808, + 0x0819192b08080808, 0x0819192b08081919, 0x0819192b08082b08, 0x0819192b08190819, + 0x0819192b08191908, 0x0819192b082b0808, 0x0819192b19080819, 0x0819192b19081908, + 0x0819192b19190808, 0x0819192b2b080808, 0x0819192b2b2b2b2b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b080808192b, 0x08192b0808082b19, 0x08192b0808190808, + 0x08192b0808191919, 0x08192b0808192b08, 0x08192b08082b0819, 0x08192b0819080808, + 0x08192b081908082b, 0x08192b0819081919, 0x08192b0819082b08, 0x08192b0819190819, + 0x08192b0819191908, 0x08192b08192b0808, 0x08192b082b080819, 0x08192b082b081908, + 0x08192b1908080808, 0x08192b190808082b, 0x08192b1908081919, 0x08192b1908082b08, + 0x08192b1908190819, 0x08192b1908191908, 0x08192b19082b0808, 0x08192b1919080819, + 0x08192b1919081908, 0x08192b1919190808, 0x08192b19192b2b19, 0x08192b192b2b082b, + 0x08192b2b08081908, 0x08192b2b08190808, 0x08192b2b19080808, 0x08192b2b1919192b, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, 0x082b080808082b08, + 0x082b080808190819, 0x082b080808191908, 0x082b08080819192b, 0x082b080808192b19, + 0x082b0808082b0808, 0x082b0808082b1919, 0x082b0808082b2b2b, 0x082b080819080819, + 0x082b080819081908, 0x082b080819190808, 0x082b08081919082b, 0x082b080819191919, + 0x082b0808192b1908, 0x082b08082b080808, 0x082b08082b082b2b, 0x082b08082b191908, + 0x082b08082b2b2b2b, 0x082b081908080819, 0x082b081908081908, 0x082b081908190808, + 0x082b08190819082b, 0x082b081908191919, 0x082b0819082b0819, 0x082b081919080808, + 0x082b08191908082b, 0x082b081919081919, 0x082b081919190819, 0x082b081919191908, + 0x082b0819192b0808, 0x082b08192b080819, 0x082b08192b081908, 0x082b08192b190808, + 0x082b082b08080808, 0x082b082b08082b2b, 0x082b082b082b082b, 0x082b082b082b2b08, + 0x082b082b082b2b2b, 0x082b082b19081908, 0x082b082b19190808, 0x082b082b2b082b08, + 0x082b082b2b082b2b, 0x082b082b2b2b2b08, 0x082b190808080819, 0x082b190808081908, + 0x082b19080808192b, 0x082b190808082b19, 0x082b190808190808, 0x082b190808191919, + 0x082b190808192b08, 0x082b1908082b0819, 0x082b1908082b1908, 0x082b190819080808, + 0x082b19081908082b, 0x082b190819081919, 0x082b190819082b08, 0x082b190819190819, + 0x082b190819191908, 0x082b1908192b0808, 0x082b19082b080819, 0x082b19082b081908, + 
0x082b19082b190808, 0x082b191908080808, 0x082b191908081919, 0x082b191908082b08, + 0x082b191908190819, 0x082b191908191908, 0x082b1919082b0808, 0x082b191919080819, + 0x082b191919081908, 0x082b191919190808, 0x082b1919192b192b, 0x082b19192b080808, + 0x082b192b08080819, 0x082b192b08081908, 0x082b192b08190808, 0x082b192b19080808, + 0x082b192b19192b19, 0x082b2b0808080808, 0x082b2b0808081919, 0x082b2b0808190819, + 0x082b2b0808191908, 0x082b2b0819080819, 0x082b2b0819081908, 0x082b2b0819190808, + 0x082b2b082b082b2b, 0x082b2b082b2b2b2b, 0x082b2b1908080819, 0x082b2b1908081908, + 0x082b2b1908190808, 0x082b2b192b191919, 0x082b2b2b08082b2b, 0x082b2b2b082b082b, + 0x082b2b2b192b1908, 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, + 0x190808080819082b, 0x1908080808191919, 0x1908080808192b08, 0x1908080808192b2b, + 0x19080808082b0819, 0x19080808082b1908, 0x19080808082b192b, 0x1908080819080808, + 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, 0x1908080819082b2b, + 0x1908080819190819, 0x1908080819191908, 0x190808081919192b, 0x1908080819192b19, + 0x19080808192b0808, 0x19080808192b082b, 0x19080808192b1919, 0x190808082b080819, + 0x190808082b081908, 0x190808082b190808, 0x190808082b191919, 0x190808082b192b08, + 0x190808082b2b0819, 0x190808082b2b1908, 0x1908081908080808, 0x190808190808082b, + 0x1908081908081919, 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x190808190819192b, 0x1908081908192b19, 0x19080819082b0808, 0x19080819082b082b, + 0x19080819082b1919, 0x1908081919080819, 0x1908081919081908, 0x190808191908192b, + 0x1908081919082b19, 0x1908081919190808, 0x190808191919082b, 0x1908081919191919, + 0x1908081919192b08, 0x19080819192b0819, 0x19080819192b1908, 0x190808192b080808, + 0x190808192b08082b, 0x190808192b081919, 0x190808192b082b08, 0x190808192b190819, + 0x190808192b191908, 0x190808192b2b0808, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b08191919, 0x1908082b08192b08, + 0x1908082b082b1908, 0x1908082b19080808, 0x1908082b19081919, 0x1908082b19082b08, + 0x1908082b19190819, 0x1908082b19191908, 0x1908082b192b0808, 0x1908082b2b080819, + 0x1908082b2b081908, 0x1908190808080808, 0x190819080808082b, 0x1908190808081919, + 0x1908190808082b08, 0x1908190808082b2b, 0x1908190808190819, 0x1908190808191908, + 0x190819080819192b, 0x1908190808192b19, 0x19081908082b0808, 0x19081908082b082b, + 0x19081908082b1919, 0x19081908082b2b08, 0x1908190819080819, 0x1908190819081908, + 0x190819081908192b, 0x1908190819082b19, 0x1908190819190808, 0x190819081919082b, + 0x1908190819191919, 0x1908190819192b08, 0x19081908192b0819, 0x19081908192b1908, + 0x190819082b080808, 0x190819082b08082b, 0x190819082b081919, 0x190819082b082b08, + 0x190819082b190819, 0x190819082b191908, 0x190819082b2b0808, 0x1908191908080819, + 0x1908191908081908, 0x190819190808192b, 0x1908191908082b19, 0x1908191908190808, + 0x190819190819082b, 0x1908191908191919, 0x1908191908192b08, 0x19081919082b0819, + 0x19081919082b1908, 0x1908191919080808, 0x190819191908082b, 0x1908191919081919, + 0x1908191919082b08, 0x1908191919190819, 0x1908191919191908, 0x19081919192b0808, + 0x19081919192b2b2b, 0x190819192b080819, 0x190819192b081908, 0x190819192b190808, + 0x1908192b08080808, 0x1908192b0808082b, 0x1908192b08081919, 0x1908192b08082b08, + 0x1908192b08190819, 0x1908192b08191908, 0x1908192b082b0808, 0x1908192b19080819, + 0x1908192b19081908, 0x1908192b19190808, 0x1908192b2b080808, 0x1908192b2b2b1919, + 0x19082b0808080819, 
0x19082b0808081908, 0x19082b0808082b19, 0x19082b0808190808, + 0x19082b080819082b, 0x19082b0808191919, 0x19082b0808192b08, 0x19082b08082b0819, + 0x19082b08082b1908, 0x19082b0819080808, 0x19082b081908082b, 0x19082b0819081919, + 0x19082b0819082b08, 0x19082b0819190819, 0x19082b0819191908, 0x19082b08192b0808, + 0x19082b082b081908, 0x19082b082b190808, 0x19082b1908080808, 0x19082b190808082b, + 0x19082b1908081919, 0x19082b1908082b08, 0x19082b1908190819, 0x19082b1908191908, + 0x19082b19082b0808, 0x19082b1919080819, 0x19082b1919081908, 0x19082b1919190808, + 0x19082b192b080808, 0x19082b192b19192b, 0x19082b2b08080819, 0x19082b2b08081908, + 0x19082b2b08190808, 0x19082b2b19080808, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, 0x1919080808191908, + 0x191908080819192b, 0x1919080808192b19, 0x19190808082b0808, 0x19190808082b082b, + 0x19190808082b1919, 0x19190808082b2b08, 0x1919080819080819, 0x1919080819081908, + 0x191908081908192b, 0x1919080819082b19, 0x1919080819190808, 0x191908081919082b, + 0x1919080819191919, 0x1919080819192b08, 0x19190808192b0819, 0x19190808192b1908, + 0x191908082b080808, 0x191908082b08082b, 0x191908082b081919, 0x191908082b082b08, + 0x191908082b190819, 0x191908082b191908, 0x1919081908080819, 0x1919081908081908, + 0x191908190808192b, 0x1919081908082b19, 0x1919081908190808, 0x191908190819082b, + 0x1919081908191919, 0x1919081908192b08, 0x19190819082b0819, 0x19190819082b1908, + 0x1919081919080808, 0x191908191908082b, 0x1919081919081919, 0x1919081919082b08, + 0x1919081919190819, 0x1919081919191908, 0x19190819192b0808, 0x191908192b080819, + 0x191908192b081908, 0x191908192b190808, 0x1919082b08080808, 0x1919082b08081919, + 0x1919082b08082b08, 0x1919082b08190819, 0x1919082b08191908, 0x1919082b082b0808, + 0x1919082b19080819, 0x1919082b19081908, 0x1919082b19190808, 0x1919082b192b2b19, + 0x1919082b2b080808, 0x1919190808080819, 0x1919190808081908, 0x191919080808192b, + 0x1919190808082b19, 0x1919190808190808, 0x191919080819082b, 0x1919190808191919, + 0x1919190808192b08, 0x19191908082b0819, 0x19191908082b1908, 0x1919190819080808, + 0x191919081908082b, 0x1919190819081919, 0x1919190819082b08, 0x1919190819190819, + 0x1919190819191908, 0x19191908192b0808, 0x191919082b080819, 0x191919082b081908, + 0x191919082b190808, 0x1919191908080808, 0x191919190808082b, 0x1919191908081919, + 0x1919191908082b08, 0x1919191908190819, 0x1919191908191908, 0x19191919082b0808, + 0x1919191919080819, 0x1919191919081908, 0x1919191919190808, 0x191919192b080808, + 0x1919192b08080819, 0x1919192b08081908, 0x1919192b08190808, 0x1919192b082b192b, + 0x1919192b19080808, 0x19192b0808080808, 0x19192b080808082b, 0x19192b0808081919, + 0x19192b0808082b08, 0x19192b0808190819, 0x19192b0808191908, 0x19192b08082b0808, + 0x19192b0819080819, 0x19192b0819081908, 0x19192b0819190808, 0x19192b0819192b2b, + 0x19192b082b080808, 0x19192b1908080819, 0x19192b1908081908, 0x19192b1908190808, + 0x19192b1919080808, 0x19192b2b08080808, 0x19192b2b08192b19, 0x19192b2b2b081919, + 0x19192b2b2b2b2b08, 0x192b080808080819, 0x192b080808081908, 0x192b08080808192b, + 0x192b080808190808, 0x192b08080819082b, 0x192b080808191919, 0x192b080808192b08, + 0x192b0808082b0819, 0x192b0808082b1908, 0x192b080819080808, 0x192b080819081919, + 0x192b080819082b08, 0x192b080819190819, 0x192b080819191908, 0x192b0808192b0808, + 0x192b08082b081908, 0x192b08082b190808, 0x192b081908080808, 0x192b08190808082b, + 0x192b081908081919, 0x192b081908082b08, 0x192b081908190819, 0x192b081908191908, + 0x192b0819082b0808, 0x192b081919080819, 
0x192b081919081908, 0x192b081919190808, + 0x192b08192b080808, 0x192b08192b192b19, 0x192b082b08081908, 0x192b082b08190808, + 0x192b082b19080808, 0x192b082b1919192b, 0x192b082b2b2b0819, 0x192b190808080808, + 0x192b190808081919, 0x192b190808082b08, 0x192b190808190819, 0x192b190808191908, + 0x192b1908082b0808, 0x192b190819080819, 0x192b190819081908, 0x192b190819190808, + 0x192b19082b080808, 0x192b191908080819, 0x192b191908081908, 0x192b191908190808, + 0x192b191919080808, 0x192b191919082b2b, 0x192b1919192b2b08, 0x192b19192b19082b, + 0x192b192b08080808, 0x192b192b2b191908, 0x192b2b0808080819, 0x192b2b0808081908, + 0x192b2b0808190808, 0x192b2b08192b1919, 0x192b2b082b192b08, 0x192b2b1908080808, + 0x192b2b19082b2b2b, 0x192b2b2b1908082b, 0x192b2b2b2b2b0819, 0x2b08080808080808, + 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b08080808192b19, 0x2b080808082b0808, 0x2b080808082b1919, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808081919082b, + 0x2b08080819191919, 0x2b08080819192b08, 0x2b080808192b0819, 0x2b0808082b080808, + 0x2b0808082b081919, 0x2b0808082b190819, 0x2b0808082b191908, 0x2b08081908080819, + 0x2b08081908081908, 0x2b08081908082b19, 0x2b08081908190808, 0x2b0808190819082b, + 0x2b08081908191919, 0x2b08081908192b08, 0x2b080819082b0819, 0x2b080819082b1908, + 0x2b08081919080808, 0x2b0808191908082b, 0x2b08081919081919, 0x2b08081919082b08, + 0x2b08081919190819, 0x2b08081919191908, 0x2b0808192b080819, 0x2b0808192b081908, + 0x2b0808192b190808, 0x2b0808192b2b2b19, 0x2b08082b08080808, 0x2b08082b08081919, + 0x2b08082b08082b2b, 0x2b08082b08190819, 0x2b08082b08191908, 0x2b08082b19080819, + 0x2b08082b19081908, 0x2b08082b19190808, 0x2b08190808080819, 0x2b08190808081908, + 0x2b0819080808192b, 0x2b08190808082b19, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190808192b08, 0x2b081908082b0819, 0x2b08190819080808, + 0x2b0819081908082b, 0x2b08190819081919, 0x2b08190819082b08, 0x2b08190819190819, + 0x2b08190819191908, 0x2b081908192b0808, 0x2b0819082b080819, 0x2b0819082b081908, + 0x2b0819082b190808, 0x2b08191908080808, 0x2b0819190808082b, 0x2b08191908081919, + 0x2b08191908082b08, 0x2b08191908190819, 0x2b08191908191908, 0x2b081919082b0808, + 0x2b08191919080819, 0x2b08191919081908, 0x2b08191919190808, 0x2b0819192b080808, + 0x2b0819192b082b2b, 0x2b08192b08080819, 0x2b08192b08081908, 0x2b08192b08190808, + 0x2b08192b082b2b19, 0x2b08192b19080808, 0x2b082b0808080808, 0x2b082b0808081919, + 0x2b082b0808190819, 0x2b082b0808191908, 0x2b082b0819080819, 0x2b082b0819081908, + 0x2b082b0819190808, 0x2b082b082b2b082b, 0x2b082b1908080819, 0x2b082b1908081908, + 0x2b082b1919080808, 0x2b082b19192b1919, 0x2b082b2b082b082b, 0x2b082b2b19192b08, + 0x2b082b2b19192b2b, 0x2b082b2b2b08082b, 0x2b082b2b2b2b082b, 0x2b19080808080819, + 0x2b19080808081908, 0x2b19080808082b19, 0x2b19080808190808, 0x2b1908080819082b, + 0x2b19080808191919, 0x2b19080808192b08, 0x2b190808082b1908, 0x2b19080819080808, + 0x2b1908081908082b, 0x2b19080819081919, 0x2b19080819082b08, 0x2b19080819190819, + 0x2b19080819191908, 0x2b190808192b0808, 0x2b1908082b080819, 0x2b1908082b081908, + 0x2b1908082b190808, 0x2b19081908080808, 0x2b19081908081919, 0x2b19081908190819, + 0x2b19081908191908, 0x2b19081919080819, 0x2b19081919081908, 0x2b19081919190808, + 0x2b19081919192b2b, 0x2b19082b08080819, 0x2b19082b08081908, 0x2b19082b08190808, + 0x2b19082b19080808, 0x2b19082b2b2b192b, 0x2b19190808080808, 0x2b1919080808082b, + 0x2b19190808081919, 0x2b19190808082b08, 0x2b19190808190819, 
0x2b19190808191908, + 0x2b191908082b0808, 0x2b19190819080819, 0x2b19190819081908, 0x2b19190819190808, + 0x2b1919082b080808, 0x2b1919082b19192b, 0x2b19191908080819, 0x2b19191908081908, + 0x2b19191908190808, 0x2b19191919080808, 0x2b1919192b192b08, 0x2b1919192b2b0819, + 0x2b19192b08080808, 0x2b19192b1908192b, 0x2b19192b192b1908, 0x2b192b0808080819, + 0x2b192b0808081908, 0x2b192b0808190808, 0x2b192b08082b192b, 0x2b192b0819080808, + 0x2b192b082b2b2b19, 0x2b192b1908080808, 0x2b192b1919082b19, 0x2b192b191919082b, + 0x2b192b2b2b190808, 0x2b2b080808080808, 0x2b2b080808081919, 0x2b2b080808082b2b, + 0x2b2b080808191908, 0x2b2b0808082b082b, 0x2b2b0808082b2b2b, 0x2b2b080819080819, + 0x2b2b080819081908, 0x2b2b080819190808, 0x2b2b08082b2b082b, 0x2b2b08082b2b2b2b, + 0x2b2b081919080808, 0x2b2b0819192b1919, 0x2b2b082b0808082b, 0x2b2b082b08082b2b, + 0x2b2b082b082b082b, 0x2b2b082b082b2b08, 0x2b2b082b082b2b2b, 0x2b2b082b2b08082b, + 0x2b2b082b2b082b08, 0x2b2b082b2b082b2b, 0x2b2b082b2b2b2b08, 0x2b2b190808080819, + 0x2b2b190808081908, 0x2b2b190808190808, 0x2b2b190819080808, 0x2b2b19082b082b19, + 0x2b2b19082b2b1908, 0x2b2b191908080808, 0x2b2b191908192b19, 0x2b2b192b19190819, + 0x2b2b2b0808082b2b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b082b, 0x2b2b2b1919191908, + 0x2b2b2b192b08192b, 0x2b2b2b2b08082b08, 0x2b2b2b2b08082b2b, 0x2b2b2b2b082b0808, + 0x2b2b2b2b082b082b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b2b082b08, 0x2b2b2b2b2b2b2b2b, +}; + constexpr constant static uint32_t iq3xxs_grid[256] = { 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, 0x04041404, 0x04041414, 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, @@ -4572,6 +4839,139 @@ kernel void kernel_mul_mv_iq3_s_f32( kernel_mul_mv_iq3_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq2_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_s * x = (device const block_iq2_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + //threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + //{ + // int nval = 32; + // int pos = (32*sgitg + tiisg)*nval; + // for (int i = 0; i < nval; ++i) values[pos + i] = iq2s_grid[pos + i]; + // threadgroup_barrier(mem_flags::mem_threadgroup); + //} + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const 
block_iq2_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + ib; + device const uint8_t * signs = qs + QK_K/8; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d1 = db * (0.5f + (sc[0] & 0xf)); + const float d2 = db * (0.5f + (sc[0] >> 4)); + + float2 sum = {0}; + for (int l = 0; l < 2; ++l) { + //const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[l+0] | ((qh[0] << (8-2*l)) & 0x300))); + //const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300))); + constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[l+0] | ((qh[0] << (8-2*l)) & 0x300))); + constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300))); + for (int j = 0; j < 8; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l+0] & kmask_iq2xs[j]); + sum[1] += yl[8*l + j + 16] * grid2[j] * select(1, -1, signs[l+2] & kmask_iq2xs[j]); + } + } + sumf[row] += d1 * sum[0] + d2 * sum[1]; + + dh += nb*sizeof(block_iq2_s)/2; + qs += nb*sizeof(block_iq2_s); + qh += nb*sizeof(block_iq2_s); + sc += nb*sizeof(block_iq2_s); + signs += nb*sizeof(block_iq2_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_s_f32")]] +kernel void kernel_mul_mv_iq2_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + void kernel_mul_mv_iq1_s_f32_impl( device const void * src0, device const float * src1, @@ -5188,6 +5588,25 @@ void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & } } +template +void dequantize_iq2_s(device const block_iq2_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * qs = xb->qs + 4*ib32 + 2*il; + device const uint8_t * signs = qs + QK_K/8; + const uint8_t qh = xb->qh[ib32] >> 4*il; + const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f; + constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[0] | ((qh << 8) & 0x300))); + constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[1] | ((qh << 6) & 0x300))); + for (int i = 0; i < 8; ++i) { + reg[i/4+0][i%4] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i]); + reg[i/4+2][i%4] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i]); + } +} + template void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 @@ -5762,6 +6181,7 @@ template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_r template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq2_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; @@ -5804,6 +6224,7 @@ template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_m template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; @@ -5858,6 +6279,7 @@ template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; @@ -6893,6 +7315,71 @@ kernel void kernel_mul_mv_id_iq3_s_f32( sgitg); } +[[host_name("kernel_mul_mv_id_iq2_s_f32")]] +kernel void kernel_mul_mv_id_iq2_s_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const 
char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq2_s_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} + [[host_name("kernel_mul_mv_id_iq1_s_f32")]] kernel void kernel_mul_mv_id_iq1_s_f32( device const char * ids, diff --git a/ggml-quants.c b/ggml-quants.c index 3d94d166d..ce654f094 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3495,6 +3495,265 @@ static const uint64_t iq2xs_grid[512] = { 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, }; +static const uint64_t iq2s_grid[1024] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x08080808192b192b, + 0x08080808192b2b19, 0x080808082b080808, 0x080808082b08082b, 0x080808082b081919, + 0x080808082b082b08, 0x080808082b190819, 0x080808082b191908, 0x080808082b2b0808, + 0x080808082b2b1919, 0x080808082b2b2b2b, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, 0x080808190819082b, + 0x0808081908191919, 0x0808081908192b08, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, 0x0808081919082b08, + 0x0808081919190819, 0x0808081919191908, 0x080808191919192b, 0x0808081919192b19, + 0x08080819192b0808, 0x08080819192b1919, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 0x080808192b190808, 0x080808192b19082b, 0x080808192b191919, + 0x080808192b2b0819, 0x080808192b2b1908, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b08081919, 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, + 0x0808082b082b0808, 0x0808082b082b2b2b, 0x0808082b19080819, 0x0808082b19081908, + 0x0808082b1908192b, 0x0808082b19082b19, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b081919, 0x0808082b2b082b2b, 0x0808082b2b191908, + 0x0808082b2b2b082b, 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, 0x0808190808191919, + 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, 0x08081908082b192b, + 0x08081908082b2b19, 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, + 0x0808190819082b08, 0x0808190819082b2b, 0x0808190819190819, 0x0808190819191908, + 0x080819081919192b, 0x0808190819192b19, 0x08081908192b0808, 0x08081908192b082b, + 0x08081908192b1919, 0x080819082b080819, 0x080819082b081908, 0x080819082b08192b, + 0x080819082b082b19, 0x080819082b190808, 0x080819082b191919, 0x080819082b192b08, + 
0x080819082b2b0819, 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, + 0x0808191908081919, 0x0808191908082b08, 0x0808191908082b2b, 0x0808191908190819, + 0x0808191908191908, 0x080819190819192b, 0x0808191908192b19, 0x08081919082b0808, + 0x08081919082b1919, 0x08081919082b2b08, 0x0808191919080819, 0x0808191919081908, + 0x080819191908192b, 0x0808191919082b19, 0x0808191919190808, 0x080819191919082b, + 0x0808191919191919, 0x0808191919192b08, 0x08081919192b0819, 0x08081919192b1908, + 0x080819192b080808, 0x080819192b08082b, 0x080819192b081919, 0x080819192b082b08, + 0x080819192b190819, 0x080819192b191908, 0x080819192b2b0808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b0808192b, 0x0808192b08082b19, 0x0808192b08190808, + 0x0808192b08191919, 0x0808192b19080808, 0x0808192b19081919, 0x0808192b19082b08, + 0x0808192b19190819, 0x0808192b19191908, 0x0808192b192b0808, 0x0808192b2b080819, + 0x0808192b2b081908, 0x0808192b2b190808, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808190819, 0x08082b0808191908, + 0x08082b080819192b, 0x08082b0808192b19, 0x08082b08082b0808, 0x08082b08082b1919, + 0x08082b08082b2b2b, 0x08082b0819080819, 0x08082b0819081908, 0x08082b081908192b, + 0x08082b0819082b19, 0x08082b0819190808, 0x08082b081919082b, 0x08082b0819191919, + 0x08082b0819192b08, 0x08082b08192b0819, 0x08082b08192b1908, 0x08082b082b080808, + 0x08082b082b081919, 0x08082b082b191908, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b190819082b, 0x08082b1908191919, + 0x08082b1908192b08, 0x08082b19082b0819, 0x08082b1919080808, 0x08082b1919081919, + 0x08082b1919082b08, 0x08082b1919190819, 0x08082b1919191908, 0x08082b19192b0808, + 0x08082b192b080819, 0x08082b192b190808, 0x08082b2b08080808, 0x08082b2b08190819, + 0x08082b2b08191908, 0x08082b2b082b082b, 0x08082b2b082b2b08, 0x08082b2b082b2b2b, + 0x08082b2b19190808, 0x08082b2b2b192b19, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, + 0x0819080808191919, 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, + 0x08190808082b192b, 0x0819080819080808, 0x081908081908082b, 0x0819080819081919, + 0x0819080819082b08, 0x0819080819190819, 0x0819080819191908, 0x081908081919192b, + 0x0819080819192b19, 0x08190808192b0808, 0x08190808192b082b, 0x08190808192b1919, + 0x08190808192b2b08, 0x081908082b080819, 0x081908082b081908, 0x081908082b08192b, + 0x081908082b190808, 0x081908082b191919, 0x081908082b192b08, 0x081908082b2b0819, + 0x081908082b2b1908, 0x0819081908080808, 0x081908190808082b, 0x0819081908081919, + 0x0819081908082b08, 0x0819081908082b2b, 0x0819081908190819, 0x0819081908191908, + 0x081908190819192b, 0x0819081908192b19, 0x08190819082b0808, 0x08190819082b082b, + 0x08190819082b1919, 0x08190819082b2b08, 0x0819081919080819, 0x0819081919081908, + 0x081908191908192b, 0x0819081919082b19, 0x0819081919190808, 0x081908191919082b, + 0x0819081919191919, 0x0819081919192b08, 0x08190819192b0819, 0x08190819192b1908, + 0x081908192b080808, 0x081908192b08082b, 0x081908192b081919, 0x081908192b082b08, + 0x081908192b190819, 0x081908192b191908, 0x0819082b08080819, 0x0819082b08081908, + 0x0819082b08082b19, 0x0819082b08190808, 0x0819082b08191919, 0x0819082b082b0819, + 0x0819082b082b1908, 0x0819082b19080808, 0x0819082b19081919, 0x0819082b19190819, + 0x0819082b19191908, 0x0819082b2b080819, 0x0819082b2b081908, 0x0819082b2b190808, + 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 
0x0819190808191908, 0x081919080819192b, 0x0819190808192b19, + 0x08191908082b0808, 0x08191908082b1919, 0x08191908082b2b08, 0x0819190819080819, + 0x0819190819081908, 0x081919081908192b, 0x0819190819082b19, 0x0819190819190808, + 0x081919081919082b, 0x0819190819191919, 0x0819190819192b08, 0x08191908192b0819, + 0x08191908192b1908, 0x081919082b080808, 0x081919082b08082b, 0x081919082b081919, + 0x081919082b082b08, 0x081919082b190819, 0x081919082b191908, 0x081919082b2b0808, + 0x0819191908080819, 0x0819191908081908, 0x081919190808192b, 0x0819191908082b19, + 0x0819191908190808, 0x081919190819082b, 0x0819191908191919, 0x0819191908192b08, + 0x08191919082b0819, 0x08191919082b1908, 0x0819191919080808, 0x081919191908082b, + 0x0819191919081919, 0x0819191919082b08, 0x0819191919190819, 0x0819191919191908, + 0x08191919192b0808, 0x081919192b080819, 0x081919192b081908, 0x081919192b190808, + 0x0819192b08080808, 0x0819192b08081919, 0x0819192b08082b08, 0x0819192b08190819, + 0x0819192b08191908, 0x0819192b082b0808, 0x0819192b19080819, 0x0819192b19081908, + 0x0819192b19190808, 0x0819192b2b080808, 0x0819192b2b2b2b2b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b080808192b, 0x08192b0808082b19, 0x08192b0808190808, + 0x08192b0808191919, 0x08192b0808192b08, 0x08192b08082b0819, 0x08192b0819080808, + 0x08192b081908082b, 0x08192b0819081919, 0x08192b0819082b08, 0x08192b0819190819, + 0x08192b0819191908, 0x08192b08192b0808, 0x08192b082b080819, 0x08192b082b081908, + 0x08192b1908080808, 0x08192b190808082b, 0x08192b1908081919, 0x08192b1908082b08, + 0x08192b1908190819, 0x08192b1908191908, 0x08192b19082b0808, 0x08192b1919080819, + 0x08192b1919081908, 0x08192b1919190808, 0x08192b19192b2b19, 0x08192b192b2b082b, + 0x08192b2b08081908, 0x08192b2b08190808, 0x08192b2b19080808, 0x08192b2b1919192b, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, 0x082b080808082b08, + 0x082b080808190819, 0x082b080808191908, 0x082b08080819192b, 0x082b080808192b19, + 0x082b0808082b0808, 0x082b0808082b1919, 0x082b0808082b2b2b, 0x082b080819080819, + 0x082b080819081908, 0x082b080819190808, 0x082b08081919082b, 0x082b080819191919, + 0x082b0808192b1908, 0x082b08082b080808, 0x082b08082b082b2b, 0x082b08082b191908, + 0x082b08082b2b2b2b, 0x082b081908080819, 0x082b081908081908, 0x082b081908190808, + 0x082b08190819082b, 0x082b081908191919, 0x082b0819082b0819, 0x082b081919080808, + 0x082b08191908082b, 0x082b081919081919, 0x082b081919190819, 0x082b081919191908, + 0x082b0819192b0808, 0x082b08192b080819, 0x082b08192b081908, 0x082b08192b190808, + 0x082b082b08080808, 0x082b082b08082b2b, 0x082b082b082b082b, 0x082b082b082b2b08, + 0x082b082b082b2b2b, 0x082b082b19081908, 0x082b082b19190808, 0x082b082b2b082b08, + 0x082b082b2b082b2b, 0x082b082b2b2b2b08, 0x082b190808080819, 0x082b190808081908, + 0x082b19080808192b, 0x082b190808082b19, 0x082b190808190808, 0x082b190808191919, + 0x082b190808192b08, 0x082b1908082b0819, 0x082b1908082b1908, 0x082b190819080808, + 0x082b19081908082b, 0x082b190819081919, 0x082b190819082b08, 0x082b190819190819, + 0x082b190819191908, 0x082b1908192b0808, 0x082b19082b080819, 0x082b19082b081908, + 0x082b19082b190808, 0x082b191908080808, 0x082b191908081919, 0x082b191908082b08, + 0x082b191908190819, 0x082b191908191908, 0x082b1919082b0808, 0x082b191919080819, + 0x082b191919081908, 0x082b191919190808, 0x082b1919192b192b, 0x082b19192b080808, + 0x082b192b08080819, 0x082b192b08081908, 0x082b192b08190808, 0x082b192b19080808, + 0x082b192b19192b19, 0x082b2b0808080808, 0x082b2b0808081919, 0x082b2b0808190819, + 0x082b2b0808191908, 0x082b2b0819080819, 
0x082b2b0819081908, 0x082b2b0819190808, + 0x082b2b082b082b2b, 0x082b2b082b2b2b2b, 0x082b2b1908080819, 0x082b2b1908081908, + 0x082b2b1908190808, 0x082b2b192b191919, 0x082b2b2b08082b2b, 0x082b2b2b082b082b, + 0x082b2b2b192b1908, 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, + 0x190808080819082b, 0x1908080808191919, 0x1908080808192b08, 0x1908080808192b2b, + 0x19080808082b0819, 0x19080808082b1908, 0x19080808082b192b, 0x1908080819080808, + 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, 0x1908080819082b2b, + 0x1908080819190819, 0x1908080819191908, 0x190808081919192b, 0x1908080819192b19, + 0x19080808192b0808, 0x19080808192b082b, 0x19080808192b1919, 0x190808082b080819, + 0x190808082b081908, 0x190808082b190808, 0x190808082b191919, 0x190808082b192b08, + 0x190808082b2b0819, 0x190808082b2b1908, 0x1908081908080808, 0x190808190808082b, + 0x1908081908081919, 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x190808190819192b, 0x1908081908192b19, 0x19080819082b0808, 0x19080819082b082b, + 0x19080819082b1919, 0x1908081919080819, 0x1908081919081908, 0x190808191908192b, + 0x1908081919082b19, 0x1908081919190808, 0x190808191919082b, 0x1908081919191919, + 0x1908081919192b08, 0x19080819192b0819, 0x19080819192b1908, 0x190808192b080808, + 0x190808192b08082b, 0x190808192b081919, 0x190808192b082b08, 0x190808192b190819, + 0x190808192b191908, 0x190808192b2b0808, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b08191919, 0x1908082b08192b08, + 0x1908082b082b1908, 0x1908082b19080808, 0x1908082b19081919, 0x1908082b19082b08, + 0x1908082b19190819, 0x1908082b19191908, 0x1908082b192b0808, 0x1908082b2b080819, + 0x1908082b2b081908, 0x1908190808080808, 0x190819080808082b, 0x1908190808081919, + 0x1908190808082b08, 0x1908190808082b2b, 0x1908190808190819, 0x1908190808191908, + 0x190819080819192b, 0x1908190808192b19, 0x19081908082b0808, 0x19081908082b082b, + 0x19081908082b1919, 0x19081908082b2b08, 0x1908190819080819, 0x1908190819081908, + 0x190819081908192b, 0x1908190819082b19, 0x1908190819190808, 0x190819081919082b, + 0x1908190819191919, 0x1908190819192b08, 0x19081908192b0819, 0x19081908192b1908, + 0x190819082b080808, 0x190819082b08082b, 0x190819082b081919, 0x190819082b082b08, + 0x190819082b190819, 0x190819082b191908, 0x190819082b2b0808, 0x1908191908080819, + 0x1908191908081908, 0x190819190808192b, 0x1908191908082b19, 0x1908191908190808, + 0x190819190819082b, 0x1908191908191919, 0x1908191908192b08, 0x19081919082b0819, + 0x19081919082b1908, 0x1908191919080808, 0x190819191908082b, 0x1908191919081919, + 0x1908191919082b08, 0x1908191919190819, 0x1908191919191908, 0x19081919192b0808, + 0x19081919192b2b2b, 0x190819192b080819, 0x190819192b081908, 0x190819192b190808, + 0x1908192b08080808, 0x1908192b0808082b, 0x1908192b08081919, 0x1908192b08082b08, + 0x1908192b08190819, 0x1908192b08191908, 0x1908192b082b0808, 0x1908192b19080819, + 0x1908192b19081908, 0x1908192b19190808, 0x1908192b2b080808, 0x1908192b2b2b1919, + 0x19082b0808080819, 0x19082b0808081908, 0x19082b0808082b19, 0x19082b0808190808, + 0x19082b080819082b, 0x19082b0808191919, 0x19082b0808192b08, 0x19082b08082b0819, + 0x19082b08082b1908, 0x19082b0819080808, 0x19082b081908082b, 0x19082b0819081919, + 0x19082b0819082b08, 0x19082b0819190819, 0x19082b0819191908, 0x19082b08192b0808, + 0x19082b082b081908, 0x19082b082b190808, 0x19082b1908080808, 0x19082b190808082b, + 0x19082b1908081919, 0x19082b1908082b08, 0x19082b1908190819, 
0x19082b1908191908, + 0x19082b19082b0808, 0x19082b1919080819, 0x19082b1919081908, 0x19082b1919190808, + 0x19082b192b080808, 0x19082b192b19192b, 0x19082b2b08080819, 0x19082b2b08081908, + 0x19082b2b08190808, 0x19082b2b19080808, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, 0x1919080808191908, + 0x191908080819192b, 0x1919080808192b19, 0x19190808082b0808, 0x19190808082b082b, + 0x19190808082b1919, 0x19190808082b2b08, 0x1919080819080819, 0x1919080819081908, + 0x191908081908192b, 0x1919080819082b19, 0x1919080819190808, 0x191908081919082b, + 0x1919080819191919, 0x1919080819192b08, 0x19190808192b0819, 0x19190808192b1908, + 0x191908082b080808, 0x191908082b08082b, 0x191908082b081919, 0x191908082b082b08, + 0x191908082b190819, 0x191908082b191908, 0x1919081908080819, 0x1919081908081908, + 0x191908190808192b, 0x1919081908082b19, 0x1919081908190808, 0x191908190819082b, + 0x1919081908191919, 0x1919081908192b08, 0x19190819082b0819, 0x19190819082b1908, + 0x1919081919080808, 0x191908191908082b, 0x1919081919081919, 0x1919081919082b08, + 0x1919081919190819, 0x1919081919191908, 0x19190819192b0808, 0x191908192b080819, + 0x191908192b081908, 0x191908192b190808, 0x1919082b08080808, 0x1919082b08081919, + 0x1919082b08082b08, 0x1919082b08190819, 0x1919082b08191908, 0x1919082b082b0808, + 0x1919082b19080819, 0x1919082b19081908, 0x1919082b19190808, 0x1919082b192b2b19, + 0x1919082b2b080808, 0x1919190808080819, 0x1919190808081908, 0x191919080808192b, + 0x1919190808082b19, 0x1919190808190808, 0x191919080819082b, 0x1919190808191919, + 0x1919190808192b08, 0x19191908082b0819, 0x19191908082b1908, 0x1919190819080808, + 0x191919081908082b, 0x1919190819081919, 0x1919190819082b08, 0x1919190819190819, + 0x1919190819191908, 0x19191908192b0808, 0x191919082b080819, 0x191919082b081908, + 0x191919082b190808, 0x1919191908080808, 0x191919190808082b, 0x1919191908081919, + 0x1919191908082b08, 0x1919191908190819, 0x1919191908191908, 0x19191919082b0808, + 0x1919191919080819, 0x1919191919081908, 0x1919191919190808, 0x191919192b080808, + 0x1919192b08080819, 0x1919192b08081908, 0x1919192b08190808, 0x1919192b082b192b, + 0x1919192b19080808, 0x19192b0808080808, 0x19192b080808082b, 0x19192b0808081919, + 0x19192b0808082b08, 0x19192b0808190819, 0x19192b0808191908, 0x19192b08082b0808, + 0x19192b0819080819, 0x19192b0819081908, 0x19192b0819190808, 0x19192b0819192b2b, + 0x19192b082b080808, 0x19192b1908080819, 0x19192b1908081908, 0x19192b1908190808, + 0x19192b1919080808, 0x19192b2b08080808, 0x19192b2b08192b19, 0x19192b2b2b081919, + 0x19192b2b2b2b2b08, 0x192b080808080819, 0x192b080808081908, 0x192b08080808192b, + 0x192b080808190808, 0x192b08080819082b, 0x192b080808191919, 0x192b080808192b08, + 0x192b0808082b0819, 0x192b0808082b1908, 0x192b080819080808, 0x192b080819081919, + 0x192b080819082b08, 0x192b080819190819, 0x192b080819191908, 0x192b0808192b0808, + 0x192b08082b081908, 0x192b08082b190808, 0x192b081908080808, 0x192b08190808082b, + 0x192b081908081919, 0x192b081908082b08, 0x192b081908190819, 0x192b081908191908, + 0x192b0819082b0808, 0x192b081919080819, 0x192b081919081908, 0x192b081919190808, + 0x192b08192b080808, 0x192b08192b192b19, 0x192b082b08081908, 0x192b082b08190808, + 0x192b082b19080808, 0x192b082b1919192b, 0x192b082b2b2b0819, 0x192b190808080808, + 0x192b190808081919, 0x192b190808082b08, 0x192b190808190819, 0x192b190808191908, + 0x192b1908082b0808, 0x192b190819080819, 0x192b190819081908, 0x192b190819190808, + 0x192b19082b080808, 0x192b191908080819, 0x192b191908081908, 0x192b191908190808, + 
0x192b191919080808, 0x192b191919082b2b, 0x192b1919192b2b08, 0x192b19192b19082b, + 0x192b192b08080808, 0x192b192b2b191908, 0x192b2b0808080819, 0x192b2b0808081908, + 0x192b2b0808190808, 0x192b2b08192b1919, 0x192b2b082b192b08, 0x192b2b1908080808, + 0x192b2b19082b2b2b, 0x192b2b2b1908082b, 0x192b2b2b2b2b0819, 0x2b08080808080808, + 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b08080808192b19, 0x2b080808082b0808, 0x2b080808082b1919, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808081919082b, + 0x2b08080819191919, 0x2b08080819192b08, 0x2b080808192b0819, 0x2b0808082b080808, + 0x2b0808082b081919, 0x2b0808082b190819, 0x2b0808082b191908, 0x2b08081908080819, + 0x2b08081908081908, 0x2b08081908082b19, 0x2b08081908190808, 0x2b0808190819082b, + 0x2b08081908191919, 0x2b08081908192b08, 0x2b080819082b0819, 0x2b080819082b1908, + 0x2b08081919080808, 0x2b0808191908082b, 0x2b08081919081919, 0x2b08081919082b08, + 0x2b08081919190819, 0x2b08081919191908, 0x2b0808192b080819, 0x2b0808192b081908, + 0x2b0808192b190808, 0x2b0808192b2b2b19, 0x2b08082b08080808, 0x2b08082b08081919, + 0x2b08082b08082b2b, 0x2b08082b08190819, 0x2b08082b08191908, 0x2b08082b19080819, + 0x2b08082b19081908, 0x2b08082b19190808, 0x2b08190808080819, 0x2b08190808081908, + 0x2b0819080808192b, 0x2b08190808082b19, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190808192b08, 0x2b081908082b0819, 0x2b08190819080808, + 0x2b0819081908082b, 0x2b08190819081919, 0x2b08190819082b08, 0x2b08190819190819, + 0x2b08190819191908, 0x2b081908192b0808, 0x2b0819082b080819, 0x2b0819082b081908, + 0x2b0819082b190808, 0x2b08191908080808, 0x2b0819190808082b, 0x2b08191908081919, + 0x2b08191908082b08, 0x2b08191908190819, 0x2b08191908191908, 0x2b081919082b0808, + 0x2b08191919080819, 0x2b08191919081908, 0x2b08191919190808, 0x2b0819192b080808, + 0x2b0819192b082b2b, 0x2b08192b08080819, 0x2b08192b08081908, 0x2b08192b08190808, + 0x2b08192b082b2b19, 0x2b08192b19080808, 0x2b082b0808080808, 0x2b082b0808081919, + 0x2b082b0808190819, 0x2b082b0808191908, 0x2b082b0819080819, 0x2b082b0819081908, + 0x2b082b0819190808, 0x2b082b082b2b082b, 0x2b082b1908080819, 0x2b082b1908081908, + 0x2b082b1919080808, 0x2b082b19192b1919, 0x2b082b2b082b082b, 0x2b082b2b19192b08, + 0x2b082b2b19192b2b, 0x2b082b2b2b08082b, 0x2b082b2b2b2b082b, 0x2b19080808080819, + 0x2b19080808081908, 0x2b19080808082b19, 0x2b19080808190808, 0x2b1908080819082b, + 0x2b19080808191919, 0x2b19080808192b08, 0x2b190808082b1908, 0x2b19080819080808, + 0x2b1908081908082b, 0x2b19080819081919, 0x2b19080819082b08, 0x2b19080819190819, + 0x2b19080819191908, 0x2b190808192b0808, 0x2b1908082b080819, 0x2b1908082b081908, + 0x2b1908082b190808, 0x2b19081908080808, 0x2b19081908081919, 0x2b19081908190819, + 0x2b19081908191908, 0x2b19081919080819, 0x2b19081919081908, 0x2b19081919190808, + 0x2b19081919192b2b, 0x2b19082b08080819, 0x2b19082b08081908, 0x2b19082b08190808, + 0x2b19082b19080808, 0x2b19082b2b2b192b, 0x2b19190808080808, 0x2b1919080808082b, + 0x2b19190808081919, 0x2b19190808082b08, 0x2b19190808190819, 0x2b19190808191908, + 0x2b191908082b0808, 0x2b19190819080819, 0x2b19190819081908, 0x2b19190819190808, + 0x2b1919082b080808, 0x2b1919082b19192b, 0x2b19191908080819, 0x2b19191908081908, + 0x2b19191908190808, 0x2b19191919080808, 0x2b1919192b192b08, 0x2b1919192b2b0819, + 0x2b19192b08080808, 0x2b19192b1908192b, 0x2b19192b192b1908, 0x2b192b0808080819, + 0x2b192b0808081908, 0x2b192b0808190808, 0x2b192b08082b192b, 0x2b192b0819080808, + 0x2b192b082b2b2b19, 
0x2b192b1908080808, 0x2b192b1919082b19, 0x2b192b191919082b, + 0x2b192b2b2b190808, 0x2b2b080808080808, 0x2b2b080808081919, 0x2b2b080808082b2b, + 0x2b2b080808191908, 0x2b2b0808082b082b, 0x2b2b0808082b2b2b, 0x2b2b080819080819, + 0x2b2b080819081908, 0x2b2b080819190808, 0x2b2b08082b2b082b, 0x2b2b08082b2b2b2b, + 0x2b2b081919080808, 0x2b2b0819192b1919, 0x2b2b082b0808082b, 0x2b2b082b08082b2b, + 0x2b2b082b082b082b, 0x2b2b082b082b2b08, 0x2b2b082b082b2b2b, 0x2b2b082b2b08082b, + 0x2b2b082b2b082b08, 0x2b2b082b2b082b2b, 0x2b2b082b2b2b2b08, 0x2b2b190808080819, + 0x2b2b190808081908, 0x2b2b190808190808, 0x2b2b190819080808, 0x2b2b19082b082b19, + 0x2b2b19082b2b1908, 0x2b2b191908080808, 0x2b2b191908192b19, 0x2b2b192b19190819, + 0x2b2b2b0808082b2b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b082b, 0x2b2b2b1919191908, + 0x2b2b2b192b08192b, 0x2b2b2b2b08082b08, 0x2b2b2b2b08082b2b, 0x2b2b2b2b082b0808, + 0x2b2b2b2b082b082b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b2b082b08, 0x2b2b2b2b2b2b2b2b, +}; + static const uint32_t iq3xxs_grid[256] = { 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, 0x04041404, 0x04041414, 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, @@ -3796,6 +4055,38 @@ void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, } } +// ====================== 2.5625 bpw (de)-quantization + +void dequantize_row_iq2_s(const block_iq2_s * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + float db[2]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = qs + QK_K/8; + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + db[0] = d * (0.5f + (x[i].scales[ib32] & 0xf)) * 0.25f; + db[1] = d * (0.5f + (x[i].scales[ib32] >> 4)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const float dl = db[l/2]; + const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300))); + for (int j = 0; j < 8; ++j) { + y[j] = dl * grid[j] * (signs[l] & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + y += 8; + } + qs += 4; + signs += 4; + } + } +} + // ====================== 3.0625 bpw (de)-quantization void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y, int k) { @@ -9330,6 +9621,210 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * #endif } +void ggml_vec_dot_iq2_s_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq2_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; + + const uint8x16x2_t mask1 = vld1q_u8_x2(k_mask1); + const uint8x16_t mask2 = vld1q_u8(k_mask2); + const uint8x16_t m1 = vdupq_n_u8(1); + const int32x4_t vzero = vdupq_n_s32(0); + + uint8x16x2_t vs; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8); + const int8_t * restrict q8 = y[i].qs; + + int sumi1 = 0, sumi2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + q2s.val[0] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[0] | ((qh[ib32+0] << 8) & 0x300)))), + vld1_s8((const int8_t *)(iq2s_grid + (qs[1] | ((qh[ib32+0] << 6) & 0x300))))); + q2s.val[1] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[2] | ((qh[ib32+0] << 4) & 0x300)))), + vld1_s8((const int8_t *)(iq2s_grid + (qs[3] | ((qh[ib32+0] << 2) & 0x300))))); + q2s.val[2] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[4] | ((qh[ib32+1] << 8) & 0x300)))), + vld1_s8((const int8_t *)(iq2s_grid + (qs[5] | ((qh[ib32+1] << 6) & 0x300))))); + q2s.val[3] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[6] | ((qh[ib32+1] << 4) & 0x300)))), + vld1_s8((const int8_t *)(iq2s_grid + (qs[7] | ((qh[ib32+1] << 2) & 0x300))))); + qs += 8; + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + q2s.val[0] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[0], m1)), q2s.val[0]); + q2s.val[1] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[1], m1)), q2s.val[1]); + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + signs += 4; + + q2s.val[2] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[0], m1)), q2s.val[2]); + q2s.val[3] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[1], m1)), q2s.val[3]); + + const int32x4_t p1 = ggml_vdotq_s32(vzero, q2s.val[0], q8b.val[0]); + 
const int32x4_t p2 = ggml_vdotq_s32(vzero, q2s.val[1], q8b.val[1]); + const int32x4_t p3 = ggml_vdotq_s32(vzero, q2s.val[2], q8b.val[2]); + const int32x4_t p4 = ggml_vdotq_s32(vzero, q2s.val[3], q8b.val[3]); + + sumi1 += vaddvq_s32(p1) * (1 + 2*(x[i].scales[ib32+0] & 0xf)); + sumi2 += vaddvq_s32(p2) * (1 + 2*(x[i].scales[ib32+0] >> 4)); + sumi1 += vaddvq_s32(p3) * (1 + 2*(x[i].scales[ib32+1] & 0xf)); + sumi2 += vaddvq_s32(p4) * (1 + 2*(x[i].scales[ib32+1] >> 4)); + } + sumf += d*(sumi1 + sumi2); + } + + *s = 0.125f * sumf; + +#elif defined(__AVX2__) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + + const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1); + const __m256i mask2 = _mm256_loadu_si256((const __m256i*)k_mask2); + + uint64_t aux64; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8); + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + const __m128i scales8 = _mm_add_epi8(_mm_slli_epi16(_mm_and_si128(_mm_set_epi64x(aux64 >> 4, aux64), m4), 1), m1); + const __m256i scales16 = _mm256_cvtepi8_epi16(scales8); // 0 2 4 6 8 10 12 14 1 3 5 7 9 11 13 15 + + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q2_1 = _mm256_set_epi64x(iq2s_grid[qs[3] | ((qh[ib32+0] << 2) & 0x300)], + iq2s_grid[qs[2] | ((qh[ib32+0] << 4) & 0x300)], + iq2s_grid[qs[1] | ((qh[ib32+0] << 6) & 0x300)], + iq2s_grid[qs[0] | ((qh[ib32+0] << 8) & 0x300)]); + const __m256i q2_2 = _mm256_set_epi64x(iq2s_grid[qs[7] | ((qh[ib32+1] << 2) & 0x300)], + iq2s_grid[qs[6] | ((qh[ib32+1] << 4) & 0x300)], + iq2s_grid[qs[5] | ((qh[ib32+1] << 6) & 0x300)], + iq2s_grid[qs[4] | ((qh[ib32+1] << 8) & 0x300)]); + qs += 8; + + __m256i aux256 = _mm256_set1_epi32(signs[0] | (signs[1] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_1 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_1 = _mm256_sub_epi8(_mm256_xor_si256(s2_1, q8_1), s2_1); + + aux256 = _mm256_set1_epi32(signs[2] | (signs[3] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_2 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_2 = _mm256_sub_epi8(_mm256_xor_si256(s2_2, q8_2), s2_2); + + signs += 4; + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); // blocks 2*ib32+0, 2*ib32+1 + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); // blocks 2*ib32+2, 2*ib32+3 + + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_shuffle_epi8(scales16, get_scale_shuffle_k4(ib32+0))); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_shuffle_epi8(scales16, 
get_scale_shuffle_k4(ib32+1))); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + float sumf = 0; + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = qs + QK_K/8; + + int bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + int ls1 = 1 + 2*(x[i].scales[ib32] & 0xf); + int ls2 = 1 + 2*(x[i].scales[ib32] >> 4); + int sumi1 = 0, sumi2 = 0; + for (int l = 0; l < 2; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300))); + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j] * grid[j] * (signs[l] & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + for (int l = 2; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300))); + for (int j = 0; j < 8; ++j) { + sumi2 += q8[j] * grid[j] * (signs[l] & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += ls1 * sumi1 + ls2 * sumi2; + qs += 4; + signs += 4; + } + + sumf += d * bsum; + } + + *s = 0.125f * sumf; + +#endif + +} + void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); assert(nrc == 1); @@ -9934,22 +10429,25 @@ typedef struct { uint16_t * neighbours; } iq2_entry_t; -static iq2_entry_t iq2_data[3] = { +static iq2_entry_t iq2_data[4] = { + {NULL, NULL, NULL}, {NULL, NULL, NULL}, {NULL, NULL, NULL}, {NULL, NULL, NULL}, }; static inline int iq2_data_index(enum ggml_type type) { - GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ2_S); return type == GGML_TYPE_IQ2_XXS ? 0 : - type == GGML_TYPE_IQ2_XS ? 1 : 2; + type == GGML_TYPE_IQ2_XS ? 1 : + type == GGML_TYPE_IQ1_S ? 2 : 3; } static inline int iq2_grid_size(enum ggml_type type) { - GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ2_S); return type == GGML_TYPE_IQ2_XXS ? 256 : - type == GGML_TYPE_IQ2_XS ? 512 : 512; + type == GGML_TYPE_IQ2_XS ? 512 : + type == GGML_TYPE_IQ1_S ? 
512 : 1024; } static int iq2_compare_func(const void * left, const void * right) { @@ -10050,11 +10548,79 @@ void iq2xs_init_impl(enum ggml_type type) { 41557, 41633, 41989, 42021, 42056, 42068, 42074, 42113, 42242, 42265, 42274, 42325, 42340, 42402, 42501, 42512, 42533, 42624, 42632, 42666, 43040, 43093, 43106, 43168, 43176, 43264, 43286, 43345, 43429, 43590, 43618, 43680, }; + static const uint16_t kgrid_2bit_1024[1024] = { + 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, + 73, 80, 82, 85, 88, 97, 100, 102, 105, 128, 130, 133, 136, 145, 148, 160, + 165, 170, 257, 260, 262, 265, 272, 274, 277, 280, 289, 292, 320, 322, 325, 328, + 337, 340, 342, 345, 352, 357, 360, 385, 388, 400, 402, 405, 417, 420, 512, 514, + 517, 520, 529, 532, 544, 554, 577, 580, 582, 585, 592, 597, 640, 645, 650, 660, + 674, 1025, 1028, 1030, 1033, 1040, 1042, 1045, 1048, 1057, 1060, 1062, 1065, 1088, 1090, 1093, + 1096, 1098, 1105, 1108, 1110, 1113, 1120, 1122, 1125, 1153, 1156, 1158, 1161, 1168, 1173, 1176, + 1185, 1188, 1280, 1282, 1285, 1288, 1290, 1297, 1300, 1302, 1305, 1312, 1317, 1320, 1345, 1348, + 1350, 1353, 1360, 1362, 1365, 1368, 1377, 1380, 1408, 1410, 1413, 1416, 1425, 1428, 1440, 1537, + 1540, 1542, 1545, 1552, 1557, 1600, 1605, 1608, 1617, 1620, 1632, 1665, 1668, 1680, 2048, 2050, + 2053, 2056, 2065, 2068, 2070, 2073, 2080, 2085, 2090, 2113, 2116, 2118, 2121, 2128, 2130, 2133, + 2136, 2145, 2148, 2176, 2181, 2196, 2218, 2305, 2308, 2320, 2322, 2325, 2328, 2337, 2368, 2373, + 2376, 2385, 2388, 2400, 2433, 2448, 2560, 2577, 2580, 2594, 2600, 2602, 2640, 2713, 4097, 4100, + 4102, 4105, 4112, 4114, 4117, 4120, 4129, 4132, 4134, 4160, 4162, 4165, 4168, 4177, 4180, 4182, + 4185, 4192, 4194, 4197, 4200, 4225, 4228, 4230, 4240, 4245, 4248, 4257, 4260, 4352, 4354, 4357, + 4360, 4362, 4369, 4372, 4374, 4377, 4384, 4386, 4389, 4392, 4417, 4420, 4422, 4425, 4432, 4434, + 4437, 4440, 4449, 4452, 4480, 4482, 4485, 4488, 4497, 4500, 4609, 4612, 4617, 4624, 4629, 4641, + 4644, 4672, 4677, 4689, 4692, 4737, 4740, 4752, 5120, 5122, 5125, 5128, 5137, 5140, 5142, 5145, + 5152, 5157, 5160, 5185, 5188, 5190, 5193, 5200, 5202, 5205, 5208, 5217, 5220, 5248, 5250, 5253, + 5256, 5265, 5268, 5280, 5377, 5380, 5382, 5385, 5392, 5394, 5397, 5400, 5409, 5412, 5440, 5442, + 5445, 5448, 5457, 5460, 5472, 5505, 5508, 5520, 5632, 5637, 5640, 5649, 5652, 5664, 5697, 5700, + 5712, 5760, 5802, 6145, 6148, 6150, 6153, 6160, 6165, 6168, 6177, 6208, 6210, 6213, 6216, 6225, + 6228, 6240, 6273, 6276, 6400, 6402, 6405, 6408, 6417, 6420, 6432, 6465, 6468, 6480, 6505, 6562, + 6660, 6672, 6720, 6742, 8192, 8194, 8197, 8200, 8209, 8212, 8214, 8217, 8224, 8229, 8234, 8257, + 8260, 8272, 8274, 8277, 8292, 8320, 8330, 8340, 8362, 8449, 8452, 8464, 8466, 8469, 8481, 8512, + 8514, 8517, 8529, 8532, 8544, 8577, 8580, 8592, 8704, 8714, 8738, 8744, 8746, 8772, 8784, 8840, + 8842, 8872, 9217, 9220, 9222, 9225, 9232, 9237, 9240, 9249, 9252, 9280, 9282, 9285, 9288, 9297, + 9300, 9312, 9345, 9348, 9360, 9472, 9477, 9480, 9489, 9492, 9504, 9537, 9540, 9552, 9574, 9600, + 9729, 9732, 9744, 9792, 9817, 10240, 10245, 10257, 10260, 10305, 10308, 10320, 10378, 10410, 10497, 10500, + 10512, 10645, 10762, 10786, 10852, 10888, 10890, 16385, 16388, 16390, 16393, 16400, 16402, 16405, 16408, 16410, + 16417, 16420, 16422, 16448, 16450, 16453, 16456, 16458, 16465, 16468, 16470, 16473, 16480, 16482, 16485, 16513, + 16516, 16528, 16533, 16536, 16545, 16548, 16640, 16642, 16645, 16648, 16657, 16660, 16662, 16665, 16672, 16674, + 16677, 16705, 16708, 16710, 16713, 
16720, 16722, 16725, 16728, 16737, 16740, 16768, 16770, 16773, 16776, 16785, + 16788, 16800, 16897, 16900, 16912, 16914, 16917, 16920, 16932, 16960, 16965, 16968, 16977, 16980, 16992, 17025, + 17028, 17408, 17410, 17413, 17416, 17418, 17425, 17428, 17430, 17433, 17440, 17442, 17445, 17448, 17473, 17476, + 17478, 17481, 17488, 17490, 17493, 17496, 17505, 17508, 17536, 17538, 17541, 17544, 17553, 17556, 17568, 17665, + 17668, 17670, 17673, 17680, 17682, 17685, 17688, 17697, 17700, 17728, 17730, 17733, 17736, 17745, 17748, 17760, + 17770, 17793, 17796, 17808, 17920, 17922, 17925, 17928, 17937, 17940, 17952, 17985, 17988, 18000, 18048, 18085, + 18433, 18436, 18441, 18448, 18450, 18453, 18456, 18465, 18468, 18496, 18498, 18501, 18504, 18513, 18516, 18528, + 18564, 18576, 18688, 18690, 18693, 18696, 18705, 18708, 18720, 18753, 18756, 18768, 18816, 18838, 18945, 18948, + 18960, 19008, 20480, 20482, 20485, 20488, 20497, 20500, 20502, 20505, 20512, 20514, 20517, 20520, 20545, 20548, + 20550, 20553, 20560, 20562, 20565, 20568, 20577, 20580, 20608, 20610, 20613, 20616, 20625, 20628, 20737, 20740, + 20742, 20745, 20752, 20754, 20757, 20760, 20769, 20772, 20800, 20802, 20805, 20808, 20817, 20820, 20832, 20865, + 20868, 20880, 20992, 20997, 21000, 21009, 21012, 21024, 21057, 21060, 21072, 21097, 21120, 21505, 21508, 21510, + 21513, 21520, 21522, 21525, 21528, 21537, 21540, 21568, 21570, 21573, 21576, 21585, 21588, 21600, 21633, 21636, + 21648, 21760, 21762, 21765, 21768, 21777, 21780, 21792, 21825, 21828, 21840, 21888, 22017, 22020, 22032, 22054, + 22080, 22528, 22530, 22533, 22536, 22545, 22548, 22560, 22593, 22596, 22608, 22618, 22656, 22785, 22788, 22800, + 22848, 23040, 23065, 23173, 23208, 24577, 24580, 24582, 24592, 24594, 24597, 24600, 24609, 24612, 24640, 24645, + 24648, 24657, 24660, 24672, 24708, 24720, 24832, 24834, 24837, 24840, 24849, 24852, 24864, 24897, 24900, 24912, + 24960, 24985, 25092, 25104, 25152, 25174, 25249, 25600, 25605, 25608, 25617, 25620, 25632, 25665, 25668, 25680, + 25728, 25857, 25860, 25872, 25920, 25930, 25960, 26002, 26112, 26260, 26625, 26628, 26640, 26725, 26776, 26880, + 26922, 27202, 27297, 32768, 32770, 32773, 32776, 32785, 32788, 32793, 32800, 32805, 32833, 32836, 32848, 32850, + 32853, 32856, 32865, 32896, 32901, 32913, 32916, 33025, 33028, 33033, 33040, 33042, 33045, 33048, 33057, 33060, + 33088, 33090, 33093, 33096, 33105, 33108, 33153, 33156, 33168, 33193, 33280, 33285, 33290, 33297, 33300, 33345, + 33348, 33360, 33793, 33796, 33798, 33801, 33808, 33810, 33813, 33816, 33825, 33856, 33858, 33861, 33864, 33873, + 33876, 33888, 33921, 33924, 33936, 34048, 34050, 34053, 34056, 34065, 34068, 34080, 34113, 34116, 34128, 34176, + 34186, 34305, 34308, 34320, 34345, 34368, 34816, 34821, 34833, 34836, 34881, 34884, 34896, 34978, 35073, 35076, + 35136, 35173, 35362, 35416, 35418, 35458, 35490, 36865, 36868, 36873, 36880, 36882, 36885, 36888, 36900, 36928, + 36930, 36933, 36936, 36945, 36948, 36960, 36993, 36996, 37008, 37120, 37125, 37137, 37140, 37185, 37188, 37200, + 37210, 37377, 37380, 37392, 37440, 37542, 37888, 37890, 37893, 37896, 37905, 37908, 37920, 37953, 37956, 37968, + 38016, 38038, 38145, 38148, 38160, 38208, 38296, 38305, 38400, 38470, 38500, 38913, 38916, 38928, 38950, 38976, + 39081, 39168, 39241, 39250, 39568, 40960, 40965, 40970, 40980, 40994, 41002, 41025, 41028, 41040, 41122, 41130, + 41280, 41317, 41474, 41482, 41506, 41512, 41514, 41602, 41608, 41610, 41640, 41985, 41988, 42000, 42048, 42121, + 42148, 42240, 42265, 42577, 43018, 43048, 43170, 43348, 
43398, 43528, 43530, 43552, 43554, 43560, 43656, 43690, + }; const int kmap_size = 43692; - const int nwant = type == GGML_TYPE_IQ1_S ? 3 : 2; + //const int nwant = type == GGML_TYPE_IQ1_S ? 3 : 2; + const int nwant = type == GGML_TYPE_IQ1_S ? 3 : type == GGML_TYPE_IQ2_S ? 1 : 2; const uint16_t * kgrid = type == GGML_TYPE_IQ2_XXS ? kgrid_2bit_256 : - type == GGML_TYPE_IQ2_XS ? kgrid_2bit_512 : kgrid_1bit_512; + type == GGML_TYPE_IQ2_XS ? kgrid_2bit_512 : + type == GGML_TYPE_IQ1_S ? kgrid_1bit_512 : kgrid_2bit_1024; uint64_t * kgrid_q2xs; int * kmap_q2xs; uint16_t * kneighbors_q2xs; @@ -10151,7 +10717,7 @@ void iq2xs_init_impl(enum ggml_type type) { } void iq2xs_free_impl(enum ggml_type type) { - GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ2_S); const int gindex = iq2_data_index(type); if (iq2_data[gindex].grid) { free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; @@ -11557,3 +12123,196 @@ void quantize_row_iq4_nl_reference(const float * restrict x, block_iq4_nl * rest quantize_iq4_nl(x, y, 1, k, NULL, NULL); } +// =============================== 2.5625 bpw + +static void quantize_row_iq2_s_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ2_S); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int nbl = n/256; + + block_iq2_s * y = vy; + + float scales[QK_K/16]; + float weight[16]; + float xval[16]; + int8_t L[16]; + int8_t Laux[16]; + float waux[16]; + bool is_on_grid[2]; + bool is_on_grid_aux[2]; + uint8_t block_signs[2]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + memset(&y[ibl], 0, sizeof(block_iq2_s)); + y[ibl].d = GGML_FP32_TO_FP16(0.f); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/16; ++ib) { + const float * xb = xbl + 16*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + 16*ib; + for (int i = 0; i < 16; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < 16; ++i) weight[i] = 0.25f*sigma2 + xb[i]*xb[i]; + } + for (int i = 0; i < 16; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 2; ++k) { + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; s |= (1 << i); + } + } + block_signs[k] = s; + } + float max = xval[0]; + for (int i = 1; i < 16; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + is_on_grid[0] = is_on_grid[1] = true; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 2; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= 
(Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 16; ++i) L[i] = Laux[i]; + for (int k = 0; k < 2; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 2; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 2; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + L[8*k + i] = l; + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + scale = -scale; + for (int k = 0; k < 2; ++k) block_signs[k] = ~block_signs[k]; + } + for (int k = 0; k < 2; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + const int i8 = 2*ib + k; + y[ibl].qs[i8] = grid_index & 255; + y[ibl].qh[i8/4] |= ((grid_index >> 8) << 2*(i8%4)); + y[ibl].qs[QK_K/8 + i8] = block_signs[k]; + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d * 0.9875f); + float id = 1/d; + for (int ib = 0; ib < QK_K/16; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + if (ib%2 == 0) y[ibl].scales[ib/2] = l; + else y[ibl].scales[ib/2] |= (l << 4); + } + } +} + +size_t quantize_iq2_s(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq2_s_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_s); + } + return nrow * nblock * sizeof(block_iq2_s); +} + +void quantize_row_iq2_s_reference(const float * restrict x, block_iq2_s * restrict y, int k) { + assert(k % QK_K == 0); + quantize_iq2_s(x, y, 1, k, NULL, NULL); +} + +void quantize_row_iq2_s(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq2_s * restrict y = vy; + quantize_row_iq2_s_reference(x, y, k); +} diff --git a/ggml-quants.h b/ggml-quants.h index 303b0b6f9..4731dde0c 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -182,6 +182,15 @@ typedef struct { } block_iq2_xs; static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + 
QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); +// 2.5625 bpw quants +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t scales[QK_K/32]; +} block_iq2_s; +static_assert(sizeof(block_iq2_s) == sizeof(ggml_fp16_t) + QK_K/4 + QK_K/16, "wrong iq2_s block size/padding"); + // (Almost) "true" 3-bit quantization. // Due to the need to use blocks as per ggml design, it ends up using // 3.0625 bpw because of the 16-bit scale for each block of 256. @@ -242,6 +251,7 @@ void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGM void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); void quantize_row_iq3_s_reference (const float * GGML_RESTRICT x, block_iq3_s * GGML_RESTRICT y, int k); +void quantize_row_iq2_s_reference (const float * GGML_RESTRICT x, block_iq2_s * GGML_RESTRICT y, int k); void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -259,6 +269,7 @@ void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq2_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -276,6 +287,7 @@ void dequantize_row_q6_K(const block_q6_K * GGML_RESTRICT x, float * GGML_RESTRI void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq2_s (const block_iq2_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -295,6 +307,7 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const voi void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq1_s_q8_K (int n, float * 
GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -305,6 +318,7 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const // size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq2_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index 1d81553f4..6be07bb6f 100644 --- a/ggml.c +++ b/ggml.c @@ -690,6 +690,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ2_S] = { + .type_name = "iq2_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_s, + .from_float = quantize_row_iq2_s, + .from_float_reference = (ggml_from_float_t)quantize_row_iq2_s_reference, + .vec_dot = ggml_vec_dot_iq2_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_IQ1_S] = { .type_name = "iq1_s", .blck_size = QK_K, @@ -2317,6 +2329,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; + case GGML_FTYPE_MOSTLY_IQ2_S: wtype = GGML_TYPE_IQ2_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7752,6 +7765,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: { ggml_compute_forward_add_q_f32(params, dst); } break; @@ -8032,6 +8046,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: { ggml_compute_forward_add1_q_f32(params, dst); } break; @@ -8157,6 +8172,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: default: { GGML_ASSERT(false); @@ -11056,6 +11072,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: { ggml_compute_forward_out_prod_q_f32(params, dst); } break; @@ -11245,6 +11262,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: default: { GGML_ASSERT(false); @@ -11448,6 +11466,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: { ggml_compute_forward_get_rows_q(params, dst); } break; @@ -12149,6 +12168,7 @@ static void ggml_compute_forward_alibi( case 
GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -12233,6 +12253,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19482,6 +19503,7 @@ void ggml_quantize_init(enum ggml_type type) { switch (type) { case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ2_S: case GGML_TYPE_IQ1_S: iq2xs_init_impl(type); break; case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; case GGML_TYPE_IQ3_S: iq3xs_init_impl(512); break; @@ -19768,6 +19790,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq3_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ2_S: + { + GGML_ASSERT(start % QK_K == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq2_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_IQ1_S: { GGML_ASSERT(start % QK_K == 0); diff --git a/ggml.h b/ggml.h index 75fd035a4..8c7ca4588 100644 --- a/ggml.h +++ b/ggml.h @@ -351,6 +351,7 @@ extern "C" { GGML_TYPE_IQ1_S = 19, GGML_TYPE_IQ4_NL = 20, GGML_TYPE_IQ3_S = 21, + GGML_TYPE_IQ2_S = 22, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -391,6 +392,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_S = 20, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_S = 21, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index f549e7d04..80dc4d166 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2579,6 +2579,7 @@ struct llama_model_loader { case GGML_TYPE_Q6_K: ftype = LLAMA_FTYPE_MOSTLY_Q6_K; break; case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break; case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; + case GGML_TYPE_IQ2_S: ftype = LLAMA_FTYPE_MOSTLY_IQ2_S; break; case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; @@ -2933,7 +2934,9 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XXS - 2.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw"; - case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; + case LLAMA_FTYPE_MOSTLY_IQ2_S: return "IQ2_S - 2.5 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ2_M: return "IQ2_M - 2.7 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_XS: return "IQ3_XS - 3.3 bpw"; case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; @@ -10761,31 +10764,47 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == 
LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || + ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M) { new_type = GGML_TYPE_Q5_K; } else if (new_type != GGML_TYPE_Q8_0) { new_type = GGML_TYPE_Q6_K; } } else if (name == "token_embd.weight") { - if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || + ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { new_type = GGML_TYPE_Q2_K; } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { - new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M) { + new_type = GGML_TYPE_IQ3_S; } - } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { + new_type = GGML_TYPE_IQ3_S; + } + } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || + ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M) { if (name.find("attn_v.weight") != std::string::npos) { if (qs.model.hparams.n_gqa() >= 4 || qs.model.hparams.n_expert >= 4) new_type = GGML_TYPE_Q4_K; - else new_type = GGML_TYPE_Q2_K; + else new_type = ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M ? GGML_TYPE_IQ3_S : GGML_TYPE_Q2_K; ++qs.i_attention_wv; } + else if (qs.model.hparams.n_expert == 8 && name.find("attn_k.weight") != std::string::npos) { + new_type = GGML_TYPE_Q4_K; + } else if (name.find("ffn_down") != std::string::npos) { - if (qs.i_ffn_down < qs.n_ffn_down/8) new_type = GGML_TYPE_Q2_K; + if (qs.i_ffn_down < qs.n_ffn_down/8) { + new_type = ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M ? GGML_TYPE_IQ3_S : GGML_TYPE_Q2_K; + } ++qs.i_ffn_down; } else if (name.find("attn_output.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) new_type = GGML_TYPE_IQ2_XXS; + if (qs.model.hparams.n_expert == 8) { + new_type = GGML_TYPE_Q5_K; + } else { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) new_type = GGML_TYPE_IQ2_XXS; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_S || ftype == LLAMA_FTYPE_MOSTLY_IQ2_M) new_type = GGML_TYPE_IQ3_S; + } } } else if (name.find("attn_v.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) { @@ -10795,7 +10814,13 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { - new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? GGML_TYPE_Q3_K : GGML_TYPE_IQ3_XXS; + new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? 
GGML_TYPE_IQ3_S : GGML_TYPE_IQ3_XXS; + } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_S && qs.model.hparams.n_gqa() >= 4) { + new_type = GGML_TYPE_Q4_K; + } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_S && qs.model.hparams.n_gqa() >= 4) { new_type = GGML_TYPE_Q4_K; @@ -10833,13 +10858,19 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty // TODO: explore better strategies new_type = GGML_TYPE_Q8_0; } - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS) { new_type = GGML_TYPE_IQ3_XXS; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { + new_type = GGML_TYPE_IQ2_S; + } } else if (name.find("attn_q.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS) { new_type = GGML_TYPE_IQ3_XXS; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { + new_type = GGML_TYPE_IQ2_S; + } } else if (name.find("ffn_down") != std::string::npos) { auto info = layer_info(qs.i_ffn_down, qs.n_ffn_down, name.c_str()); int i_layer = info.first, n_layer = info.second; @@ -10888,7 +10919,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } else if (name.find("attn_output.weight") != std::string::npos) { if (arch != LLM_ARCH_FALCON) { if (qs.model.hparams.n_expert == 8) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ3_S || ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { @@ -10896,7 +10927,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } } else { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) new_type = GGML_TYPE_Q3_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) new_type = GGML_TYPE_IQ3_S; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M ) new_type = GGML_TYPE_Q4_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L ) new_type = GGML_TYPE_Q5_K; else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M ) new_type = GGML_TYPE_Q4_K; @@ -10915,7 +10946,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (name.find("ffn_gate") != std::string::npos) { auto info = layer_info(qs.i_ffn_gate, qs.n_ffn_gate, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_gate; @@ -10923,7 +10954,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (name.find("ffn_up") != std::string::npos) { auto info = layer_info(qs.i_ffn_up, qs.n_ffn_up, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_up; @@ -10943,7 +10974,7 @@ static ggml_type 
get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty bool convert_incompatible_tensor = false; if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || - new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || + new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_S || new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || new_type == GGML_TYPE_IQ3_S) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; @@ -10958,6 +10989,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty switch (new_type) { case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ2_S: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ1_S: @@ -10991,7 +11023,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K_S: case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; - case LLAMA_FTYPE_MOSTLY_Q3_K_XS: quantized_type = GGML_TYPE_IQ3_S; break; + case LLAMA_FTYPE_MOSTLY_IQ3_XS: quantized_type = GGML_TYPE_IQ3_S; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; @@ -11002,6 +11034,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; + case LLAMA_FTYPE_MOSTLY_IQ2_S: quantized_type = GGML_TYPE_IQ2_XS; break; + case LLAMA_FTYPE_MOSTLY_IQ2_M: quantized_type = GGML_TYPE_IQ2_S; break; case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; @@ -11180,6 +11214,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS || + new_type == GGML_TYPE_IQ2_S || new_type == GGML_TYPE_IQ1_S || (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { LLAMA_LOG_ERROR("\n\n============================================================\n"); diff --git a/llama.h b/llama.h index ff131996d..3ff77d5a8 100644 --- a/llama.h +++ b/llama.h @@ -107,12 +107,14 @@ extern "C" { LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XS = 20, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q2_K_S = 21, // except 1d tensors - LLAMA_FTYPE_MOSTLY_Q3_K_XS = 22, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ3_XS = 22, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ4_NL = 25, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_S = 26, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_M = 27, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ2_S = 28, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ2_M = 29, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 24d12ef14..60a852779 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1916,7 +1916,7 @@ static bool 
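The convert_incompatible_tensor fallback in the hunk above exists because the k- and i-quants pack whole super-blocks, so (in llama.cpp) a row whose length is not a multiple of QK_K = 256 cannot use them and gets demoted to a simpler type. A trivial sketch of the divisibility test, with a hypothetical helper name rather than the real API:

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical helper (not the llama.cpp API): the k- and i-quants pack whole
// super-blocks, so a row of length ne0 can only use them when ne0 is a
// multiple of the super-block size (QK_K = 256).
static bool row_is_quantizable(int64_t ne0, int64_t qk) {
    return ne0 % qk == 0;
}

int main() {
    printf("4096: %d\n", (int) row_is_quantizable(4096, 256)); // 1 -> keep the chosen type
    printf("4000: %d\n", (int) row_is_quantizable(4000, 256)); // 0 -> convert_incompatible_tensor path
}
```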
test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q2_K, GGML_TYPE_Q3_K, GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, GGML_TYPE_Q6_K, - GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, + GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ2_S, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, GGML_TYPE_IQ4_NL, GGML_TYPE_IQ3_S, }; diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 04656bb9e..f615b612d 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -150,6 +150,7 @@ int main(int argc, char * argv[]) { const float total_error = total_quantization_error(qfns, test_size, test_data.data()); const float max_quantization_error = type == GGML_TYPE_Q2_K ? MAX_QUANTIZATION_TOTAL_ERROR_2BITS : + type == GGML_TYPE_IQ2_S ? MAX_QUANTIZATION_TOTAL_ERROR_2BITS : type == GGML_TYPE_Q3_K ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : type == GGML_TYPE_IQ3_S ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : type == GGML_TYPE_IQ3_XXS ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS_XXS : MAX_QUANTIZATION_TOTAL_ERROR; @@ -168,7 +169,8 @@ int main(int argc, char * argv[]) { const float vec_dot_error = dot_product_error(qfns, test_size, test_data.data(), test_data2.data()); const float max_allowed_error = type == GGML_TYPE_Q2_K || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ2_XXS || - type == GGML_TYPE_IQ3_XXS || type == GGML_TYPE_IQ3_S ? MAX_DOT_PRODUCT_ERROR_LOWBIT + type == GGML_TYPE_IQ3_XXS || type == GGML_TYPE_IQ3_S || type == GGML_TYPE_IQ2_S + ? MAX_DOT_PRODUCT_ERROR_LOWBIT : MAX_DOT_PRODUCT_ERROR; failed = !(vec_dot_error < max_allowed_error); num_failed += failed; From b11a93df41921846a10628a7c306d5c82a549939 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Mon, 26 Feb 2024 23:15:48 +0100 Subject: [PATCH 721/811] fix server hangs on empty prompt (#5733) --- examples/server/server.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8aadc95a9..846ef7e5f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1336,6 +1336,10 @@ struct llama_server_context split_multiprompt_task(task_id, task); } } else { + // an empty prompt can make slot become buggy + if (task.data.contains("prompt") && task.data["prompt"].is_string() && task.data["prompt"].get().empty()) { + task.data["prompt"] = " "; // add a space so that we have one token + } queue_tasks.post(task); } } From cbbd1efa06f8c09f9dff58ff9d9af509cc4c152b Mon Sep 17 00:00:00 2001 From: "le.chang" Date: Tue, 27 Feb 2024 10:03:06 +0800 Subject: [PATCH 722/811] Makefile: use variables for cublas (#5689) * make: use arch variable for cublas * fix UNAME_M * check opt first --------- Co-authored-by: lindeer --- Makefile | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 068f6ed02..4f26c0463 100644 --- a/Makefile +++ b/Makefile @@ -381,8 +381,13 @@ ifdef LLAMA_BLIS endif # LLAMA_BLIS ifdef LLAMA_CUBLAS - MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include - MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib + ifneq ('', '$(wildcard /opt/cuda)') + CUDA_PATH ?= /opt/cuda + else + CUDA_PATH ?= /usr/local/cuda + endif + MK_CPPFLAGS += -DGGML_USE_CUBLAS -I$(CUDA_PATH)/include -I$(CUDA_PATH)/targets/$(UNAME_M)-linux/include + 
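On the empty-prompt fix from #5733 above: the guard simply rewrites a zero-length string prompt to a single space before the task is queued, so tokenization always yields at least one token and the slot has something to work on. A minimal stand-alone sketch of that check, assuming only a bare nlohmann::json payload in place of the server's task type:

```cpp
#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

// Mirrors the guard added in #5733: an empty string prompt would otherwise
// leave the slot with nothing to process, so it is replaced by one space.
static void sanitize_prompt(nlohmann::json & data) {
    if (data.contains("prompt") && data["prompt"].is_string() &&
        data["prompt"].get<std::string>().empty()) {
        data["prompt"] = " "; // one space -> at least one token after tokenization
    }
}

int main() {
    nlohmann::json task = {{"prompt", ""}};
    sanitize_prompt(task);
    std::cout << task.dump() << std::endl; // {"prompt":" "}
}
```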
MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L$(CUDA_PATH)/lib64 -L/usr/lib64 -L$(CUDA_PATH)/targets/$(UNAME_M)-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o MK_NVCCFLAGS += -use_fast_math ifdef LLAMA_FATAL_WARNINGS From 9d533a77d0c3850ce09d736bc1baa67fd6ad27b3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 27 Feb 2024 14:35:51 +0200 Subject: [PATCH 723/811] llama : fix defrag bugs + add parameter (#5735) * llama : fix defrag bugs + enable by default ggml-ci * llama : add defrag_thold parameter ggml-ci * llama : cont * llama : disable log message ggml-ci * llama : fix graph size check during defrag --- common/common.cpp | 9 ++++ common/common.h | 1 + examples/passkey/passkey.cpp | 4 +- llama.cpp | 97 +++++++++++++++++++++++++----------- llama.h | 1 + 5 files changed, 82 insertions(+), 30 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index ec596f5a0..18289755c 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -335,6 +335,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.yarn_beta_slow = std::stof(argv[i]); + } else if (arg == "--defrag-thold" || arg == "-dt") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.defrag_thold = std::stof(argv[i]); } else if (arg == "--samplers") { if (++i >= argc) { invalid_param = true; @@ -1004,6 +1010,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); + printf(" -dt N, --defrag-thold N\n"); + printf(" KV cache defragmentation threshold (default: %.1f, < 0 - disabled)\n", params.defrag_thold); printf(" --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); printf(" --no-penalize-nl do not penalize newline token\n"); printf(" --temp N temperature (default: %.1f)\n", (double)sparams.temp); @@ -1285,6 +1293,7 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.yarn_orig_ctx = params.yarn_orig_ctx; + cparams.defrag_thold = params.defrag_thold; cparams.offload_kqv = !params.no_kv_offload; cparams.type_k = kv_cache_type_from_str(params.cache_type_k); diff --git a/common/common.h b/common/common.h index 3e21579b0..25003df26 100644 --- a/common/common.h +++ b/common/common.h @@ -75,6 +75,7 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length + float defrag_thold = -1.0f; // KV cache defragmentation threshold int32_t rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp index 47de67a93..2cbc9e1fa 100644 --- a/examples/passkey/passkey.cpp +++ b/examples/passkey/passkey.cpp @@ -182,7 +182,7 @@ int main(int argc, char ** argv) { llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); llama_kv_cache_seq_add(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); - llama_kv_cache_defrag (ctx); + //llama_kv_cache_defrag (ctx); llama_kv_cache_update (ctx); n_past = 
llama_kv_cache_seq_pos_max(ctx, 0) + 1; @@ -213,7 +213,7 @@ int main(int argc, char ** argv) { llama_kv_cache_seq_rm (ctx, 0, n_keep , n_keep + n_discard); llama_kv_cache_seq_add(ctx, 0, n_keep + n_discard, n_ctx, -n_discard); - llama_kv_cache_defrag (ctx); + //llama_kv_cache_defrag (ctx); llama_kv_cache_update (ctx); n_past = llama_kv_cache_seq_pos_max(ctx, 0) + 1; diff --git a/llama.cpp b/llama.cpp index 80dc4d166..6729bb99c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1641,6 +1641,7 @@ struct llama_cparams { float yarn_attn_factor; float yarn_beta_fast; float yarn_beta_slow; + float defrag_thold; bool mul_mat_q; bool offload_kqv; @@ -5117,16 +5118,16 @@ struct llm_build_context { struct ggml_cgraph * build_defrag(const std::vector & ids) { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); - for (int i = 0; i < n_kv; ++i) { - const int id = ids[i]; + for (uint32_t i = 0; i < ids.size(); ++i) { + const uint32_t id = ids[i]; - if (i == id || id == n_kv) { + if (i == id || id == ids.size()) { continue; } - int nm = 1; + uint32_t nm = 1; - while (i + nm < n_kv && (int) ids[i + nm] == id + nm) { + while (i + nm < ids.size() && ids[i + nm] == id + nm) { nm++; } @@ -5158,6 +5159,8 @@ struct llm_build_context { i += nm - 1; } + //LLAMA_LOG_INFO("gf->n_nodes = %d\n", gf->n_nodes); + return gf; } @@ -7938,6 +7941,8 @@ static int llama_decode_internal( batch.seq_id = seq_id_arr.data(); } + llama_kv_cache_update(&lctx); + // if we have enough unused cells before the current head -> // better to start searching from the beginning of the cache, hoping to fill it if (kv_self.head > kv_self.used + 2*n_tokens) { @@ -7956,8 +7961,6 @@ static int llama_decode_internal( //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); - llama_kv_cache_update(&lctx); - ggml_backend_sched_reset(lctx.sched); ggml_backend_sched_set_eval_callback(lctx.sched, lctx.cparams.cb_eval, lctx.cparams.cb_eval_user_data); @@ -8007,6 +8010,18 @@ static int llama_decode_internal( } } + // decide if we need to defrag the kv cache + if (cparams.defrag_thold >= 0.0f) { + const float fragmentation = kv_self.n >= 128 ? 
1.0f - float(kv_self.used + n_tokens)/float(kv_self.n) : 0.0f; + + // queue defragmentation for next llama_kv_cache_update + if (fragmentation > cparams.defrag_thold) { + //LLAMA_LOG_INFO("fragmentation: %.2f\n", fragmentation); + + llama_kv_cache_defrag(kv_self); + } + } + #ifdef GGML_PERF // print timing information per ggml operation (for debugging purposes) // requires GGML_PERF to be defined @@ -8098,12 +8113,16 @@ static int llama_decode_internal( static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { auto & kv_self = lctx.kv_self; + const auto & hparams = lctx.model.hparams; + + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_kv = llama_kv_cache_cell_max(kv_self); const uint32_t n_used = kv_self.used; assert(n_used <= n_kv); - const int64_t t_start = ggml_time_us(); + //const int64_t t_start = ggml_time_us(); // number of cells moved uint32_t n_moves = 0; @@ -8127,15 +8146,26 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { // found a hole - fill it with data from the end of the cache - // determine the size of the hole uint32_t nh = 1; + + // determine the size of the hole while (i0 + nh < n_used && kv_self.cells[i0 + nh].is_empty()) { nh++; } - // starting from the end, find nh non-empty cells + // each move requires 6*n_layer tensors (see build_defrag) + // - source view, destination view, copy operation + // - x2 for keys and values + // + if (6*(n_moves + nh)*n_layer >= LLAMA_MAX_NODES) { + // the graph is too big, we cannot move more cells + break; + } + uint32_t nf = 0; uint32_t is = n_kv - 1; + + // starting from the end, find nh non-empty cells for (; is > i0; --is) { const auto & cell1 = kv_self.cells[is]; @@ -8156,11 +8186,17 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { nf = 0; + uint32_t i1 = is; + + // are we moving a continuous block of memory? 
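The trigger added in #5735 measures fragmentation as the share of cells in the active part of the KV cache that are unused, and only considers defragmenting once the cache spans at least 128 cells and a non-negative threshold was configured (the default of -1.0f keeps it off). A small sketch of that decision with the llama-internal state replaced by plain integers:

```cpp
#include <cstdint>
#include <cstdio>

// Mirrors the check in llama_decode_internal (#5735): fragmentation is the
// fraction of cells in [0, n) that are not used; defrag is only queued once
// the cache spans at least 128 cells and a threshold >= 0 was set.
static bool should_defrag(uint32_t n, uint32_t used, uint32_t n_tokens, float defrag_thold) {
    if (defrag_thold < 0.0f) {
        return false; // disabled (the default)
    }
    const float fragmentation = n >= 128 ? 1.0f - float(used + n_tokens)/float(n) : 0.0f;
    return fragmentation > defrag_thold;
}

int main() {
    // 512-cell cache, 300 cells used, decoding 16 more tokens, threshold 0.1
    printf("defrag: %d\n", (int) should_defrag(512, 300, 16, 0.1f)); // prints 1
}
```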
+ bool cont = false; + // go back and move the nf cells to the hole - for (uint32_t i1 = is; i1 < n_kv; ++i1) { - const auto & cell1 = kv_self.cells[i1]; + for (; i1 < n_kv; ++i1) { + auto & cell1 = kv_self.cells[i1]; if (cell1.is_empty() || ids[i1] != n_kv) { + cont = false; continue; } @@ -8170,11 +8206,23 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { // move the cell meta data kv_self.cells[i0 + nf] = cell1; - n_moves++; + // clear the old cell and move the head there + cell1 = llama_kv_cell(); + kv_self.head = n_used; + + if (!cont) { + n_moves++; + cont = true; + } + nf++; + + if (nf == nh) { + break; + } } - LLAMA_LOG_INFO("(tmp log) KV defrag: move [%u, %u) to [%u, %u)\n", is, n_kv, i0, i0 + nh); + //LLAMA_LOG_INFO("(tmp log) KV defrag: move [%u, %u) to [%u, %u)\n", is, i1 + 1, i0, i0 + nh); i0 += nh - 1; } @@ -8183,15 +8231,9 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { return; } - LLAMA_LOG_INFO("(tmp log) KV defrag cell moves: %u\n", n_moves); + //LLAMA_LOG_INFO("(tmp log) KV defrag cell moves: %u\n", n_moves); - kv_self.head = n_used; - kv_self.used = n_used; - - // zero the rest of the cells - for (uint32_t i = n_used; i < n_kv; ++i) { - kv_self.cells[i] = llama_kv_cell(); - } + //LLAMA_LOG_INFO("expected gf nodes: %u\n", 6*n_moves*n_layer); #if 0 // CPU defrag @@ -8203,9 +8245,6 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { // likely not worth the effort, as we have ggml_graph based defrag // - const auto & hparams = lctx.model.hparams; - - const uint32_t n_layer = hparams.n_layer; const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); @@ -8274,9 +8313,9 @@ static void llama_kv_cache_defrag_internal(struct llama_context & lctx) { llama_graph_compute(lctx, gf, lctx.cparams.n_threads); #endif - const int64_t t_end = ggml_time_us(); + //const int64_t t_end = ggml_time_us(); - LLAMA_LOG_INFO("(tmp log) KV defrag time: %.3f ms\n", (t_end - t_start)/1000.0); + //LLAMA_LOG_INFO("(tmp log) KV defrag time: %.3f ms\n", (t_end - t_start)/1000.0); } static void llama_kv_cache_update_internal(struct llama_context & lctx) { @@ -11670,6 +11709,7 @@ struct llama_context_params llama_context_default_params() { /*.yarn_beta_fast =*/ 32.0f, /*.yarn_beta_slow =*/ 1.0f, /*.yarn_orig_ctx =*/ 0, + /*.defrag_thold =*/ -1.0f, /*.cb_eval =*/ nullptr, /*.cb_eval_user_data =*/ nullptr, /*.type_k =*/ GGML_TYPE_F16, @@ -11834,6 +11874,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_attn_factor = params.yarn_attn_factor; cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; + cparams.defrag_thold = params.defrag_thold; cparams.mul_mat_q = params.mul_mat_q; cparams.offload_kqv = params.offload_kqv; cparams.do_pooling = params.do_pooling; @@ -12035,7 +12076,7 @@ struct llama_context * llama_new_context_with_model( } // buffer used to store the computation graph and the tensor meta data - ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); + ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead_custom(LLAMA_MAX_NODES, false)); ctx->sched = ggml_backend_sched_new(ctx->backends.data(), backend_buft.data(), ctx->backends.size(), LLAMA_MAX_NODES); diff --git a/llama.h b/llama.h index 3ff77d5a8..604161808 100644 --- a/llama.h +++ b/llama.h @@ -245,6 +245,7 @@ extern "C" { float yarn_beta_fast; // YaRN low correction dim float 
yarn_beta_slow; // YaRN high correction dim uint32_t yarn_orig_ctx; // YaRN original context size + float defrag_thold; // defragment the KV cache if holes/size > thold, < 0 disabled (default) ggml_backend_sched_eval_callback cb_eval; void * cb_eval_user_data; From 1f30b7a9f1b86baa455072d3182b9ebeee0cd845 Mon Sep 17 00:00:00 2001 From: Engininja2 <139037756+Engininja2@users.noreply.github.com> Date: Tue, 27 Feb 2024 06:50:18 -0600 Subject: [PATCH 724/811] ggml-quants : fix avx2 iq1_s vec_dot when compiled with gcc (#5742) --- ggml-quants.c | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index ce654f094..73c3bb412 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -10248,8 +10248,12 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const uint64_t aux64; - __m256i v_gindex; - const uint16_t * gindex = (const uint16_t *)&v_gindex; + typedef union m256i_uint16 { + __m256i reg; + uint16_t s[16]; + } m256i_uint16_t; + + m256i_uint16_t v_gindex; __m256 accum = _mm256_setzero_ps(); for (int i = 0; i < nb; ++i) { @@ -10264,13 +10268,13 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const memcpy(&aux64, sc, 8); sc += 8; const __m128i qh = _mm_shuffle_epi8(_mm_set_epi64x(aux64 >> 4, aux64), shuffle_h); const __m256i hbit = _mm256_cvtepu8_epi16(_mm_and_si128(qh, m8)); - v_gindex = _mm256_or_si256(_mm256_cvtepu8_epi16(ql), _mm256_slli_epi16(hbit, 5)); + v_gindex.reg = _mm256_or_si256(_mm256_cvtepu8_epi16(ql), _mm256_slli_epi16(hbit, 5)); const __m128i scales = _mm_or_si128(_mm_slli_epi16(_mm_and_si128(qh, m7), 1), m1); for (int i32 = 0; i32 < 4; ++i32) { const __m256i q8b = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; - const __m256i q1b = _mm256_set_epi64x(iq1s_grid[gindex[4*i32+3]], iq1s_grid[gindex[4*i32+2]], - iq1s_grid[gindex[4*i32+1]], iq1s_grid[gindex[4*i32+0]]); + const __m256i q1b = _mm256_set_epi64x(iq1s_grid[v_gindex.s[4*i32+3]], iq1s_grid[v_gindex.s[4*i32+2]], + iq1s_grid[v_gindex.s[4*i32+1]], iq1s_grid[v_gindex.s[4*i32+0]]); const __m256i dot = mul_add_epi8(q1b, q8b); const __m256i s16 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, shuffle_s[i32])); const __m256i p = _mm256_madd_epi16(s16, dot); From c24a2a6e6005e5d424301525a42ba45a4a362d30 Mon Sep 17 00:00:00 2001 From: Engininja2 <139037756+Engininja2@users.noreply.github.com> Date: Tue, 27 Feb 2024 07:22:45 -0600 Subject: [PATCH 725/811] cuda : replace remaining shfl_xor with calls to warp_reduce functions (#5744) --- ggml-cuda.cu | 73 +++++++++++++++++----------------------------------- 1 file changed, 24 insertions(+), 49 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 964fb7351..caef65de5 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -696,18 +696,20 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } -//static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { -//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -//#pragma unroll -// for (int mask = 16; mask > 0; mask >>= 1) { -// a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); -// } -// return a; -//#else -// (void) a; -// NO_DEVICE_CODE; -//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -//} +#ifdef GGML_CUDA_F16 +static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#pragma unroll + for 
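The gcc fix in #5742 above replaces a `uint16_t *` view of a `__m256i` with a union, so the 16-bit lanes are read through a type the compiler knows overlaps the register instead of through a cast pointer that strict-aliasing analysis may assume never does. A minimal illustration of the same pattern, assuming an x86 compiler with AVX2 enabled; the names here are mine, not the ones in ggml-quants.c:

```cpp
#include <immintrin.h>
#include <cstdint>
#include <cstdio>

// Same idea as the m256i_uint16 union added in #5742: the register and its
// sixteen 16-bit lanes are members of one union, so lane reads are not done
// through an incompatible pointer type.
typedef union {
    __m256i  reg;
    uint16_t s[16];
} m256i_u16;

int main() {
    m256i_u16 v;
    v.reg = _mm256_set1_epi16(37); // fill all 16 lanes with 37
    for (int i = 0; i < 16; ++i) {
        printf("%d ", (int) v.s[i]); // read lanes through the union, not a cast pointer
    }
    printf("\n");
}
```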
(int mask = 16; mask > 0; mask >>= 1) { + a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); + } + return a; +#else + (void) a; + NO_DEVICE_CODE; +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +} +#endif // GGML_CUDA_F16 static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll @@ -2521,10 +2523,7 @@ static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, #endif // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (threadIdx.x == 0) { dst[row] = tmp; @@ -2625,10 +2624,7 @@ static __global__ void dequantize_mul_mat_vec_q3_k(const void * __restrict__ vx, #endif // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (threadIdx.x == 0) { dst[row] = tmp; @@ -2761,10 +2757,7 @@ static __global__ void dequantize_mul_mat_vec_q4_k(const void * __restrict__ vx, #endif // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (tid == 0) { dst[row] = tmp; @@ -2877,10 +2870,7 @@ static __global__ void dequantize_mul_mat_vec_q5_k(const void * __restrict__ vx, #endif // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (threadIdx.x == 0) { dst[row] = tmp; @@ -2987,10 +2977,7 @@ static __global__ void dequantize_mul_mat_vec_q6_k(const void * __restrict__ vx, #endif // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (tid == 0) { dst[row] = tmp; @@ -3025,11 +3012,8 @@ static __global__ void quantize_q8_1(const float * __restrict__ x, void * __rest float amax = fabsf(xi); float sum = xi; -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - amax = fmaxf(amax, __shfl_xor_sync(0xffffffff, amax, mask, 32)); - sum += __shfl_xor_sync(0xffffffff, sum, mask, 32); - } + amax = warp_reduce_max(amax); + sum = warp_reduce_sum(sum); const float d = amax / 127; const int8_t q = amax == 0.0f ? 
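The warp_reduce_sum/warp_reduce_max helpers that #5744 standardizes on are the usual XOR butterfly: on each of five steps every lane combines its partial value with the lane whose index differs in exactly one bit (mask 16, 8, 4, 2, 1), so after the last step all 32 lanes hold the full reduction. A host-side simulation in plain C++ (no CUDA needed) of why five steps are enough:

```cpp
#include <cstdio>

// Simulates the XOR butterfly used by warp_reduce_sum: lane i combines its
// value with lane (i ^ mask) for mask = 16, 8, 4, 2, 1. After the last step
// every "lane" holds the sum of all 32 inputs.
int main() {
    float lane[32];
    for (int i = 0; i < 32; ++i) lane[i] = float(i + 1); // 1..32, sum = 528

    for (int mask = 16; mask > 0; mask >>= 1) {
        float next[32];
        for (int i = 0; i < 32; ++i) {
            next[i] = lane[i] + lane[i ^ mask]; // __shfl_xor_sync analogue
        }
        for (int i = 0; i < 32; ++i) lane[i] = next[i];
    }

    printf("lane[0] = %.0f, lane[31] = %.0f\n", lane[0], lane[31]); // both 528
}
```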
0 : roundf(xi / d); @@ -6222,10 +6206,7 @@ static __global__ void dequantize_mul_mat_vec(const void * __restrict__ vx, cons } // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (tid == 0) { #ifdef GGML_CUDA_F16 @@ -6275,10 +6256,7 @@ static __global__ void mul_mat_p021_f16_f32( const int idst = channel*nrows_dst + row_dst; // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (threadIdx.x == 0) { dst[idst] = tmp; @@ -6321,10 +6299,7 @@ static __global__ void mul_mat_vec_nc_f16_f32( // nc == non-contiguous } // sum up partial sums and write back result -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + tmp = warp_reduce_sum(tmp); if (threadIdx.x == 0) { dst[idst] = tmp; From 0becb22ac05b6542bd9d5f2235691aa1d3d4d307 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 27 Feb 2024 16:34:24 +0200 Subject: [PATCH 726/811] IQ4_XS: a 4.25 bpw quantization (#5747) * Try IQ4_NL with blocks of 64 - does not look good * iq4_xs: go to super-blocks of 256 and 6-bit scales for blocks of 32 * iq4_xs: CUDA works - 133.2 t/s * iq4_xs: AVX2 dot product * iq4_xs: ARM_NEON dot product * iq4_nl: Metal implementation As usual, Metal / Apple Silicon don't like my quants. * iq3_xs: minor fix * iq4_xs: shrink by using IQ3_S for attn_k and attn_q * iq4_xs: revert using IQ3_S for attn_k and attn_v PPL vs size is good, but CPU performance suffers: on M2 Max TG-128 drops to 21.7 t/s from 28.8, and on a Ryzen-7950X to 14.5 t/s from 15.8 t/s. On CUDA we have 135 t/s when using IQ3_S vs 133 t/s with pure IQ4_XS. 
* Fix CI * iq4_xs: Added forgotten check for 256 divisibility --------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 3 +- ggml-cuda.cu | 119 ++++++++++++++- ggml-metal.m | 29 +++- ggml-metal.metal | 224 +++++++++++++++++++++++++++- ggml-quants.c | 261 ++++++++++++++++++++++++++++++--- ggml-quants.h | 13 ++ ggml.c | 30 ++++ ggml.h | 2 + llama.cpp | 22 +-- llama.h | 1 + tests/test-backend-ops.cpp | 2 +- 11 files changed, 668 insertions(+), 38 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 2d187823f..7662ec80c 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -36,7 +36,8 @@ static const std::vector QUANT_OPTIONS = { { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1764 ppl @ LLaMA-v1-7B", }, - { "IQ4_NL", LLAMA_FTYPE_MOSTLY_IQ4_NL, " 4.25 bpw non-linear quantization", }, + { "IQ4_NL", LLAMA_FTYPE_MOSTLY_IQ4_NL, " 4.50 bpw non-linear quantization", }, + { "IQ4_XS", LLAMA_FTYPE_MOSTLY_IQ4_XS, " 4.25 bpw non-linear quantization", }, { "Q4_K", LLAMA_FTYPE_MOSTLY_Q4_K_M, "alias for Q4_K_M", }, { "Q4_K_S", LLAMA_FTYPE_MOSTLY_Q4_K_S, " 3.59G, +0.0992 ppl @ LLaMA-v1-7B", }, { "Q4_K_M", LLAMA_FTYPE_MOSTLY_Q4_K_M, " 3.80G, +0.0532 ppl @ LLaMA-v1-7B", }, diff --git a/ggml-cuda.cu b/ggml-cuda.cu index caef65de5..dfd28df62 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -571,6 +571,18 @@ typedef struct { } block_iq4_nl; static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); +// QR4_XS = 8 is very slightly faster than QR4_XS = 4 +#define QR4_XS 8 +#define QI4_XS (QK_K / (4*QR4_XS)) +typedef struct { + half d; + uint16_t scales_h; + uint8_t scales_l[QK_K/64]; + uint8_t qs[QK_K/2]; +} block_iq4_xs; +static_assert(sizeof(block_iq4_xs) == sizeof(ggml_fp16_t) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); + + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -2427,6 +2439,25 @@ static __global__ void dequantize_block_iq4_nl(const void * __restrict__ vx, dst } +template +static __global__ void dequantize_block_iq4_xs(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq4_xs * x = (const block_iq4_xs *)vx; + + const int tid = threadIdx.x; + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 4*il; + const uint8_t * q4 = x[i].qs + 16*ib + 4*il; + const float d = (float)x[i].d * ((((x[i].scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((x[i].scales_h >> 2*ib) & 3) << 4)) - 32); + for (int j = 0; j < 4; ++j) { + y[j+ 0] = d * kvalues_iq4nl[q4[j] & 0xf]; + y[j+16] = d * kvalues_iq4nl[q4[j] >> 4]; + } + +} + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -5286,6 +5317,76 @@ static __device__ __forceinline__ float vec_dot_iq4_nl_q8_1( return d * (sumi1 + sumi2); } +static __device__ __forceinline__ float vec_dot_iq4_xs_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { + +#if QK_K == 256 +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics + + const block_iq4_xs * bq4 = (const block_iq4_xs *) vbq; + const uint8_t * values = (const uint8_t *)kvalues_iq4nl; + + //// iqs is 0...7 + //const int ib64 = iqs/2; + //const int il = iqs%2; + //const int32_t * q8_1 = (const int *)bq8_1[2*ib64+0].qs + 2*il; + //const int32_t * q8_2 = (const int *)bq8_1[2*ib64+1].qs + 2*il; + //const uint32_t * q4_1 = (const uint32_t *)bq4->qs + 8*ib64 + 2*il; + //const uint32_t * q4_2 = q4_1 + 4; + //const int8_t ls1 = (bq4->scales_l[ib64] & 0xf) | (((bq4->scales_h >> (4*ib64+0)) & 3) << 4); + //const int8_t ls2 = (bq4->scales_l[ib64] >> 4) | (((bq4->scales_h >> (4*ib64+2)) & 3) << 4); + //const float d1 = (float)bq4->d * (ls1 - 32) * __low2float(bq8_1[2*ib64+0].ds); + //const float d2 = (float)bq4->d * (ls2 - 32) * __low2float(bq8_1[2*ib64+1].ds); + //int v1, v2; + //int sumi1 = 0, sumi2 = 0; + //for (int j = 0; j < 2; ++j) { + // get_int_from_table_16(q4_1[j], values, v1, v2); + // sumi1 = __dp4a(v2, q8_1[j+4], __dp4a(v1, q8_1[j+0], sumi1)); + // get_int_from_table_16(q4_2[j], values, v1, v2); + // sumi2 = __dp4a(v2, q8_2[j+4], __dp4a(v1, q8_2[j+0], sumi2)); + //} + //return d1 * sumi1 + d2 * sumi2; + + // iqs is 0...7 + const int ib32 = iqs; + const int32_t * q8 = (const int *)bq8_1[ib32].qs; + const uint32_t * q4 = (const uint32_t *)bq4->qs + 4*ib32; + const int8_t ls = ((bq4->scales_l[ib32/2] >> 4*(ib32%2)) & 0xf) | (((bq4->scales_h >> 2*ib32) & 3) << 4); + const float d = (float)bq4->d * (ls - 32) * __low2float(bq8_1[ib32].ds); + int v1, v2; + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 4; ++j) { + get_int_from_table_16(q4[j], values, v1, v2); + sumi1 = __dp4a(v1, q8[j+0], sumi1); + sumi2 = __dp4a(v2, q8[j+4], sumi2); + } + return d * (sumi1 + sumi2); + + //// iqs is 0...15 + //const int ib32 = iqs/2; + //const int il = iqs%2; + //const int32_t * q8 = (const int *)bq8_1[ib32].qs + 2*il; + //const uint32_t * q4 = (const uint32_t *)bq4->qs + 4*ib32 + 2*il; + //const int8_t ls = ((bq4->scales_l[ib32/2] >> 4*(ib32%2)) & 0xf) | (((bq4->scales_h >> 2*ib32) & 3) << 4); + //const float d = (float)bq4->d * (ls - 32) * 
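For the IQ4_XS layout introduced in #5747: with QK_K = 256 a super-block carries one fp16 scale, a 6-bit scale per block of 32 (low nibble in scales_l, top two bits in scales_h), and a 4-bit index into the IQ4_NL value table per weight, which is where the 4.25 bpw in the title comes from (136 bytes for 256 weights). A small sketch that checks the size arithmetic and decodes a block scale the same way as the kernels above; the struct and helper names are mine, only the field layout and bit twiddling follow the patch:

```cpp
#include <cstdint>
#include <cstdio>

// Per-super-block layout of block_iq4_xs for QK_K = 256 (#5747): one fp16
// super-block scale (stored here as raw uint16_t), 6-bit scales for eight
// blocks of 32, and 4-bit indices into the IQ4_NL value table for 256 weights.
struct iq4_xs_sketch {
    uint16_t d;             // fp16 super-block scale (raw bits)
    uint16_t scales_h;      // 8 x 2 high bits
    uint8_t  scales_l[4];   // 8 x 4 low bits, two blocks per byte
    uint8_t  qs[128];       // 256 x 4-bit quant indices
};

// Decode the signed 6-bit scale of block ib (0..7), same bit gymnastics as
// the kernels above: low nibble from scales_l, two high bits from scales_h.
static int block_scale(const iq4_xs_sketch & b, int ib) {
    const int lo = (b.scales_l[ib/2] >> 4*(ib%2)) & 0xf;
    const int hi = (b.scales_h >> 2*ib) & 3;
    return ((hi << 4) | lo) - 32; // stored with a +32 bias -> range [-32, 31]
}

int main() {
    static_assert(sizeof(iq4_xs_sketch) == 136, "2 + 2 + 4 + 128 bytes");
    printf("bits per weight: %.2f\n", 8.0 * sizeof(iq4_xs_sketch) / 256); // 4.25

    iq4_xs_sketch b = {};
    b.scales_l[1] = 0x0a;        // block 2: low nibble = 0xa
    b.scales_h   = 3u << 4;      // block 2: high bits  = 3
    printf("scale of block 2: %d\n", block_scale(b, 2)); // 0x3a - 32 = 26
}
```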
__low2float(bq8_1[ib32].ds); + //int v1, v2; + //int sumi1 = 0, sumi2 = 0; + //for (int j = 0; j < 2; ++j) { + // get_int_from_table_16(q4[j], values, v1, v2); + // sumi1 = __dp4a(v1, q8[j+0], sumi1); + // sumi2 = __dp4a(v2, q8[j+4], sumi2); + //} + //return d * (sumi1 + sumi2); +#else + assert(false); + return 0.f; +#endif +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -7340,6 +7441,12 @@ static void dequantize_row_iq4_nl_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq4_nl<<>>(vx, y); } +template +static void dequantize_row_iq4_xs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = (k + QK_K - 1) / QK_K; + dequantize_block_iq4_xs<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -7385,6 +7492,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ4_XS: + return dequantize_row_iq4_xs_cuda; case GGML_TYPE_IQ3_S: return dequantize_row_iq3_s_cuda; case GGML_TYPE_F32: @@ -7428,6 +7537,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ4_XS: + return dequantize_row_iq4_xs_cuda; case GGML_TYPE_IQ3_S: return dequantize_row_iq3_s_cuda; case GGML_TYPE_F16: @@ -9176,6 +9287,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: @@ -9203,6 +9315,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 
128 : 64; case GGML_TYPE_Q6_K: @@ -9313,6 +9426,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ4_XS: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; case GGML_TYPE_IQ3_S: mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); @@ -12041,7 +12158,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons ggml_type a_type = a->type; if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S || - a_type == GGML_TYPE_IQ2_S) { + a_type == GGML_TYPE_IQ2_S || a_type == GGML_TYPE_IQ4_XS) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 251d04fb0..9eba2f5d2 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -65,6 +65,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, GGML_METAL_KERNEL_TYPE_RMS_NORM, GGML_METAL_KERNEL_TYPE_GROUP_NORM, @@ -91,6 +92,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, @@ -113,6 +115,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, @@ -132,6 +135,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, @@ -151,6 +155,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, @@ -466,6 +471,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, get_rows_iq2_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, get_rows_iq4_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); @@ -492,6 +498,7 @@ static struct ggml_metal_context * 
ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, mul_mv_iq2_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, mul_mv_iq4_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); @@ -514,6 +521,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, mul_mv_id_iq2_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, mul_mv_id_iq4_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); @@ -533,6 +541,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, mul_mm_iq2_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, mul_mm_iq4_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); @@ -552,6 +561,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, mul_mm_id_iq2_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, mul_mm_id_iq4_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); @@ -1371,6 +1381,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32 ].pipeline; break; case 
GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } @@ -1529,6 +1540,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32].pipeline; } break; + case GGML_TYPE_IQ4_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1576,7 +1593,7 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src0t == GGML_TYPE_IQ4_NL) { + else if (src0t == GGML_TYPE_IQ4_NL || src0t == GGML_TYPE_IQ4_XS) { const int mem_size = 32*sizeof(float); [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; @@ -1678,6 +1695,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } @@ -1839,6 +1857,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32].pipeline; } break; + case GGML_TYPE_IQ4_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1902,7 +1926,7 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src2t == GGML_TYPE_IQ4_NL) { + else if (src2t == GGML_TYPE_IQ4_NL || src2t == GGML_TYPE_IQ4_XS) { const int mem_size = 32*sizeof(float); [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; @@ -1952,6 +1976,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 47354e952..689411903 100644 
--- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2560,6 +2560,13 @@ typedef struct { uint8_t qs[QK4_NL/2]; } block_iq4_nl; +typedef struct { + half d; + uint16_t scales_h; + uint8_t scales_l[QK_K/64]; + uint8_t qs[QK_K/2]; +} block_iq4_xs; + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -5160,6 +5167,100 @@ void kernel_mul_mv_iq4_nl_f32_impl( } } +void kernel_mul_mv_iq4_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + const int first_row = (r0 * 2 + sgitg) * 2; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq4_xs * x = (device const block_iq4_xs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + const int ix = tiisg/16; // 0 or 1 + const int it = tiisg%16; // 0...15 + const int ib = it/2; + const int il = it%2; + + shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16]; + threadgroup_barrier(mem_flags::mem_threadgroup); + + float4 yl[4]; + float sumf[2]={0.f}, all_sum; + + device const float * yb = y + ix * QK_K + ib * 32 + il * 8; + + uint32_t aux32[2]; + thread const uint8_t * q8 = (thread const uint8_t *)aux32; + + float4 qf1, qf2; + + for (int ibl = ix; ibl < nb; ibl += 2) { + + device const float4 * y4 = (device const float4 *)yb; + yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5]; + + for (int row = 0; row < 2; ++row) { + + device const block_iq4_xs & xb = x[row*nb + ibl]; + device const uint32_t * q4 = (device const uint32_t *)(xb.qs + 16*ib + 8*il); + + float4 acc1 = {0.f}, acc2 = {0.f}; + + aux32[0] = q4[0] & 0x0f0f0f0f; + aux32[1] = (q4[0] >> 4) & 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[0] * qf1; + acc2 += yl[1] * qf2; + + aux32[0] = q4[1] & 0x0f0f0f0f; + aux32[1] = (q4[1] >> 4) & 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[2] * qf1; + acc2 += yl[3] * qf2; + + acc1 += acc2; + + const int ls = (((xb.scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((xb.scales_h >> 2*ib) & 3) << 4)) - 32; + sumf[row] += (float)xb.d * ls * (acc1[0] + acc1[1] + acc1[2] + acc1[3]); + + } + + yb += 2 * QK_K; + } + + for (int row = 0; row < 2; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + [[host_name("kernel_mul_mv_iq1_s_f32")]] kernel void kernel_mul_mv_iq1_s_f32( device const void * src0, @@ -5217,6 +5318,35 @@ kernel void kernel_mul_mv_iq4_nl_f32( kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, 
r2, r3, shared_values, tgpig, tiisg, sgitg); } +[[host_name("kernel_mul_mv_iq4_xs_f32")]] +kernel void kernel_mul_mv_iq4_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq4_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -5638,6 +5768,26 @@ void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 } } +template +void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint32_t * q4 = (device const uint32_t *)xb->qs + 4*ib32; + const int ls = ((xb->scales_l[ib32/2] >> 4*(ib32%2)) & 0xf) | (((xb->scales_h >> 2*ib32) & 3) << 4); + const float d = (float)xb->d * (ls - 32); + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = (q4[i] >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -6183,7 +6333,8 @@ template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_r template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; -template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq4_xs")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -6226,7 +6377,8 @@ template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_m template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; -template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_xs_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -6281,7 +6433,8 @@ template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel template 
[[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; -template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -7507,3 +7660,68 @@ kernel void kernel_mul_mv_id_iq4_nl_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq4_xs_f32")]] +kernel void kernel_mul_mv_id_iq4_xs_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq4_xs_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index 73c3bb412..607d50925 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -4225,6 +4225,29 @@ void dequantize_row_iq4_nl(const block_iq4_nl * restrict x, float * restrict y, } } +void dequantize_row_iq4_xs(const block_iq4_xs * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const uint8_t * qs = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib = 0; ib < QK_K/32; ++ib) { + const int ls = ((x[i].scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((x[i].scales_h >> 2*ib) & 3) << 4); + const float dl = d * (ls - 32); + for (int j = 0; j < 16; ++j) { + y[j+ 0] = dl * kvalues_iq4nl[qs[j] & 0xf]; + y[j+16] = dl * kvalues_iq4nl[qs[j] >> 4]; + } + y += 32; + qs += 16; + } + } +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -9675,8 +9698,8 @@ void ggml_vec_dot_iq2_s_q8_K(int n, float * restrict s, size_t bs, const void * qs += 8; vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); - vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); - vs.val[0] 
= vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); vs.val[0] = vceqq_u8(vs.val[0], mask2); vs.val[1] = vceqq_u8(vs.val[1], mask2); @@ -9684,8 +9707,8 @@ void ggml_vec_dot_iq2_s_q8_K(int n, float * restrict s, size_t bs, const void * q2s.val[1] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[1], m1)), q2s.val[1]); vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); - vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); - vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); vs.val[0] = vceqq_u8(vs.val[0], mask2); vs.val[1] = vceqq_u8(vs.val[1], mask2); @@ -10425,6 +10448,134 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * #endif } +void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK_K == 0); + + const block_iq4_xs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined __ARM_NEON + const int8x16_t values = vld1q_s8(kvalues_iq4nl); + const uint8x16_t m4b = vdupq_n_u8(0x0f); + uint8x16x2_t q4bits; + int8x16x4_t q4b; + int8x16x4_t q8b; + int32x4_t prod_1, prod_2; + + float sumf = 0; + + for (int ibl = 0; ibl < nb; ++ibl) { + + const int8_t * q8 = y[ibl].qs; + const uint8_t * q4 = x[ibl].qs; + uint16_t h = x[ibl].scales_h; + + int sumi1 = 0, sumi2 = 0; + for (int ib = 0; ib < QK_K/64; ++ib) { + + q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + q4b.val[0] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[0], m4b)); + q4b.val[1] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[1], m4b)); + q4b.val[3] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + + prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); + prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); + + int ls1 = ((x[ibl].scales_l[ib] & 0xf) | ((h << 4) & 0x30)) - 32; + int ls2 = ((x[ibl].scales_l[ib] >> 4) | ((h << 2) & 0x30)) - 32; + h >>= 4; + sumi1 += vaddvq_s32(prod_1) * ls1; + sumi2 += vaddvq_s32(prod_2) * ls2; + + } + + sumf += GGML_FP16_TO_FP32(x[ibl].d) * y[ibl].d * (sumi1 + sumi2); + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + + __m256 accum = _mm256_setzero_ps(); + for (int ibl = 0; ibl < nb; ++ibl) { + const uint8_t * qs = x[ibl].qs; + const int8_t * q8 = y[ibl].qs; + uint16_t sh = x[ibl].scales_h; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i*)qs); qs += 16; + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)qs); qs += 16; + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q4b_1 = _mm256_set_m128i(_mm_shuffle_epi8(values128, 
_mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const int16_t ls1 = ((x[ibl].scales_l[ib/2] & 0xf) | ((sh << 4) & 0x30)) - 32; + const int16_t ls2 = ((x[ibl].scales_l[ib/2] >> 4) | ((sh << 2) & 0x30)) - 32; + sh >>= 4; + const __m256i p_1 = _mm256_madd_epi16(p16_1, _mm256_set1_epi16(ls1)); + const __m256i p_2 = _mm256_madd_epi16(p16_2, _mm256_set1_epi16(ls2)); + sumi1 = _mm256_add_epi32(p_1, sumi1); + sumi2 = _mm256_add_epi32(p_2, sumi2); + } + accum = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(x[ibl].d)*y[ibl].d), + _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accum); + } + + *s = hsum_float_8(accum); + +#else + float sumf = 0; + for (int ibl = 0; ibl < nb; ++ibl) { + const float d4d8 = GGML_FP16_TO_FP32(x[ibl].d) * y[ibl].d; + uint16_t h = x[ibl].scales_h; + const uint8_t * qs = x[ibl].qs; + const int8_t * q8 = y[ibl].qs; + for (int ib = 0; ib < QK_K/32; ib += 2) { + const uint8_t ls1 = (x[ibl].scales_l[ib/2] & 0xf) | ((h << 4) & 0x30); + const uint8_t ls2 = (x[ibl].scales_l[ib/2] >> 4) | ((h << 2) & 0x30); + h >>= 4; + const float d1 = d4d8*(ls1 - 32); + const float d2 = d4d8*(ls2 - 32); + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 16; ++j) { + sumi1 += q8[j+ 0] * kvalues_iq4nl[qs[j] & 0xf]; + sumi2 += q8[j+16] * kvalues_iq4nl[qs[j] >> 4]; + } + sumf += d1 * (sumi1 + sumi2); + qs += 16; + q8 += 32; + sumi1 = sumi2 = 0; + for (int j = 0; j < 16; ++j) { + sumi1 += q8[j+ 0] * kvalues_iq4nl[qs[j] & 0xf]; + sumi2 += q8[j+16] * kvalues_iq4nl[qs[j] >> 4]; + } + sumf += d2 * (sumi1 + sumi2); + qs += 16; + q8 += 32; + } + } + *s = sumf; +#endif +} + // ================================ IQ2 quantization ============================================= typedef struct { @@ -12021,23 +12172,23 @@ static inline int best_index_int8(int n, const int8_t * val, float x) { return x - val[mu-1] < val[mu] - x ? 
mu-1 : mu; } -static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RESTRICT x, - ggml_fp16_t * dh, uint8_t * q4, - float * weight, uint8_t * L, +static void quantize_row_iq4_nl_impl(const int super_block_size, const int block_size, const float * GGML_RESTRICT x, + ggml_fp16_t * dh, uint8_t * q4, uint16_t * scales_h, uint8_t * scales_l, + float * scales, float * weight, uint8_t * L, const int8_t * values, const float * quant_weights) { const int ntry = 7; float sigma2 = 0; - for (int j = 0; j < QK4_NL; ++j) sigma2 += x[j]*x[j]; - sigma2 *= 2.f/QK4_NL; + for (int j = 0; j < super_block_size; ++j) sigma2 += x[j]*x[j]; + sigma2 *= 2.f/super_block_size; - const int nb = QK4_NL/block_size; + memset(q4, 0, super_block_size/2); + dh[0] = GGML_FP32_TO_FP16(0.f); - memset(q4, 0, QK4_NL/2); - for (int ib = 0; ib < nb; ++ib) { - dh[ib] = GGML_FP32_TO_FP16(0.f); + float max_scale = 0, amax_scale = 0; + for (int ib = 0; ib < super_block_size/block_size; ++ib) { const float * xb = x + ib*block_size; if (quant_weights) { const float * qw = quant_weights + ib*block_size; @@ -12053,6 +12204,7 @@ static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RE } } if (!amax) { + scales[ib] = 0; continue; } float d = -max/values[0]; @@ -12066,7 +12218,6 @@ static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RE sumqx += w*q*xb[j]; sumq2 += w*q*q; } - float best_id = id; d = sumqx/sumq2; float best = d*sumqx; for (int itry = -ntry; itry <= ntry; ++itry) { @@ -12082,15 +12233,47 @@ static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RE } if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { d = sumqx/sumq2; best = d * sumqx; - best_id = id; } } - dh[ib] = GGML_FP32_TO_FP16(d); - for (int j = 0; j < block_size; ++j) { - L[ib*block_size + j] = best_index_int8(16, values, best_id*xb[j]); + scales[ib] = d; + float abs_d = fabsf(d); + if (abs_d > amax_scale) { + amax_scale = abs_d; max_scale = d; } } - for (int i = 0; i < QK4_NL/32; ++i) { + + if (super_block_size/block_size > 1) { + int nb = super_block_size/block_size; + memset(scales_h, 0, ((nb+7)/8)*sizeof(uint16_t)); + float d = -max_scale/32; + dh[0] = GGML_FP32_TO_FP16(d); + float id = d ? 1/d : 0.f; + for (int ib = 0; ib < super_block_size/block_size; ++ib) { + int l = nearest_int(id*scales[ib]); + l = MAX(-32, MIN(31, l)); + float dl = d * l; + float idl = dl ? 1/dl : 0.f; + uint8_t * Lb = L + ib*block_size; + const float * xb = x + ib*block_size; + for (int j = 0; j < block_size; ++j) { + Lb[j] = best_index_int8(16, values, idl*xb[j]); + } + l += 32; + uint8_t l_l = l & 0xf; + uint8_t l_h = l >> 4; + if (ib%2 == 0) scales_l[ib/2] = l_l; + else scales_l[ib/2] |= (l_l << 4); + scales_h[ib/8] |= (l_h << 2*(ib%8)); + } + } else { + dh[0] = GGML_FP32_TO_FP16(scales[0]); + float id = scales[0] ? 
1/scales[0] : 0; + for (int j = 0; j < super_block_size; ++j) { + L[j] = best_index_int8(16, values, id*x[j]); + } + } + + for (int i = 0; i < super_block_size/32; ++i) { for (int j = 0; j < 16; ++j) { q4[16*i + j] = L[32*i + j] | (L[32*i + 16 + j] << 4); } @@ -12103,12 +12286,16 @@ size_t quantize_iq4_nl(const float * src, void * dst, int nrow, int n_per_row, i int nblock = n_per_row/QK4_NL; char * qrow = (char *)dst; uint8_t L[QK4_NL]; - float weight[32]; + float weight[QK4_NL]; + uint16_t unused_h; + uint8_t * unused_l = NULL; + float scale; for (int row = 0; row < nrow; ++row) { block_iq4_nl * iq4 = (block_iq4_nl *)qrow; for (int ibl = 0; ibl < nblock; ++ibl) { const float * qw = quant_weights ? quant_weights + QK4_NL*ibl : NULL; - quantize_row_iq4_nl_impl(32, src + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, weight, L, kvalues_iq4nl, qw); + quantize_row_iq4_nl_impl(QK4_NL, 32, src + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, &unused_h, unused_l, + &scale, weight, L, kvalues_iq4nl, qw); } src += n_per_row; qrow += nblock*sizeof(block_iq4_nl); @@ -12127,6 +12314,38 @@ void quantize_row_iq4_nl_reference(const float * restrict x, block_iq4_nl * rest quantize_iq4_nl(x, y, 1, k, NULL, NULL); } +size_t quantize_iq4_xs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + uint8_t L[QK_K]; + float weight[32]; + float scales[QK_K/32]; + for (int row = 0; row < nrow; ++row) { + block_iq4_xs * iq4 = (block_iq4_xs *)qrow; + for (int ibl = 0; ibl < nblock; ++ibl) { + const float * qw = quant_weights ? quant_weights + QK_K*ibl : NULL; + quantize_row_iq4_nl_impl(QK_K, 32, src + QK_K*ibl, &iq4[ibl].d, iq4[ibl].qs, &iq4[ibl].scales_h, iq4[ibl].scales_l, + scales, weight, L, kvalues_iq4nl, qw); + } + src += n_per_row; + qrow += nblock*sizeof(block_iq4_xs); + } + return nrow * nblock * sizeof(block_iq4_xs); +} + +void quantize_row_iq4_xs(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq4_xs * restrict y = vy; + quantize_row_iq4_xs_reference(x, y, k); +} + +void quantize_row_iq4_xs_reference(const float * restrict x, block_iq4_xs * restrict y, int k) { + assert(k % QK_K == 0); + quantize_iq4_xs(x, y, 1, k, NULL, NULL); +} + // =============================== 2.5625 bpw static void quantize_row_iq2_s_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { diff --git a/ggml-quants.h b/ggml-quants.h index 4731dde0c..2c61134c4 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -230,6 +230,14 @@ typedef struct { } block_iq4_nl; static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); +typedef struct { + ggml_fp16_t d; + uint16_t scales_h; + uint8_t scales_l[QK_K/64]; + uint8_t qs[QK_K/2]; +} block_iq4_xs; +static_assert(sizeof(block_iq4_xs) == sizeof(ggml_fp16_t) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); + #ifdef __cplusplus extern "C" { #endif @@ -250,6 +258,7 @@ void quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGM void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); +void quantize_row_iq4_xs_reference (const float 
* GGML_RESTRICT x, block_iq4_xs * GGML_RESTRICT y, int k); void quantize_row_iq3_s_reference (const float * GGML_RESTRICT x, block_iq3_s * GGML_RESTRICT y, int k); void quantize_row_iq2_s_reference (const float * GGML_RESTRICT x, block_iq2_s * GGML_RESTRICT y, int k); @@ -268,6 +277,7 @@ void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq4_xs (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq2_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -291,6 +301,7 @@ void dequantize_row_iq2_s (const block_iq2_s * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq4_xs (const block_iq4_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_s (const block_iq3_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product @@ -311,6 +322,7 @@ void ggml_vec_dot_iq2_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq4_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // @@ -322,6 +334,7 @@ size_t quantize_iq2_s (const float * src, void * dst, int nrows, int n_per_row, size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq4_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index 6be07bb6f..d66db3352 100644 --- a/ggml.c +++ b/ggml.c @@ -726,6 +726,18 @@ static const ggml_type_traits_t 
type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_0, .nrows = 1, }, + [GGML_TYPE_IQ4_XS] = { + .type_name = "iq4_xs", + .blck_size = QK_K, + .type_size = sizeof(block_iq4_xs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq4_xs, + .from_float = quantize_row_iq4_xs, + .from_float_reference = (ggml_from_float_t)quantize_row_iq4_xs_reference, + .vec_dot = ggml_vec_dot_iq4_xs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2328,6 +2340,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; + case GGML_FTYPE_MOSTLY_IQ4_XS: wtype = GGML_TYPE_IQ4_XS; break; case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; case GGML_FTYPE_MOSTLY_IQ2_S: wtype = GGML_TYPE_IQ2_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; @@ -7764,6 +7777,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: { @@ -8045,6 +8059,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: { @@ -8171,6 +8186,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: default: @@ -11071,6 +11087,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: { @@ -11261,6 +11278,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: default: @@ -11465,6 +11483,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: { @@ -12167,6 +12186,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: case GGML_TYPE_Q8_K: @@ -12252,6 +12272,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ2_S: case GGML_TYPE_Q8_K: @@ -19817,6 +19838,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq4_nl(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ4_XS: + { + GGML_ASSERT(start % QK4_NL == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq4_xs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 8c7ca4588..23b768640 100644 --- a/ggml.h +++ b/ggml.h @@ -352,6 +352,7 @@ extern "C" { 
GGML_TYPE_IQ4_NL = 20, GGML_TYPE_IQ3_S = 21, GGML_TYPE_IQ2_S = 22, + GGML_TYPE_IQ4_XS = 23, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -393,6 +394,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_S = 20, // except 1d tensors GGML_FTYPE_MOSTLY_IQ2_S = 21, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ4_XS = 22, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 6729bb99c..464e1b89b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2584,6 +2584,7 @@ struct llama_model_loader { case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; + case GGML_TYPE_IQ4_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ4_XS; break; case GGML_TYPE_IQ3_S: ftype = LLAMA_FTYPE_MOSTLY_IQ3_S; break; default: { @@ -2941,6 +2942,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ4_XS: return "IQ4_XS - 4.25 bpw"; case LLAMA_FTYPE_MOSTLY_IQ3_S: return "IQ3_S - 3.4375 bpw"; case LLAMA_FTYPE_MOSTLY_IQ3_M: return "IQ3_S mix - 3.66 bpw"; @@ -10871,7 +10873,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = qs.i_attention_wv < 2 ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && qs.model.hparams.n_gqa() >= 4) { + else if ((ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_IQ4_XS) && qs.model.hparams.n_gqa() >= 4) { new_type = GGML_TYPE_Q5_K; } else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && @@ -10940,8 +10942,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; } } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && !qs.has_imatrix) { - if (i_layer < n_layer/8) new_type = GGML_TYPE_Q5_K; + else if (i_layer < n_layer/8 && (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_IQ4_XS) && !qs.has_imatrix) { + new_type = GGML_TYPE_Q5_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && i_layer < n_layer/8) { @@ -10961,7 +10963,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ3_S || - ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + ftype == LLAMA_FTYPE_MOSTLY_IQ3_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_XS) { new_type = GGML_TYPE_Q5_K; } } else { @@ -11012,7 +11014,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty //} bool convert_incompatible_tensor = false; if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || - new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || + new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || 
new_type == GGML_TYPE_IQ4_XS || new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_S || new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || new_type == GGML_TYPE_IQ3_S) { int nx = tensor->ne[0]; @@ -11033,10 +11035,11 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ1_S: case GGML_TYPE_Q2_K: - case GGML_TYPE_Q3_K: new_type = GGML_TYPE_IQ4_NL; break; - case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; - case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; - case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; + case GGML_TYPE_Q3_K: + case GGML_TYPE_IQ4_XS: new_type = GGML_TYPE_IQ4_NL; break; + case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; + case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; + case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; default: throw std::runtime_error("\nUnsupported tensor size encountered\n"); } LLAMA_LOG_WARN(" - using fallback quantization %s\n", ggml_type_name(new_type)); @@ -11078,6 +11081,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; + case LLAMA_FTYPE_MOSTLY_IQ4_XS: quantized_type = GGML_TYPE_IQ4_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_S: quantized_type = GGML_TYPE_IQ3_S; break; case LLAMA_FTYPE_MOSTLY_IQ3_M: quantized_type = GGML_TYPE_IQ3_S; break; diff --git a/llama.h b/llama.h index 604161808..16e28e91d 100644 --- a/llama.h +++ b/llama.h @@ -115,6 +115,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_IQ3_M = 27, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_S = 28, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_M = 29, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ4_XS = 30, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 60a852779..d4cea805f 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1918,7 +1918,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ2_S, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, - GGML_TYPE_IQ4_NL, GGML_TYPE_IQ3_S, + GGML_TYPE_IQ4_NL, GGML_TYPE_IQ3_S, GGML_TYPE_IQ4_XS, }; // unary ops From cb49e0f8c906e5da49e9f6d64a57742a9a241c6a Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 27 Feb 2024 19:16:49 +0200 Subject: [PATCH 727/811] Attempt to fix android build (#5752) Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 607d50925..f73d17ce2 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -10464,9 +10464,9 @@ void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * #if defined __ARM_NEON const int8x16_t values = vld1q_s8(kvalues_iq4nl); const uint8x16_t m4b = vdupq_n_u8(0x0f); - uint8x16x2_t q4bits; - int8x16x4_t q4b; - int8x16x4_t q8b; + ggml_uint8x16x2_t q4bits; + ggml_int8x16x4_t q4b; + ggml_int8x16x4_t q8b; int32x4_t prod_1, prod_2; float sumf = 0; From 7c4263d4261d6ee6f0539d53eb9e1b4d120ba8af Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Wed, 28 Feb 2024 10:37:02 +0200 Subject: [PATCH 
728/811] ggml : make i-quants work with super-blocks of 64 (CPU,Metal) (#5760) * WIP: make i-quants work for QK_K = 64 * iq2_xs: attempt to fix AVX dot product for QK_K = 64 Tests pass, but I get gibberish. * QK_K = 64 tests pass on ARM_NEON and Metal Sadly, that does not mean it actually works. * Make CUDA compile with QK_K = 64 Tests don't pass, plus we get misaligned access * Q2_K: fixed bug in imatrix quantization for QK_K = 64 * iq1_s: turn off SIMD implementation for QK_K = 64 (it does not work) --------- Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 27 ++++++--- ggml-metal.metal | 58 ++++++++++--------- ggml-quants.c | 148 +++++++++++++++++++++++++++++++++++++++-------- ggml-quants.h | 5 ++ ggml.c | 15 ++++- 5 files changed, 194 insertions(+), 59 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index dfd28df62..831c84efb 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -544,14 +544,19 @@ static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong #define QR3_XS 8 #define QI3_XS (QK_K / (4*QR3_XS)) +#if QK_K == 64 +#define IQ3S_N_SCALE 2 +#else +#define IQ3S_N_SCALE QK_K/64 +#endif typedef struct { half d; uint8_t qs[QK_K/4]; uint8_t qh[QK_K/32]; uint8_t signs[QK_K/8]; - uint8_t scales[QK_K/64]; + uint8_t scales[IQ3S_N_SCALE]; } block_iq3_s; -static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 27*(QK_K/64), "wrong iq3_s block size/padding"); +static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 13*(QK_K/32) + IQ3S_N_SCALE, "wrong iq3_s block size/padding"); #define QR1_S 8 #define QI1_S (QK_K / (4*QR1_S)) @@ -571,6 +576,11 @@ typedef struct { } block_iq4_nl; static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); +#if QK_K == 64 +#define block_iq4_xs block_iq4_nl +#define QR4_XS QR4_NL +#define QI4_XS QI4_NL +#else // QR4_XS = 8 is very slightly faster than QR4_XS = 4 #define QR4_XS 8 #define QI4_XS (QK_K / (4*QR4_XS)) @@ -581,7 +591,7 @@ typedef struct { uint8_t qs[QK_K/2]; } block_iq4_xs; static_assert(sizeof(block_iq4_xs) == sizeof(ggml_fp16_t) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); - +#endif #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -2439,9 +2449,9 @@ static __global__ void dequantize_block_iq4_nl(const void * __restrict__ vx, dst } +#if QK_K != 64 template static __global__ void dequantize_block_iq4_xs(const void * __restrict__ vx, dst_t * __restrict__ yy) { - const int i = blockIdx.x; const block_iq4_xs * x = (const block_iq4_xs *)vx; @@ -2455,8 +2465,8 @@ static __global__ void dequantize_block_iq4_xs(const void * __restrict__ vx, dst y[j+ 0] = d * kvalues_iq4nl[q4[j] & 0xf]; y[j+16] = d * kvalues_iq4nl[q4[j] >> 4]; } - } +#endif static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { @@ -5382,8 +5392,7 @@ static __device__ __forceinline__ float vec_dot_iq4_xs_q8_1( return 0.f; #endif #else - assert(false); - return 0.f; + return vec_dot_iq4_xs_q8_1(vbq, bq8_1, iqs); #endif } @@ -7444,7 +7453,11 @@ static void dequantize_row_iq4_nl_cuda(const void * vx, dst_t * y, const int k, template static void dequantize_row_iq4_xs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = (k + QK_K - 1) / QK_K; +#if QK_K == 64 + dequantize_block_iq4_nl<<>>(vx, y); +#else dequantize_block_iq4_xs<<>>(vx, y); +#endif } template diff --git a/ggml-metal.metal b/ggml-metal.metal index 689411903..74a5e0b03 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2560,12 +2560,16 @@ typedef struct { uint8_t qs[QK4_NL/2]; } block_iq4_nl; +#if QK_K == 64 +#define block_iq4_xs block_iq4_nl +#else typedef struct { half d; uint16_t scales_h; uint8_t scales_l[QK_K/64]; uint8_t qs[QK_K/2]; } block_iq4_xs; +#endif //====================================== dot products ========================= @@ -4346,7 +4350,6 @@ void kernel_mul_mv_iq2_xxs_f32_impl( threadgroup_barrier(mem_flags::mem_threadgroup); } -#if QK_K == 256 const int ix = tiisg; device const float * y4 = y + 32 * ix; @@ -4387,12 +4390,6 @@ void kernel_mul_mv_iq2_xxs_f32_impl( y4 += 32 * 32; } -#else - (void) x; - (void) y; - (void) yl; - (void) nb32; -#endif for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); @@ -4482,7 +4479,6 @@ void kernel_mul_mv_iq2_xs_f32_impl( threadgroup_barrier(mem_flags::mem_threadgroup); } -#if QK_K == 256 const int ix = tiisg; device const float * y4 = y + 32 * ix; @@ -4533,12 +4529,6 @@ void kernel_mul_mv_iq2_xs_f32_impl( y4 += 32 * 32; } -#else - (void) x; - (void) y; - (void) yl; - (void) nb32; -#endif for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); @@ -4628,7 +4618,6 @@ void kernel_mul_mv_iq3_xxs_f32_impl( threadgroup_barrier(mem_flags::mem_threadgroup); } -#if QK_K == 256 const int ix = tiisg; device const float * y4 = y + 32 * ix; @@ -4672,12 +4661,6 @@ void kernel_mul_mv_iq3_xxs_f32_impl( y4 += 32 * 32; } -#else - (void) x; - (void) y; - (void) yl; - (void) nb32; -#endif for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); @@ -5016,7 +4999,6 @@ void kernel_mul_mv_iq1_s_f32_impl( const int nb32 = nb * (QK_K / 32); -#if QK_K == 256 const int ix = tiisg/2; const int il = tiisg%2; @@ -5055,12 +5037,6 @@ void kernel_mul_mv_iq1_s_f32_impl( y4 += 16 * 32; } -#else - (void) x; - (void) y; - (void) yl; - (void) nb32; -#endif for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); @@ -5167,6 +5143,7 @@ void kernel_mul_mv_iq4_nl_f32_impl( } } +#if QK_K != 64 void kernel_mul_mv_iq4_xs_f32_impl( device const void * src0, device const float * src1, @@ -5260,6 +5237,7 @@ void 
kernel_mul_mv_iq4_xs_f32_impl( } } } +#endif [[host_name("kernel_mul_mv_iq1_s_f32")]] kernel void kernel_mul_mv_iq1_s_f32( @@ -5344,7 +5322,11 @@ kernel void kernel_mul_mv_iq4_xs_f32( uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { +#if QK_K == 64 + kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +#else kernel_mul_mv_iq4_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +#endif } //============================= templates and their specializations ============================= @@ -5770,6 +5752,9 @@ void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 template void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 & reg) { +#if QK_K == 64 + dequantize_iq4_nl(xb, il, reg); +#else // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 const int ib32 = il/2; il = il%2; @@ -5786,6 +5771,7 @@ void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; } +#endif } template @@ -6334,7 +6320,11 @@ template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_t kernel_get_r template [[host_name("kernel_get_rows_iq2_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; +#if QK_K == 64 +template [[host_name("kernel_get_rows_iq4_xs")]] kernel get_rows_t kernel_get_rows; +#else template [[host_name("kernel_get_rows_iq4_xs")]] kernel get_rows_t kernel_get_rows; +#endif // // matrix-matrix multiplication @@ -6378,7 +6368,11 @@ template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_m template [[host_name("kernel_mul_mm_iq2_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; +#if QK_K == 64 +template [[host_name("kernel_mul_mm_iq4_xs_f32")]] kernel mat_mm_t kernel_mul_mm; +#else template [[host_name("kernel_mul_mm_iq4_xs_f32")]] kernel mat_mm_t kernel_mul_mm; +#endif // // indirect matrix-matrix multiplication @@ -6434,7 +6428,11 @@ template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel template [[host_name("kernel_mul_mm_id_iq2_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +#if QK_K == 64 +template [[host_name("kernel_mul_mm_id_iq4_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +#else template [[host_name("kernel_mul_mm_id_iq4_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +#endif // // matrix-vector multiplication @@ -7707,7 +7705,11 @@ kernel void kernel_mul_mv_id_iq4_xs_f32( const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; +#if QK_K == 64 + kernel_mul_mv_iq4_nl_f32_impl( +#else kernel_mul_mv_iq4_xs_f32_impl( +#endif src0[id], (device const float *) (src1 + bid*nb11), dst + bid*ne0, diff --git a/ggml-quants.c b/ggml-quants.c index f73d17ce2..371826f14 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -1877,7 +1877,7 @@ static void quantize_row_q2_K_impl(const float * restrict x, block_q2_K * restri float mins[QK_K/16]; 
float scales[QK_K/16]; float sw[QK_K/16]; - float weight[QK_K/16]; + float weight[16]; uint8_t Ls[QK_K/16], Lm[QK_K/16]; for (int i = 0; i < nb; i++) { @@ -1887,13 +1887,42 @@ static void quantize_row_q2_K_impl(const float * restrict x, block_q2_K * restri float sigma2 = sumx2/QK_K; for (int j = 0; j < QK_K/16; ++j) { const float * restrict qw = quant_weights + QK_K * i + 16*j; - for (int l = 0; l < QK_K/16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); for (int l = 0; l < QK_K/16; ++l) sw[j] += weight[l]; - scales[j] = make_qkx3_quants(QK_K/16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); } - float dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); - float mm = make_qp_quants(QK_K/16, 15, mins, Lm, sw); + float dm, mm; +#if QK_K == 64 + float max_scale = 0, max_min = 0; + for (int j = 0; j < QK_K/16; ++j) { + max_scale = MAX(max_scale, scales[j]); + max_min = MAX(max_min, mins[j]); + } + dm = max_scale/15; + mm = max_min/15; + if (max_scale) { + float id = 1/dm; + for (int j = 0; j < QK_K/16; ++j) { + int l = nearest_int(id*scales[j]); + Ls[j] = MAX(0, MIN(15, l)); + } + } else { + memset(Ls, 0, QK_K/16); + } + if (max_min) { + float id = 1/mm; + for (int j = 0; j < QK_K/16; ++j) { + int l = nearest_int(id*mins[j]); + Lm[j] = MAX(0, MIN(15, l)); + } + } else { + memset(Lm, 0, QK_K/16); + } +#else + dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); + mm = make_qp_quants(QK_K/16, 15, mins, Lm, sw); +#endif y[i].d = GGML_FP32_TO_FP16(dm); y[i].dmin = GGML_FP32_TO_FP16(mm); dm = GGML_FP16_TO_FP32(y[i].d); @@ -4227,6 +4256,9 @@ void dequantize_row_iq4_nl(const block_iq4_nl * restrict x, float * restrict y, void dequantize_row_iq4_xs(const block_iq4_xs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); +#if QK_K == 64 + dequantize_row_iq4_nl((const block_iq4_nl *)x, y, k); +#else const int nb = k / QK_K; for (int i = 0; i < nb; i++) { @@ -4246,6 +4278,7 @@ void dequantize_row_iq4_xs(const block_iq4_xs * restrict x, float * restrict y, qs += 16; } } +#endif } //===================================== Q8_K ============================================== @@ -6306,7 +6339,7 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r float sumf = 0; - int isum[4]; + int isum[QK_K/16]; for (int i = 0; i < nb; ++i) { @@ -6322,14 +6355,14 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d); const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); - isum[0] = isum[1] = isum[2] = isum[3] = 0; + memset(isum, 0, (QK_K/16)*sizeof(int)); for (int l = 0; l < 16; ++l) { isum[0] += q8[l+ 0] * ((q2[l] >> 0) & 3); isum[1] += q8[l+16] * ((q2[l] >> 2) & 3); isum[2] += q8[l+32] * ((q2[l] >> 4) & 3); isum[3] += q8[l+48] * ((q2[l] >> 6) & 3); } - for (int l = 0; l < 4; ++l) { + for (int l = 0; l < QK_K/16; ++l) { isum[l] *= (sc[l] & 0xF); } sumf += dall * (isum[0] + isum[1] + isum[2] + isum[3]) - dmin * summs; @@ -9488,15 +9521,7 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * #elif defined(__AVX2__) - const __m128i m4 = _mm_set1_epi8(0xf); - const __m128i m1 = _mm_set1_epi8(1); - const __m256i m511 = _mm256_set1_epi16(511); const __m256i mone = _mm256_set1_epi8(1); - - static const uint8_t k_bit_helper[32] = { - 0x00, 
0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, - 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, - }; static const char block_sign_shuffle_mask_1[32] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, @@ -9510,11 +9535,77 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, }; - const __m256i bit_helper = _mm256_loadu_si256((const __m256i*)k_bit_helper); const __m256i bit_selector_mask = _mm256_loadu_si256((const __m256i*)bit_selector_mask_bytes); const __m256i block_sign_shuffle_1 = _mm256_loadu_si256((const __m256i*)block_sign_shuffle_mask_1); const __m256i block_sign_shuffle_2 = _mm256_loadu_si256((const __m256i*)block_sign_shuffle_mask_2); +#if QK_K == 64 + static const uint8_t k_bit_helper[16] = { + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + }; + const __m128i bit_helper = _mm_loadu_si128((const __m128i*)k_bit_helper); + const __m128i m511 = _mm_set1_epi16(511); + typedef union { + __m128i vec_index; + uint16_t index[8]; + } index_t; + + index_t idx; + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const __m128i q2_data = _mm_loadu_si128((const __m128i*)x[i].qs); + idx.vec_index = _mm_and_si128(q2_data, m511); + + const __m128i partial_sign_bits = _mm_srli_epi16(q2_data, 9); + const __m128i partial_sign_bits_upper = _mm_srli_epi16(q2_data, 13); + const __m128i partial_sign_bits_for_counting = _mm_xor_si128(partial_sign_bits, partial_sign_bits_upper); + + const __m128i odd_bits = _mm_shuffle_epi8(bit_helper, partial_sign_bits_for_counting); + const __m128i full_sign_bits = _mm_or_si128(partial_sign_bits, odd_bits); + const __m256i full_signs = _mm256_set_m128i(full_sign_bits, full_sign_bits); + + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)y[i].qs); + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)(y[i].qs+32)); + + const __m256i q2_1 = _mm256_set_epi64x(iq2xs_grid[idx.index[3]], iq2xs_grid[idx.index[2]], + iq2xs_grid[idx.index[1]], iq2xs_grid[idx.index[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xs_grid[idx.index[7]], iq2xs_grid[idx.index[6]], + iq2xs_grid[idx.index[5]], iq2xs_grid[idx.index[4]]); + + __m256i signs; + signs = _mm256_shuffle_epi8(full_signs, block_sign_shuffle_1); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, _mm256_or_si256(signs, mone)); + + signs = _mm256_shuffle_epi8(full_signs, block_sign_shuffle_2); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, _mm256_or_si256(signs, mone)); + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + + const __m256i sc1 = _mm256_set_m128i(_mm_set1_epi16(2*(x[i].scales[0] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[0] & 0xf)+1)); + const __m256i sc2 = _mm256_set_m128i(_mm_set1_epi16(2*(x[i].scales[1] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[1] & 0xf)+1)); + + const __m256i sum = _mm256_add_epi32(_mm256_madd_epi16(sc1, dot1), _mm256_madd_epi16(sc2, dot2)); + + accumf = 
_mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(sum), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); +#else + + static const uint8_t k_bit_helper[32] = { + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + }; + const __m256i bit_helper = _mm256_loadu_si256((const __m256i*)k_bit_helper); + const __m256i m511 = _mm256_set1_epi16(511); + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + uint64_t aux64; // somewhat hacky, but gives a significant boost in performance @@ -9603,6 +9694,7 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * } *s = 0.125f * hsum_float_8(accumf); +#endif #else @@ -10199,7 +10291,8 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const const int nb = n / QK_K; -#if defined __ARM_NEON + // TODO: implement for QK_K = 64 +#if defined __ARM_NEON && QK_K == 256 const uint8x16_t m8 = vdupq_n_u8(0x08); const uint8x16_t m7 = vdupq_n_u8(0x07); @@ -10256,7 +10349,8 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const *s = sumf; -#elif defined __AVX2__ + // TODO: implement for QK_K = 64 +#elif defined __AVX2__ && QK_K == 256 const __m128i m8 = _mm_set1_epi8(0x08); const __m128i m7 = _mm_set1_epi8(0x07); @@ -10455,6 +10549,9 @@ void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * UNUSED(by); UNUSED(bs); assert(n % QK_K == 0); +#if QK_K == 64 + ggml_vec_dot_iq4_nl_q8_0(n, s, bs, vx, bx, vy, by, nrc); +#else const block_iq4_xs * restrict x = vx; const block_q8_K * restrict y = vy; @@ -10574,6 +10671,7 @@ void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * } *s = sumf; #endif +#endif } // ================================ IQ2 quantization ============================================= @@ -10921,7 +11019,7 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict const int kMaxQ = 3; - const int nbl = n/256; + const int nbl = n/QK_K; block_iq2_xxs * y = vy; @@ -11094,7 +11192,7 @@ static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict v const int kMaxQ = 3; - const int nbl = n/256; + const int nbl = n/QK_K; block_iq2_xs * y = vy; @@ -12037,7 +12135,7 @@ static void quantize_row_iq1_s_impl(const float * restrict x, void * restrict vy GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); GGML_ASSERT(n%QK_K == 0); - const int nbl = n/256; + const int nbl = n/QK_K; block_iq1_s * y = vy; @@ -12315,6 +12413,9 @@ void quantize_row_iq4_nl_reference(const float * restrict x, block_iq4_nl * rest } size_t quantize_iq4_xs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { +#if QK_K == 64 + return quantize_iq4_nl(src, dst, nrow, n_per_row, hist, quant_weights); +#else (void)hist; GGML_ASSERT(n_per_row%QK_K == 0); int nblock = n_per_row/QK_K; @@ -12333,6 +12434,7 @@ size_t quantize_iq4_xs(const float * src, void * dst, int nrow, int n_per_row, i qrow += nblock*sizeof(block_iq4_xs); } return nrow * nblock * sizeof(block_iq4_xs); +#endif } void quantize_row_iq4_xs(const float * restrict x, void * restrict vy, int k) { @@ -12363,7 +12465,7 @@ static void quantize_row_iq2_s_impl(const float * restrict x, void * restrict vy const int kMaxQ = 3; - const int nbl = n/256; + const int nbl = n/QK_K; block_iq2_s * y = vy; diff --git a/ggml-quants.h b/ggml-quants.h 
index 2c61134c4..316e35687 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -230,6 +230,10 @@ typedef struct { } block_iq4_nl; static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); +#if QK_K == 64 +#define block_iq4_xs block_iq4_nl +//typedef struct block_iq4_nl block_iq4_xs; +#else typedef struct { ggml_fp16_t d; uint16_t scales_h; @@ -237,6 +241,7 @@ typedef struct { uint8_t qs[QK_K/2]; } block_iq4_xs; static_assert(sizeof(block_iq4_xs) == sizeof(ggml_fp16_t) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); +#endif #ifdef __cplusplus extern "C" { diff --git a/ggml.c b/ggml.c index d66db3352..4591644ad 100644 --- a/ggml.c +++ b/ggml.c @@ -728,14 +728,22 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { }, [GGML_TYPE_IQ4_XS] = { .type_name = "iq4_xs", +#if QK_K == 64 + .blck_size = QK4_NL, +#else .blck_size = QK_K, +#endif .type_size = sizeof(block_iq4_xs), .is_quantized = true, .to_float = (ggml_to_float_t) dequantize_row_iq4_xs, .from_float = quantize_row_iq4_xs, .from_float_reference = (ggml_from_float_t)quantize_row_iq4_xs_reference, .vec_dot = ggml_vec_dot_iq4_xs_q8_K, +#if QK_K == 64 + .vec_dot_type = GGML_TYPE_Q8_0, +#else .vec_dot_type = GGML_TYPE_Q8_K, +#endif .nrows = 1, }, [GGML_TYPE_Q8_K] = { @@ -19830,6 +19838,9 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_IQ4_NL: +#if QK_K == 64 + case GGML_TYPE_IQ4_XS: +#endif { GGML_ASSERT(start % QK4_NL == 0); GGML_ASSERT(start % n_per_row == 0); @@ -19838,15 +19849,17 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq4_nl(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; +#if QK_K != 64 case GGML_TYPE_IQ4_XS: { - GGML_ASSERT(start % QK4_NL == 0); + GGML_ASSERT(start % QK_K == 0); GGML_ASSERT(start % n_per_row == 0); size_t start_row = start / n_per_row; size_t row_size = ggml_row_size(type, n_per_row); result = quantize_iq4_xs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; +#endif case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); From efc72253f7987ed7bdc8bde9d9fa5c7cac2f6292 Mon Sep 17 00:00:00 2001 From: Jorge A <161275481+jorgealias@users.noreply.github.com> Date: Wed, 28 Feb 2024 01:39:15 -0700 Subject: [PATCH 729/811] server : add "/chat/completions" alias for "/v1/...` (#5722) * Add "/chat/completions" as alias for "/v1/chat/completions" * merge to upstream master * minor : fix trailing whitespace --------- Co-authored-by: Georgi Gerganov --- examples/server/server.cpp | 133 +++++++++--------- .../server/tests/features/parallel.feature | 22 +++ examples/server/tests/features/steps/steps.py | 28 +++- 3 files changed, 115 insertions(+), 68 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 846ef7e5f..6b3ee531c 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -3211,87 +3211,88 @@ int main(int argc, char **argv) res.set_content(models.dump(), "application/json; charset=utf-8"); }); + const auto chat_completions = [&llama, &validate_api_key, &sparams](const httplib::Request &req, httplib::Response &res) + { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + if (!validate_api_key(req, res)) { + return; + } + json data 
= oaicompat_completion_params_parse(llama.model, json::parse(req.body), sparams.chat_template); - // TODO: add mount point without "/v1" prefix -- how? - svr.Post("/v1/chat/completions", [&llama, &validate_api_key, &sparams](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - if (!validate_api_key(req, res)) { - return; - } - json data = oaicompat_completion_params_parse(llama.model, json::parse(req.body), sparams.chat_template); + const int task_id = llama.queue_tasks.get_new_id(); + llama.queue_results.add_waiting_task_id(task_id); + llama.request_completion(task_id, data, false, false, -1); - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, data, false, false, -1); + if (!json_value(data, "stream", false)) { + std::string completion_text; + task_result result = llama.queue_results.recv(task_id); - if (!json_value(data, "stream", false)) { - std::string completion_text; - task_result result = llama.queue_results.recv(task_id); + if (!result.error && result.stop) { + json oaicompat_result = format_final_response_oaicompat(data, result); - if (!result.error && result.stop) { - json oaicompat_result = format_final_response_oaicompat(data, result); + res.set_content(oaicompat_result.dump(-1, ' ', false, + json::error_handler_t::replace), + "application/json; charset=utf-8"); + } else { + res.status = 500; + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); + } + llama.queue_results.remove_waiting_task_id(task_id); + } else { + const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink &sink) { + while (true) { + task_result llama_result = llama.queue_results.recv(task_id); + if (!llama_result.error) { + std::vector result_array = format_partial_response_oaicompat( llama_result); - res.set_content(oaicompat_result.dump(-1, ' ', false, - json::error_handler_t::replace), - "application/json; charset=utf-8"); - } else { - res.status = 500; - res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); - } - llama.queue_results.remove_waiting_task_id(task_id); - } else { - const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink &sink) { - while (true) { - task_result llama_result = llama.queue_results.recv(task_id); - if (!llama_result.error) { - std::vector result_array = format_partial_response_oaicompat( llama_result); - - for (auto it = result_array.begin(); it != result_array.end(); ++it) - { - if (!it->empty()) { - const std::string str = - "data: " + - it->dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - LOG_VERBOSE("data stream", {{"to_send", str}}); - if (!sink.write(str.c_str(), str.size())) { - llama.queue_results.remove_waiting_task_id(task_id); - return false; - } - } - } - if (llama_result.stop) { - break; - } - } else { + for (auto it = result_array.begin(); it != result_array.end(); ++it) + { + if (!it->empty()) { const std::string str = - "error: " + - llama_result.result_json.dump(-1, ' ', false, - json::error_handler_t::replace) + + "data: " + + it->dump(-1, ' ', false, json::error_handler_t::replace) + "\n\n"; LOG_VERBOSE("data stream", {{"to_send", str}}); if (!sink.write(str.c_str(), str.size())) { llama.queue_results.remove_waiting_task_id(task_id); return false; } - break; } } - sink.done(); - llama.queue_results.remove_waiting_task_id(task_id); - return true; - }; - - auto on_complete = 
[task_id, &llama](bool) { - // cancel request - llama.request_cancel(task_id); - llama.queue_results.remove_waiting_task_id(task_id); - }; - - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + if (llama_result.stop) { + break; + } + } else { + const std::string str = + "error: " + + llama_result.result_json.dump(-1, ' ', false, + json::error_handler_t::replace) + + "\n\n"; + LOG_VERBOSE("data stream", {{"to_send", str}}); + if (!sink.write(str.c_str(), str.size())) { + llama.queue_results.remove_waiting_task_id(task_id); + return false; + } + break; + } } - }); + sink.done(); + llama.queue_results.remove_waiting_task_id(task_id); + return true; + }; + + auto on_complete = [task_id, &llama](bool) { + // cancel request + llama.request_cancel(task_id); + llama.queue_results.remove_waiting_task_id(task_id); + }; + + res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + } + }; + + svr.Post("/chat/completions", chat_completions); + svr.Post("/v1/chat/completions", chat_completions); svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature index c85f9de1d..5f895cf90 100644 --- a/examples/server/tests/features/parallel.feature +++ b/examples/server/tests/features/parallel.feature @@ -54,6 +54,28 @@ Feature: Parallel | disabled | 128 | | enabled | 64 | + Scenario Outline: Multi users OAI completions compatibility no v1 + Given a system prompt You are a writer. + And a model tinyllama-2 + Given a prompt: + """ + Write a very long book. + """ + And a prompt: + """ + Write another a poem. + """ + And max tokens to predict + And streaming is + Given concurrent OAI completions requests no v1 + Then the server is busy + Then the server is idle + Then all prompts are predicted with tokens + Examples: + | streaming | n_predict | + | disabled | 128 | + | enabled | 64 | + Scenario: Multi users with total number of tokens to predict exceeds the KV Cache size #3969 Given a prompt: """ diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index ad87fcb82..381da105e 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -231,6 +231,7 @@ async def step_oai_chat_completions(context, api_error): completion = await oai_chat_completions(context.prompts.pop(), context.system_prompt, context.base_url, + '/v1/chat', False, model=context.model if hasattr(context, 'model') else None, @@ -288,6 +289,28 @@ async def step_oai_chat_completions(context): # user_prompt is inserted automatically context.system_prompt, context.base_url, + '/v1/chat/completions', + True, # async_client + model=context.model + if hasattr(context, 'model') else None, + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None) + + +@step(u'concurrent OAI completions requests no v1') +@async_run_until_complete +async def step_oai_chat_completions(context): + await concurrent_requests(context, oai_chat_completions, + # user_prompt is inserted automatically + context.system_prompt, + context.base_url, + '/chat/completions', 
True, # async_client model=context.model if hasattr(context, 'model') else None, @@ -497,6 +520,7 @@ async def request_completion(prompt, async def oai_chat_completions(user_prompt, system_prompt, base_url, + base_path, async_client, debug=False, model=None, @@ -537,7 +561,7 @@ async def oai_chat_completions(user_prompt, origin = 'llama.cpp' headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} async with aiohttp.ClientSession() as session: - async with session.post(f'{base_url}/v1/chat/completions', + async with session.post(f'{base_url}{base_path}', json=payload, headers=headers) as response: if enable_streaming: @@ -579,7 +603,7 @@ async def oai_chat_completions(user_prompt, else: try: openai.api_key = user_api_key - openai.api_base = f'{base_url}/v1/chat' + openai.api_base = f'{base_url}{base_path}' chat_completion = openai.Completion.create( messages=payload['messages'], model=model, From 6c4416868df2e5455da7d20547f62bcf9735ba8e Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 28 Feb 2024 09:39:39 +0100 Subject: [PATCH 730/811] readme : add link to LLaVA 1.6 models (#5758) Signed-off-by: Daniel Bevenius --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 507a2888b..5401e197f 100644 --- a/README.md +++ b/README.md @@ -107,7 +107,7 @@ Typically finetunes of the base models below are supported as well. **Multimodal models:** -- [x] [LLaVA 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) +- [x] [LLaVA 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e), [LLaVA 1.6 models](https://huggingface.co/collections/liuhaotian/llava-16-65b9e40155f60fd046a5ccf2) - [x] [BakLLaVA](https://huggingface.co/models?search=SkunkworksAI/Bakllava) - [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) From 177628bfd85565070916ad66a5ac4071ee0527d8 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Wed, 28 Feb 2024 02:51:11 -0600 Subject: [PATCH 731/811] llama : improve BERT tokenization (#5740) * implement nfd for stripping accents in wpm tokenizer * sort nfd map; reuse iterator * use builtin tolower * add locale include * Simplify to_lower cases Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- llama.cpp | 137 +++++++++------------------- unicode.h | 262 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 305 insertions(+), 94 deletions(-) diff --git a/llama.cpp b/llama.cpp index 464e1b89b..356ca1076 100644 --- a/llama.cpp +++ b/llama.cpp @@ -68,10 +68,12 @@ #include #include #include +#include #include #include #include #include +#include #include #include #include @@ -8941,37 +8943,46 @@ struct llm_tokenizer_wpm { } std::vector preprocess(const std::string & text) { - std::string ori_str = normalize(text); - uint64_t ori_size = ori_str.size(); + // normalalization form D + std::vector codepoints = codepoints_from_utf8(text); + std::vector nfd_codepoints; + for (uint32_t code : codepoints) { + auto it = nfd_map.find(code); + if (it != nfd_map.end()) { + for (uint32_t c : it->second) { + nfd_codepoints.push_back(c); + } + } else { + nfd_codepoints.push_back(code); + } + } - // single punct / single symbol / single digit - // baseline: add whitespace on the left and right of punct and chinese characters - std::vector words; + // strip accents, strip control, uniformize whitespace, + // to lowercase, pad 
chinese characters, pad punctuation std::string new_str = ""; - uint64_t i = 0; - while (i < ori_size) { - int utf_char_len = utf8_len(ori_str[i]); - if ((utf_char_len == 1) && ispunct(ori_str[i])) { - new_str += " "; - new_str += ori_str[i]; - new_str += " "; - i += 1; + for (uint32_t code : nfd_codepoints) { + int type = codepoint_type(code); + if (type == CODEPOINT_TYPE_ACCENT_MARK || type == CODEPOINT_TYPE_CONTROL) { + continue; } - else if ((utf_char_len == 3) && is_chinese_char(ori_str.substr(i, 3))) { - new_str += " "; - new_str += ori_str.substr(i, 3); - new_str += " "; - i += 3; + code = to_lower(code); + if (type == CODEPOINT_TYPE_WHITESPACE) { + code = ' '; } - else { - new_str += ori_str[i]; - i += 1; + std::string s = codepoint_to_utf8(code); + if (type == CODEPOINT_TYPE_PUNCTUATION || is_ascii_punct(code) || is_chinese_char(code)) { + new_str += " "; + new_str += s; + new_str += " "; + } else { + new_str += s; } } // split by whitespace uint64_t l = 0; uint64_t r = 0; + std::vector words; while (r < new_str.size()) { // if is whitespace if (isspace(new_str[r])) { @@ -8989,47 +9000,20 @@ struct llm_tokenizer_wpm { return words; } - std::string normalize(const std::string & text) { - // TODO: handle chinese characters? https://github.com/huggingface/tokenizers/blob/ef5f50605ddf9f8caef1598c0e4853862b9707a7/tokenizers/src/normalizers/bert.rs#L98 - std::string text2 = strip_accents(text); - for (size_t i = 0; i < text2.size(); i += utf8_len(text2[i])) { - char c = text2[i]; - if (c >= 'A' && c <= 'Z') { - text2[i] = c - 'A' + 'a'; - } + uint32_t to_lower(uint32_t code) { +#if defined(_WIN32) + if (code > 0xFFFF) { + return code; } - return text2; +#endif + return std::tolower(wchar_t(code), std::locale("en_US.UTF-8")); } - bool is_chinese_char(const std::string & str) { - int len = str.length(); - unsigned int codepoint = 0; - int num_bytes = 0; - int i = 0; - unsigned char ch = static_cast(str[i]); - if (ch <= 0x7f) { - codepoint = ch; - num_bytes = 1; - } else if ((ch >> 5) == 0x06) { - codepoint = ch & 0x1f; - num_bytes = 2; - } else if ((ch >> 4) == 0x0e) { - codepoint = ch & 0x0f; - num_bytes = 3; - } else if ((ch >> 3) == 0x1e) { - codepoint = ch & 0x07; - num_bytes = 4; - } - for (int j = 1; j < num_bytes; ++j) { - if (i + j >= len) { - return false; // incomplete UTF-8 character - } - unsigned char next_ch = static_cast(str[i + j]); - if ((next_ch >> 6) != 0x02) { - return false; // invalid trailing byte - } - codepoint = (codepoint << 6) | (next_ch & 0x3f); - } + bool is_ascii_punct(uint32_t code) { + return code < 256 && ispunct(code); + } + + bool is_chinese_char(uint32_t codepoint) { if ((codepoint >= 0x4E00 && codepoint <= 0x9FFF) || (codepoint >= 0x3400 && codepoint <= 0x4DBF) || (codepoint >= 0x20000 && codepoint <= 0x2A6DF) || @@ -9045,41 +9029,6 @@ struct llm_tokenizer_wpm { return false; } - std::string strip_accents(const std::string & input_string) { - std::string resultString; - std::map accent_map = { - {"À", 'A'}, {"Á", 'A'}, {"Â", 'A'}, {"Ã", 'A'}, {"Ä", 'A'}, {"Å", 'A'}, - {"à", 'a'}, {"á", 'a'}, {"â", 'a'}, {"ã", 'a'}, {"ä", 'a'}, {"å", 'a'}, - {"È", 'E'}, {"É", 'E'}, {"Ê", 'E'}, {"Ë", 'E'}, {"è", 'e'}, {"é", 'e'}, - {"ê", 'e'}, {"ë", 'e'}, {"Ì", 'I'}, {"Í", 'I'}, {"Î", 'I'}, {"Ï", 'I'}, - {"ì", 'i'}, {"í", 'i'}, {"î", 'i'}, {"ï", 'i'}, {"Ò", 'O'}, {"Ó", 'O'}, - {"Ô", 'O'}, {"Õ", 'O'}, {"Ö", 'O'}, {"ò", 'o'}, {"ó", 'o'}, {"ô", 'o'}, - {"õ", 'o'}, {"ö", 'o'}, {"Ù", 'U'}, {"Ú", 'U'}, {"Û", 'U'}, {"Ü", 'U'}, - {"ù", 'u'}, {"ú", 'u'}, {"û", 'u'}, {"ü", 'u'}, 
{"Ý", 'Y'}, {"ý", 'y'}, - {"Ç", 'C'}, {"ç", 'c'}, {"Ñ", 'N'}, {"ñ", 'n'}, - }; - - for (size_t i = 0; i < input_string.length();) { - int len = utf8_len(input_string[i]); - std::string curChar = input_string.substr(i, len); - auto iter = accent_map.find(curChar); - if (iter != accent_map.end()) { - resultString += iter->second; - } else { - resultString += curChar; - } - i += len; - } - - return resultString; - } - - static size_t utf8_len(char src) { - const size_t lookup[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4}; - uint8_t highbits = static_cast(src) >> 4; - return lookup[highbits]; - } - const llama_vocab & vocab; }; diff --git a/unicode.h b/unicode.h index 10a5dab01..620e2b580 100644 --- a/unicode.h +++ b/unicode.h @@ -223,6 +223,268 @@ static const std::vector> control_ranges = { {0x2B81E, 0x2B81F}, {0x2CEA2, 0x2CEAF}, {0x2EBE1, 0x2F7FF}, {0x2FA1E, 0x2FFFF}, {0x3134B, 0xE00FF}, {0xE01F0, 0x10FFFF}, }; +static const std::unordered_map> nfd_map = { +{0xC0, {0x41, 0x300}}, {0xC1, {0x41, 0x301}}, {0xC2, {0x41, 0x302}}, {0xC3, {0x41, 0x303}}, {0xC4, {0x41, 0x308}}, {0xC5, {0x41, 0x30A}}, {0xC7, {0x43, 0x327}}, {0xC8, {0x45, 0x300}}, +{0xC9, {0x45, 0x301}}, {0xCA, {0x45, 0x302}}, {0xCB, {0x45, 0x308}}, {0xCC, {0x49, 0x300}}, {0xCD, {0x49, 0x301}}, {0xCE, {0x49, 0x302}}, {0xCF, {0x49, 0x308}}, {0xD1, {0x4E, 0x303}}, +{0xD2, {0x4F, 0x300}}, {0xD3, {0x4F, 0x301}}, {0xD4, {0x4F, 0x302}}, {0xD5, {0x4F, 0x303}}, {0xD6, {0x4F, 0x308}}, {0xD9, {0x55, 0x300}}, {0xDA, {0x55, 0x301}}, {0xDB, {0x55, 0x302}}, +{0xDC, {0x55, 0x308}}, {0xDD, {0x59, 0x301}}, {0xE0, {0x61, 0x300}}, {0xE1, {0x61, 0x301}}, {0xE2, {0x61, 0x302}}, {0xE3, {0x61, 0x303}}, {0xE4, {0x61, 0x308}}, {0xE5, {0x61, 0x30A}}, +{0xE7, {0x63, 0x327}}, {0xE8, {0x65, 0x300}}, {0xE9, {0x65, 0x301}}, {0xEA, {0x65, 0x302}}, {0xEB, {0x65, 0x308}}, {0xEC, {0x69, 0x300}}, {0xED, {0x69, 0x301}}, {0xEE, {0x69, 0x302}}, +{0xEF, {0x69, 0x308}}, {0xF1, {0x6E, 0x303}}, {0xF2, {0x6F, 0x300}}, {0xF3, {0x6F, 0x301}}, {0xF4, {0x6F, 0x302}}, {0xF5, {0x6F, 0x303}}, {0xF6, {0x6F, 0x308}}, {0xF9, {0x75, 0x300}}, +{0xFA, {0x75, 0x301}}, {0xFB, {0x75, 0x302}}, {0xFC, {0x75, 0x308}}, {0xFD, {0x79, 0x301}}, {0xFF, {0x79, 0x308}}, {0x100, {0x41, 0x304}}, {0x101, {0x61, 0x304}}, {0x102, {0x41, 0x306}}, +{0x103, {0x61, 0x306}}, {0x104, {0x41, 0x328}}, {0x105, {0x61, 0x328}}, {0x106, {0x43, 0x301}}, {0x107, {0x63, 0x301}}, {0x108, {0x43, 0x302}}, {0x109, {0x63, 0x302}}, {0x10A, {0x43, 0x307}}, +{0x10B, {0x63, 0x307}}, {0x10C, {0x43, 0x30C}}, {0x10D, {0x63, 0x30C}}, {0x10E, {0x44, 0x30C}}, {0x10F, {0x64, 0x30C}}, {0x112, {0x45, 0x304}}, {0x113, {0x65, 0x304}}, {0x114, {0x45, 0x306}}, +{0x115, {0x65, 0x306}}, {0x116, {0x45, 0x307}}, {0x117, {0x65, 0x307}}, {0x118, {0x45, 0x328}}, {0x119, {0x65, 0x328}}, {0x11A, {0x45, 0x30C}}, {0x11B, {0x65, 0x30C}}, {0x11C, {0x47, 0x302}}, +{0x11D, {0x67, 0x302}}, {0x11E, {0x47, 0x306}}, {0x11F, {0x67, 0x306}}, {0x120, {0x47, 0x307}}, {0x121, {0x67, 0x307}}, {0x122, {0x47, 0x327}}, {0x123, {0x67, 0x327}}, {0x124, {0x48, 0x302}}, +{0x125, {0x68, 0x302}}, {0x128, {0x49, 0x303}}, {0x129, {0x69, 0x303}}, {0x12A, {0x49, 0x304}}, {0x12B, {0x69, 0x304}}, {0x12C, {0x49, 0x306}}, {0x12D, {0x69, 0x306}}, {0x12E, {0x49, 0x328}}, +{0x12F, {0x69, 0x328}}, {0x130, {0x49, 0x307}}, {0x134, {0x4A, 0x302}}, {0x135, {0x6A, 0x302}}, {0x136, {0x4B, 0x327}}, {0x137, {0x6B, 0x327}}, {0x139, {0x4C, 0x301}}, {0x13A, {0x6C, 0x301}}, +{0x13B, {0x4C, 0x327}}, {0x13C, {0x6C, 0x327}}, {0x13D, {0x4C, 0x30C}}, {0x13E, {0x6C, 0x30C}}, {0x143, {0x4E, 0x301}}, 
{0x144, {0x6E, 0x301}}, {0x145, {0x4E, 0x327}}, {0x146, {0x6E, 0x327}}, +{0x147, {0x4E, 0x30C}}, {0x148, {0x6E, 0x30C}}, {0x14C, {0x4F, 0x304}}, {0x14D, {0x6F, 0x304}}, {0x14E, {0x4F, 0x306}}, {0x14F, {0x6F, 0x306}}, {0x150, {0x4F, 0x30B}}, {0x151, {0x6F, 0x30B}}, +{0x154, {0x52, 0x301}}, {0x155, {0x72, 0x301}}, {0x156, {0x52, 0x327}}, {0x157, {0x72, 0x327}}, {0x158, {0x52, 0x30C}}, {0x159, {0x72, 0x30C}}, {0x15A, {0x53, 0x301}}, {0x15B, {0x73, 0x301}}, +{0x15C, {0x53, 0x302}}, {0x15D, {0x73, 0x302}}, {0x15E, {0x53, 0x327}}, {0x15F, {0x73, 0x327}}, {0x160, {0x53, 0x30C}}, {0x161, {0x73, 0x30C}}, {0x162, {0x54, 0x327}}, {0x163, {0x74, 0x327}}, +{0x164, {0x54, 0x30C}}, {0x165, {0x74, 0x30C}}, {0x168, {0x55, 0x303}}, {0x169, {0x75, 0x303}}, {0x16A, {0x55, 0x304}}, {0x16B, {0x75, 0x304}}, {0x16C, {0x55, 0x306}}, {0x16D, {0x75, 0x306}}, +{0x16E, {0x55, 0x30A}}, {0x16F, {0x75, 0x30A}}, {0x170, {0x55, 0x30B}}, {0x171, {0x75, 0x30B}}, {0x172, {0x55, 0x328}}, {0x173, {0x75, 0x328}}, {0x174, {0x57, 0x302}}, {0x175, {0x77, 0x302}}, +{0x176, {0x59, 0x302}}, {0x177, {0x79, 0x302}}, {0x178, {0x59, 0x308}}, {0x179, {0x5A, 0x301}}, {0x17A, {0x7A, 0x301}}, {0x17B, {0x5A, 0x307}}, {0x17C, {0x7A, 0x307}}, {0x17D, {0x5A, 0x30C}}, +{0x17E, {0x7A, 0x30C}}, {0x1A0, {0x4F, 0x31B}}, {0x1A1, {0x6F, 0x31B}}, {0x1AF, {0x55, 0x31B}}, {0x1B0, {0x75, 0x31B}}, {0x1CD, {0x41, 0x30C}}, {0x1CE, {0x61, 0x30C}}, {0x1CF, {0x49, 0x30C}}, +{0x1D0, {0x69, 0x30C}}, {0x1D1, {0x4F, 0x30C}}, {0x1D2, {0x6F, 0x30C}}, {0x1D3, {0x55, 0x30C}}, {0x1D4, {0x75, 0x30C}}, {0x1D5, {0x55, 0x308, 0x304}}, {0x1D6, {0x75, 0x308, 0x304}}, +{0x1D7, {0x55, 0x308, 0x301}}, {0x1D8, {0x75, 0x308, 0x301}}, {0x1D9, {0x55, 0x308, 0x30C}}, {0x1DA, {0x75, 0x308, 0x30C}}, {0x1DB, {0x55, 0x308, 0x300}}, {0x1DC, {0x75, 0x308, 0x300}}, +{0x1DE, {0x41, 0x308, 0x304}}, {0x1DF, {0x61, 0x308, 0x304}}, {0x1E0, {0x41, 0x307, 0x304}}, {0x1E1, {0x61, 0x307, 0x304}}, {0x1E2, {0xC6, 0x304}}, {0x1E3, {0xE6, 0x304}}, {0x1E6, {0x47, 0x30C}}, +{0x1E7, {0x67, 0x30C}}, {0x1E8, {0x4B, 0x30C}}, {0x1E9, {0x6B, 0x30C}}, {0x1EA, {0x4F, 0x328}}, {0x1EB, {0x6F, 0x328}}, {0x1EC, {0x4F, 0x328, 0x304}}, {0x1ED, {0x6F, 0x328, 0x304}}, +{0x1EE, {0x1B7, 0x30C}}, {0x1EF, {0x292, 0x30C}}, {0x1F0, {0x6A, 0x30C}}, {0x1F4, {0x47, 0x301}}, {0x1F5, {0x67, 0x301}}, {0x1F8, {0x4E, 0x300}}, {0x1F9, {0x6E, 0x300}}, {0x1FA, {0x41, 0x30A, 0x301}}, +{0x1FB, {0x61, 0x30A, 0x301}}, {0x1FC, {0xC6, 0x301}}, {0x1FD, {0xE6, 0x301}}, {0x1FE, {0xD8, 0x301}}, {0x1FF, {0xF8, 0x301}}, {0x200, {0x41, 0x30F}}, {0x201, {0x61, 0x30F}}, {0x202, {0x41, 0x311}}, +{0x203, {0x61, 0x311}}, {0x204, {0x45, 0x30F}}, {0x205, {0x65, 0x30F}}, {0x206, {0x45, 0x311}}, {0x207, {0x65, 0x311}}, {0x208, {0x49, 0x30F}}, {0x209, {0x69, 0x30F}}, {0x20A, {0x49, 0x311}}, +{0x20B, {0x69, 0x311}}, {0x20C, {0x4F, 0x30F}}, {0x20D, {0x6F, 0x30F}}, {0x20E, {0x4F, 0x311}}, {0x20F, {0x6F, 0x311}}, {0x210, {0x52, 0x30F}}, {0x211, {0x72, 0x30F}}, {0x212, {0x52, 0x311}}, +{0x213, {0x72, 0x311}}, {0x214, {0x55, 0x30F}}, {0x215, {0x75, 0x30F}}, {0x216, {0x55, 0x311}}, {0x217, {0x75, 0x311}}, {0x218, {0x53, 0x326}}, {0x219, {0x73, 0x326}}, {0x21A, {0x54, 0x326}}, +{0x21B, {0x74, 0x326}}, {0x21E, {0x48, 0x30C}}, {0x21F, {0x68, 0x30C}}, {0x226, {0x41, 0x307}}, {0x227, {0x61, 0x307}}, {0x228, {0x45, 0x327}}, {0x229, {0x65, 0x327}}, {0x22A, {0x4F, 0x308, 0x304}}, +{0x22B, {0x6F, 0x308, 0x304}}, {0x22C, {0x4F, 0x303, 0x304}}, {0x22D, {0x6F, 0x303, 0x304}}, {0x22E, {0x4F, 0x307}}, {0x22F, {0x6F, 0x307}}, {0x230, {0x4F, 0x307, 0x304}}, +{0x231, {0x6F, 0x307, 
0x304}}, {0x232, {0x59, 0x304}}, {0x233, {0x79, 0x304}}, {0x340, {0x300}}, {0x341, {0x301}}, {0x343, {0x313}}, {0x344, {0x308, 0x301}}, {0x374, {0x2B9}}, {0x37E, {0x3B}}, +{0x385, {0xA8, 0x301}}, {0x386, {0x391, 0x301}}, {0x387, {0xB7}}, {0x388, {0x395, 0x301}}, {0x389, {0x397, 0x301}}, {0x38A, {0x399, 0x301}}, {0x38C, {0x39F, 0x301}}, {0x38E, {0x3A5, 0x301}}, +{0x38F, {0x3A9, 0x301}}, {0x390, {0x3B9, 0x308, 0x301}}, {0x3AA, {0x399, 0x308}}, {0x3AB, {0x3A5, 0x308}}, {0x3AC, {0x3B1, 0x301}}, {0x3AD, {0x3B5, 0x301}}, {0x3AE, {0x3B7, 0x301}}, +{0x3AF, {0x3B9, 0x301}}, {0x3B0, {0x3C5, 0x308, 0x301}}, {0x3CA, {0x3B9, 0x308}}, {0x3CB, {0x3C5, 0x308}}, {0x3CC, {0x3BF, 0x301}}, {0x3CD, {0x3C5, 0x301}}, {0x3CE, {0x3C9, 0x301}}, +{0x3D3, {0x3D2, 0x301}}, {0x3D4, {0x3D2, 0x308}}, {0x400, {0x415, 0x300}}, {0x401, {0x415, 0x308}}, {0x403, {0x413, 0x301}}, {0x407, {0x406, 0x308}}, {0x40C, {0x41A, 0x301}}, {0x40D, {0x418, 0x300}}, +{0x40E, {0x423, 0x306}}, {0x419, {0x418, 0x306}}, {0x439, {0x438, 0x306}}, {0x450, {0x435, 0x300}}, {0x451, {0x435, 0x308}}, {0x453, {0x433, 0x301}}, {0x457, {0x456, 0x308}}, {0x45C, {0x43A, 0x301}}, +{0x45D, {0x438, 0x300}}, {0x45E, {0x443, 0x306}}, {0x476, {0x474, 0x30F}}, {0x477, {0x475, 0x30F}}, {0x4C1, {0x416, 0x306}}, {0x4C2, {0x436, 0x306}}, {0x4D0, {0x410, 0x306}}, {0x4D1, {0x430, 0x306}}, +{0x4D2, {0x410, 0x308}}, {0x4D3, {0x430, 0x308}}, {0x4D6, {0x415, 0x306}}, {0x4D7, {0x435, 0x306}}, {0x4DA, {0x4D8, 0x308}}, {0x4DB, {0x4D9, 0x308}}, {0x4DC, {0x416, 0x308}}, {0x4DD, {0x436, 0x308}}, +{0x4DE, {0x417, 0x308}}, {0x4DF, {0x437, 0x308}}, {0x4E2, {0x418, 0x304}}, {0x4E3, {0x438, 0x304}}, {0x4E4, {0x418, 0x308}}, {0x4E5, {0x438, 0x308}}, {0x4E6, {0x41E, 0x308}}, {0x4E7, {0x43E, 0x308}}, +{0x4EA, {0x4E8, 0x308}}, {0x4EB, {0x4E9, 0x308}}, {0x4EC, {0x42D, 0x308}}, {0x4ED, {0x44D, 0x308}}, {0x4EE, {0x423, 0x304}}, {0x4EF, {0x443, 0x304}}, {0x4F0, {0x423, 0x308}}, {0x4F1, {0x443, 0x308}}, +{0x4F2, {0x423, 0x30B}}, {0x4F3, {0x443, 0x30B}}, {0x4F4, {0x427, 0x308}}, {0x4F5, {0x447, 0x308}}, {0x4F8, {0x42B, 0x308}}, {0x4F9, {0x44B, 0x308}}, {0x622, {0x627, 0x653}}, {0x623, {0x627, 0x654}}, +{0x624, {0x648, 0x654}}, {0x625, {0x627, 0x655}}, {0x626, {0x64A, 0x654}}, {0x6C0, {0x6D5, 0x654}}, {0x6C2, {0x6C1, 0x654}}, {0x6D3, {0x6D2, 0x654}}, {0x929, {0x928, 0x93C}}, {0x931, {0x930, 0x93C}}, +{0x934, {0x933, 0x93C}}, {0x958, {0x915, 0x93C}}, {0x959, {0x916, 0x93C}}, {0x95A, {0x917, 0x93C}}, {0x95B, {0x91C, 0x93C}}, {0x95C, {0x921, 0x93C}}, {0x95D, {0x922, 0x93C}}, {0x95E, {0x92B, 0x93C}}, +{0x95F, {0x92F, 0x93C}}, {0x9CB, {0x9C7, 0x9BE}}, {0x9CC, {0x9C7, 0x9D7}}, {0x9DC, {0x9A1, 0x9BC}}, {0x9DD, {0x9A2, 0x9BC}}, {0x9DF, {0x9AF, 0x9BC}}, {0xA33, {0xA32, 0xA3C}}, {0xA36, {0xA38, 0xA3C}}, +{0xA59, {0xA16, 0xA3C}}, {0xA5A, {0xA17, 0xA3C}}, {0xA5B, {0xA1C, 0xA3C}}, {0xA5E, {0xA2B, 0xA3C}}, {0xB48, {0xB47, 0xB56}}, {0xB4B, {0xB47, 0xB3E}}, {0xB4C, {0xB47, 0xB57}}, {0xB5C, {0xB21, 0xB3C}}, +{0xB5D, {0xB22, 0xB3C}}, {0xB94, {0xB92, 0xBD7}}, {0xBCA, {0xBC6, 0xBBE}}, {0xBCB, {0xBC7, 0xBBE}}, {0xBCC, {0xBC6, 0xBD7}}, {0xC48, {0xC46, 0xC56}}, {0xCC0, {0xCBF, 0xCD5}}, {0xCC7, {0xCC6, 0xCD5}}, +{0xCC8, {0xCC6, 0xCD6}}, {0xCCA, {0xCC6, 0xCC2}}, {0xCCB, {0xCC6, 0xCC2, 0xCD5}}, {0xD4A, {0xD46, 0xD3E}}, {0xD4B, {0xD47, 0xD3E}}, {0xD4C, {0xD46, 0xD57}}, {0xDDA, {0xDD9, 0xDCA}}, +{0xDDC, {0xDD9, 0xDCF}}, {0xDDD, {0xDD9, 0xDCF, 0xDCA}}, {0xDDE, {0xDD9, 0xDDF}}, {0xF43, {0xF42, 0xFB7}}, {0xF4D, {0xF4C, 0xFB7}}, {0xF52, {0xF51, 0xFB7}}, {0xF57, {0xF56, 0xFB7}}, +{0xF5C, {0xF5B, 0xFB7}}, {0xF69, {0xF40, 
0xFB5}}, {0xF73, {0xF71, 0xF72}}, {0xF75, {0xF71, 0xF74}}, {0xF76, {0xFB2, 0xF80}}, {0xF78, {0xFB3, 0xF80}}, {0xF81, {0xF71, 0xF80}}, {0xF93, {0xF92, 0xFB7}}, +{0xF9D, {0xF9C, 0xFB7}}, {0xFA2, {0xFA1, 0xFB7}}, {0xFA7, {0xFA6, 0xFB7}}, {0xFAC, {0xFAB, 0xFB7}}, {0xFB9, {0xF90, 0xFB5}}, {0x1026, {0x1025, 0x102E}}, {0x1B06, {0x1B05, 0x1B35}}, +{0x1B08, {0x1B07, 0x1B35}}, {0x1B0A, {0x1B09, 0x1B35}}, {0x1B0C, {0x1B0B, 0x1B35}}, {0x1B0E, {0x1B0D, 0x1B35}}, {0x1B12, {0x1B11, 0x1B35}}, {0x1B3B, {0x1B3A, 0x1B35}}, {0x1B3D, {0x1B3C, 0x1B35}}, +{0x1B40, {0x1B3E, 0x1B35}}, {0x1B41, {0x1B3F, 0x1B35}}, {0x1B43, {0x1B42, 0x1B35}}, {0x1E00, {0x41, 0x325}}, {0x1E01, {0x61, 0x325}}, {0x1E02, {0x42, 0x307}}, {0x1E03, {0x62, 0x307}}, +{0x1E04, {0x42, 0x323}}, {0x1E05, {0x62, 0x323}}, {0x1E06, {0x42, 0x331}}, {0x1E07, {0x62, 0x331}}, {0x1E08, {0x43, 0x327, 0x301}}, {0x1E09, {0x63, 0x327, 0x301}}, {0x1E0A, {0x44, 0x307}}, +{0x1E0B, {0x64, 0x307}}, {0x1E0C, {0x44, 0x323}}, {0x1E0D, {0x64, 0x323}}, {0x1E0E, {0x44, 0x331}}, {0x1E0F, {0x64, 0x331}}, {0x1E10, {0x44, 0x327}}, {0x1E11, {0x64, 0x327}}, {0x1E12, {0x44, 0x32D}}, +{0x1E13, {0x64, 0x32D}}, {0x1E14, {0x45, 0x304, 0x300}}, {0x1E15, {0x65, 0x304, 0x300}}, {0x1E16, {0x45, 0x304, 0x301}}, {0x1E17, {0x65, 0x304, 0x301}}, {0x1E18, {0x45, 0x32D}}, +{0x1E19, {0x65, 0x32D}}, {0x1E1A, {0x45, 0x330}}, {0x1E1B, {0x65, 0x330}}, {0x1E1C, {0x45, 0x327, 0x306}}, {0x1E1D, {0x65, 0x327, 0x306}}, {0x1E1E, {0x46, 0x307}}, {0x1E1F, {0x66, 0x307}}, +{0x1E20, {0x47, 0x304}}, {0x1E21, {0x67, 0x304}}, {0x1E22, {0x48, 0x307}}, {0x1E23, {0x68, 0x307}}, {0x1E24, {0x48, 0x323}}, {0x1E25, {0x68, 0x323}}, {0x1E26, {0x48, 0x308}}, {0x1E27, {0x68, 0x308}}, +{0x1E28, {0x48, 0x327}}, {0x1E29, {0x68, 0x327}}, {0x1E2A, {0x48, 0x32E}}, {0x1E2B, {0x68, 0x32E}}, {0x1E2C, {0x49, 0x330}}, {0x1E2D, {0x69, 0x330}}, {0x1E2E, {0x49, 0x308, 0x301}}, +{0x1E2F, {0x69, 0x308, 0x301}}, {0x1E30, {0x4B, 0x301}}, {0x1E31, {0x6B, 0x301}}, {0x1E32, {0x4B, 0x323}}, {0x1E33, {0x6B, 0x323}}, {0x1E34, {0x4B, 0x331}}, {0x1E35, {0x6B, 0x331}}, +{0x1E36, {0x4C, 0x323}}, {0x1E37, {0x6C, 0x323}}, {0x1E38, {0x4C, 0x323, 0x304}}, {0x1E39, {0x6C, 0x323, 0x304}}, {0x1E3A, {0x4C, 0x331}}, {0x1E3B, {0x6C, 0x331}}, {0x1E3C, {0x4C, 0x32D}}, +{0x1E3D, {0x6C, 0x32D}}, {0x1E3E, {0x4D, 0x301}}, {0x1E3F, {0x6D, 0x301}}, {0x1E40, {0x4D, 0x307}}, {0x1E41, {0x6D, 0x307}}, {0x1E42, {0x4D, 0x323}}, {0x1E43, {0x6D, 0x323}}, {0x1E44, {0x4E, 0x307}}, +{0x1E45, {0x6E, 0x307}}, {0x1E46, {0x4E, 0x323}}, {0x1E47, {0x6E, 0x323}}, {0x1E48, {0x4E, 0x331}}, {0x1E49, {0x6E, 0x331}}, {0x1E4A, {0x4E, 0x32D}}, {0x1E4B, {0x6E, 0x32D}}, +{0x1E4C, {0x4F, 0x303, 0x301}}, {0x1E4D, {0x6F, 0x303, 0x301}}, {0x1E4E, {0x4F, 0x303, 0x308}}, {0x1E4F, {0x6F, 0x303, 0x308}}, {0x1E50, {0x4F, 0x304, 0x300}}, {0x1E51, {0x6F, 0x304, 0x300}}, +{0x1E52, {0x4F, 0x304, 0x301}}, {0x1E53, {0x6F, 0x304, 0x301}}, {0x1E54, {0x50, 0x301}}, {0x1E55, {0x70, 0x301}}, {0x1E56, {0x50, 0x307}}, {0x1E57, {0x70, 0x307}}, {0x1E58, {0x52, 0x307}}, +{0x1E59, {0x72, 0x307}}, {0x1E5A, {0x52, 0x323}}, {0x1E5B, {0x72, 0x323}}, {0x1E5C, {0x52, 0x323, 0x304}}, {0x1E5D, {0x72, 0x323, 0x304}}, {0x1E5E, {0x52, 0x331}}, {0x1E5F, {0x72, 0x331}}, +{0x1E60, {0x53, 0x307}}, {0x1E61, {0x73, 0x307}}, {0x1E62, {0x53, 0x323}}, {0x1E63, {0x73, 0x323}}, {0x1E64, {0x53, 0x301, 0x307}}, {0x1E65, {0x73, 0x301, 0x307}}, {0x1E66, {0x53, 0x30C, 0x307}}, +{0x1E67, {0x73, 0x30C, 0x307}}, {0x1E68, {0x53, 0x323, 0x307}}, {0x1E69, {0x73, 0x323, 0x307}}, {0x1E6A, {0x54, 0x307}}, {0x1E6B, {0x74, 0x307}}, {0x1E6C, {0x54, 
0x323}}, {0x1E6D, {0x74, 0x323}}, +{0x1E6E, {0x54, 0x331}}, {0x1E6F, {0x74, 0x331}}, {0x1E70, {0x54, 0x32D}}, {0x1E71, {0x74, 0x32D}}, {0x1E72, {0x55, 0x324}}, {0x1E73, {0x75, 0x324}}, {0x1E74, {0x55, 0x330}}, {0x1E75, {0x75, 0x330}}, +{0x1E76, {0x55, 0x32D}}, {0x1E77, {0x75, 0x32D}}, {0x1E78, {0x55, 0x303, 0x301}}, {0x1E79, {0x75, 0x303, 0x301}}, {0x1E7A, {0x55, 0x304, 0x308}}, {0x1E7B, {0x75, 0x304, 0x308}}, +{0x1E7C, {0x56, 0x303}}, {0x1E7D, {0x76, 0x303}}, {0x1E7E, {0x56, 0x323}}, {0x1E7F, {0x76, 0x323}}, {0x1E80, {0x57, 0x300}}, {0x1E81, {0x77, 0x300}}, {0x1E82, {0x57, 0x301}}, {0x1E83, {0x77, 0x301}}, +{0x1E84, {0x57, 0x308}}, {0x1E85, {0x77, 0x308}}, {0x1E86, {0x57, 0x307}}, {0x1E87, {0x77, 0x307}}, {0x1E88, {0x57, 0x323}}, {0x1E89, {0x77, 0x323}}, {0x1E8A, {0x58, 0x307}}, {0x1E8B, {0x78, 0x307}}, +{0x1E8C, {0x58, 0x308}}, {0x1E8D, {0x78, 0x308}}, {0x1E8E, {0x59, 0x307}}, {0x1E8F, {0x79, 0x307}}, {0x1E90, {0x5A, 0x302}}, {0x1E91, {0x7A, 0x302}}, {0x1E92, {0x5A, 0x323}}, {0x1E93, {0x7A, 0x323}}, +{0x1E94, {0x5A, 0x331}}, {0x1E95, {0x7A, 0x331}}, {0x1E96, {0x68, 0x331}}, {0x1E97, {0x74, 0x308}}, {0x1E98, {0x77, 0x30A}}, {0x1E99, {0x79, 0x30A}}, {0x1E9B, {0x17F, 0x307}}, {0x1EA0, {0x41, 0x323}}, +{0x1EA1, {0x61, 0x323}}, {0x1EA2, {0x41, 0x309}}, {0x1EA3, {0x61, 0x309}}, {0x1EA4, {0x41, 0x302, 0x301}}, {0x1EA5, {0x61, 0x302, 0x301}}, {0x1EA6, {0x41, 0x302, 0x300}}, +{0x1EA7, {0x61, 0x302, 0x300}}, {0x1EA8, {0x41, 0x302, 0x309}}, {0x1EA9, {0x61, 0x302, 0x309}}, {0x1EAA, {0x41, 0x302, 0x303}}, {0x1EAB, {0x61, 0x302, 0x303}}, {0x1EAC, {0x41, 0x323, 0x302}}, +{0x1EAD, {0x61, 0x323, 0x302}}, {0x1EAE, {0x41, 0x306, 0x301}}, {0x1EAF, {0x61, 0x306, 0x301}}, {0x1EB0, {0x41, 0x306, 0x300}}, {0x1EB1, {0x61, 0x306, 0x300}}, {0x1EB2, {0x41, 0x306, 0x309}}, +{0x1EB3, {0x61, 0x306, 0x309}}, {0x1EB4, {0x41, 0x306, 0x303}}, {0x1EB5, {0x61, 0x306, 0x303}}, {0x1EB6, {0x41, 0x323, 0x306}}, {0x1EB7, {0x61, 0x323, 0x306}}, {0x1EB8, {0x45, 0x323}}, +{0x1EB9, {0x65, 0x323}}, {0x1EBA, {0x45, 0x309}}, {0x1EBB, {0x65, 0x309}}, {0x1EBC, {0x45, 0x303}}, {0x1EBD, {0x65, 0x303}}, {0x1EBE, {0x45, 0x302, 0x301}}, {0x1EBF, {0x65, 0x302, 0x301}}, +{0x1EC0, {0x45, 0x302, 0x300}}, {0x1EC1, {0x65, 0x302, 0x300}}, {0x1EC2, {0x45, 0x302, 0x309}}, {0x1EC3, {0x65, 0x302, 0x309}}, {0x1EC4, {0x45, 0x302, 0x303}}, {0x1EC5, {0x65, 0x302, 0x303}}, +{0x1EC6, {0x45, 0x323, 0x302}}, {0x1EC7, {0x65, 0x323, 0x302}}, {0x1EC8, {0x49, 0x309}}, {0x1EC9, {0x69, 0x309}}, {0x1ECA, {0x49, 0x323}}, {0x1ECB, {0x69, 0x323}}, {0x1ECC, {0x4F, 0x323}}, +{0x1ECD, {0x6F, 0x323}}, {0x1ECE, {0x4F, 0x309}}, {0x1ECF, {0x6F, 0x309}}, {0x1ED0, {0x4F, 0x302, 0x301}}, {0x1ED1, {0x6F, 0x302, 0x301}}, {0x1ED2, {0x4F, 0x302, 0x300}}, +{0x1ED3, {0x6F, 0x302, 0x300}}, {0x1ED4, {0x4F, 0x302, 0x309}}, {0x1ED5, {0x6F, 0x302, 0x309}}, {0x1ED6, {0x4F, 0x302, 0x303}}, {0x1ED7, {0x6F, 0x302, 0x303}}, {0x1ED8, {0x4F, 0x323, 0x302}}, +{0x1ED9, {0x6F, 0x323, 0x302}}, {0x1EDA, {0x4F, 0x31B, 0x301}}, {0x1EDB, {0x6F, 0x31B, 0x301}}, {0x1EDC, {0x4F, 0x31B, 0x300}}, {0x1EDD, {0x6F, 0x31B, 0x300}}, {0x1EDE, {0x4F, 0x31B, 0x309}}, +{0x1EDF, {0x6F, 0x31B, 0x309}}, {0x1EE0, {0x4F, 0x31B, 0x303}}, {0x1EE1, {0x6F, 0x31B, 0x303}}, {0x1EE2, {0x4F, 0x31B, 0x323}}, {0x1EE3, {0x6F, 0x31B, 0x323}}, {0x1EE4, {0x55, 0x323}}, +{0x1EE5, {0x75, 0x323}}, {0x1EE6, {0x55, 0x309}}, {0x1EE7, {0x75, 0x309}}, {0x1EE8, {0x55, 0x31B, 0x301}}, {0x1EE9, {0x75, 0x31B, 0x301}}, {0x1EEA, {0x55, 0x31B, 0x300}}, +{0x1EEB, {0x75, 0x31B, 0x300}}, {0x1EEC, {0x55, 0x31B, 0x309}}, {0x1EED, {0x75, 0x31B, 0x309}}, 
{0x1EEE, {0x55, 0x31B, 0x303}}, {0x1EEF, {0x75, 0x31B, 0x303}}, {0x1EF0, {0x55, 0x31B, 0x323}}, +{0x1EF1, {0x75, 0x31B, 0x323}}, {0x1EF2, {0x59, 0x300}}, {0x1EF3, {0x79, 0x300}}, {0x1EF4, {0x59, 0x323}}, {0x1EF5, {0x79, 0x323}}, {0x1EF6, {0x59, 0x309}}, {0x1EF7, {0x79, 0x309}}, +{0x1EF8, {0x59, 0x303}}, {0x1EF9, {0x79, 0x303}}, {0x1F00, {0x3B1, 0x313}}, {0x1F01, {0x3B1, 0x314}}, {0x1F02, {0x3B1, 0x313, 0x300}}, {0x1F03, {0x3B1, 0x314, 0x300}}, {0x1F04, {0x3B1, 0x313, 0x301}}, +{0x1F05, {0x3B1, 0x314, 0x301}}, {0x1F06, {0x3B1, 0x313, 0x342}}, {0x1F07, {0x3B1, 0x314, 0x342}}, {0x1F08, {0x391, 0x313}}, {0x1F09, {0x391, 0x314}}, {0x1F0A, {0x391, 0x313, 0x300}}, +{0x1F0B, {0x391, 0x314, 0x300}}, {0x1F0C, {0x391, 0x313, 0x301}}, {0x1F0D, {0x391, 0x314, 0x301}}, {0x1F0E, {0x391, 0x313, 0x342}}, {0x1F0F, {0x391, 0x314, 0x342}}, {0x1F10, {0x3B5, 0x313}}, +{0x1F11, {0x3B5, 0x314}}, {0x1F12, {0x3B5, 0x313, 0x300}}, {0x1F13, {0x3B5, 0x314, 0x300}}, {0x1F14, {0x3B5, 0x313, 0x301}}, {0x1F15, {0x3B5, 0x314, 0x301}}, {0x1F18, {0x395, 0x313}}, +{0x1F19, {0x395, 0x314}}, {0x1F1A, {0x395, 0x313, 0x300}}, {0x1F1B, {0x395, 0x314, 0x300}}, {0x1F1C, {0x395, 0x313, 0x301}}, {0x1F1D, {0x395, 0x314, 0x301}}, {0x1F20, {0x3B7, 0x313}}, +{0x1F21, {0x3B7, 0x314}}, {0x1F22, {0x3B7, 0x313, 0x300}}, {0x1F23, {0x3B7, 0x314, 0x300}}, {0x1F24, {0x3B7, 0x313, 0x301}}, {0x1F25, {0x3B7, 0x314, 0x301}}, {0x1F26, {0x3B7, 0x313, 0x342}}, +{0x1F27, {0x3B7, 0x314, 0x342}}, {0x1F28, {0x397, 0x313}}, {0x1F29, {0x397, 0x314}}, {0x1F2A, {0x397, 0x313, 0x300}}, {0x1F2B, {0x397, 0x314, 0x300}}, {0x1F2C, {0x397, 0x313, 0x301}}, +{0x1F2D, {0x397, 0x314, 0x301}}, {0x1F2E, {0x397, 0x313, 0x342}}, {0x1F2F, {0x397, 0x314, 0x342}}, {0x1F30, {0x3B9, 0x313}}, {0x1F31, {0x3B9, 0x314}}, {0x1F32, {0x3B9, 0x313, 0x300}}, +{0x1F33, {0x3B9, 0x314, 0x300}}, {0x1F34, {0x3B9, 0x313, 0x301}}, {0x1F35, {0x3B9, 0x314, 0x301}}, {0x1F36, {0x3B9, 0x313, 0x342}}, {0x1F37, {0x3B9, 0x314, 0x342}}, {0x1F38, {0x399, 0x313}}, +{0x1F39, {0x399, 0x314}}, {0x1F3A, {0x399, 0x313, 0x300}}, {0x1F3B, {0x399, 0x314, 0x300}}, {0x1F3C, {0x399, 0x313, 0x301}}, {0x1F3D, {0x399, 0x314, 0x301}}, {0x1F3E, {0x399, 0x313, 0x342}}, +{0x1F3F, {0x399, 0x314, 0x342}}, {0x1F40, {0x3BF, 0x313}}, {0x1F41, {0x3BF, 0x314}}, {0x1F42, {0x3BF, 0x313, 0x300}}, {0x1F43, {0x3BF, 0x314, 0x300}}, {0x1F44, {0x3BF, 0x313, 0x301}}, +{0x1F45, {0x3BF, 0x314, 0x301}}, {0x1F48, {0x39F, 0x313}}, {0x1F49, {0x39F, 0x314}}, {0x1F4A, {0x39F, 0x313, 0x300}}, {0x1F4B, {0x39F, 0x314, 0x300}}, {0x1F4C, {0x39F, 0x313, 0x301}}, +{0x1F4D, {0x39F, 0x314, 0x301}}, {0x1F50, {0x3C5, 0x313}}, {0x1F51, {0x3C5, 0x314}}, {0x1F52, {0x3C5, 0x313, 0x300}}, {0x1F53, {0x3C5, 0x314, 0x300}}, {0x1F54, {0x3C5, 0x313, 0x301}}, +{0x1F55, {0x3C5, 0x314, 0x301}}, {0x1F56, {0x3C5, 0x313, 0x342}}, {0x1F57, {0x3C5, 0x314, 0x342}}, {0x1F59, {0x3A5, 0x314}}, {0x1F5B, {0x3A5, 0x314, 0x300}}, {0x1F5D, {0x3A5, 0x314, 0x301}}, +{0x1F5F, {0x3A5, 0x314, 0x342}}, {0x1F60, {0x3C9, 0x313}}, {0x1F61, {0x3C9, 0x314}}, {0x1F62, {0x3C9, 0x313, 0x300}}, {0x1F63, {0x3C9, 0x314, 0x300}}, {0x1F64, {0x3C9, 0x313, 0x301}}, +{0x1F65, {0x3C9, 0x314, 0x301}}, {0x1F66, {0x3C9, 0x313, 0x342}}, {0x1F67, {0x3C9, 0x314, 0x342}}, {0x1F68, {0x3A9, 0x313}}, {0x1F69, {0x3A9, 0x314}}, {0x1F6A, {0x3A9, 0x313, 0x300}}, +{0x1F6B, {0x3A9, 0x314, 0x300}}, {0x1F6C, {0x3A9, 0x313, 0x301}}, {0x1F6D, {0x3A9, 0x314, 0x301}}, {0x1F6E, {0x3A9, 0x313, 0x342}}, {0x1F6F, {0x3A9, 0x314, 0x342}}, {0x1F70, {0x3B1, 0x300}}, +{0x1F71, {0x3B1, 0x301}}, {0x1F72, {0x3B5, 0x300}}, {0x1F73, {0x3B5, 
0x301}}, {0x1F74, {0x3B7, 0x300}}, {0x1F75, {0x3B7, 0x301}}, {0x1F76, {0x3B9, 0x300}}, {0x1F77, {0x3B9, 0x301}}, +{0x1F78, {0x3BF, 0x300}}, {0x1F79, {0x3BF, 0x301}}, {0x1F7A, {0x3C5, 0x300}}, {0x1F7B, {0x3C5, 0x301}}, {0x1F7C, {0x3C9, 0x300}}, {0x1F7D, {0x3C9, 0x301}}, {0x1F80, {0x3B1, 0x313, 0x345}}, +{0x1F81, {0x3B1, 0x314, 0x345}}, {0x1F82, {0x3B1, 0x313, 0x300, 0x345}}, {0x1F83, {0x3B1, 0x314, 0x300, 0x345}}, {0x1F84, {0x3B1, 0x313, 0x301, 0x345}}, {0x1F85, {0x3B1, 0x314, 0x301, 0x345}}, +{0x1F86, {0x3B1, 0x313, 0x342, 0x345}}, {0x1F87, {0x3B1, 0x314, 0x342, 0x345}}, {0x1F88, {0x391, 0x313, 0x345}}, {0x1F89, {0x391, 0x314, 0x345}}, {0x1F8A, {0x391, 0x313, 0x300, 0x345}}, +{0x1F8B, {0x391, 0x314, 0x300, 0x345}}, {0x1F8C, {0x391, 0x313, 0x301, 0x345}}, {0x1F8D, {0x391, 0x314, 0x301, 0x345}}, {0x1F8E, {0x391, 0x313, 0x342, 0x345}}, {0x1F8F, {0x391, 0x314, 0x342, 0x345}}, +{0x1F90, {0x3B7, 0x313, 0x345}}, {0x1F91, {0x3B7, 0x314, 0x345}}, {0x1F92, {0x3B7, 0x313, 0x300, 0x345}}, {0x1F93, {0x3B7, 0x314, 0x300, 0x345}}, {0x1F94, {0x3B7, 0x313, 0x301, 0x345}}, +{0x1F95, {0x3B7, 0x314, 0x301, 0x345}}, {0x1F96, {0x3B7, 0x313, 0x342, 0x345}}, {0x1F97, {0x3B7, 0x314, 0x342, 0x345}}, {0x1F98, {0x397, 0x313, 0x345}}, {0x1F99, {0x397, 0x314, 0x345}}, +{0x1F9A, {0x397, 0x313, 0x300, 0x345}}, {0x1F9B, {0x397, 0x314, 0x300, 0x345}}, {0x1F9C, {0x397, 0x313, 0x301, 0x345}}, {0x1F9D, {0x397, 0x314, 0x301, 0x345}}, {0x1F9E, {0x397, 0x313, 0x342, 0x345}}, +{0x1F9F, {0x397, 0x314, 0x342, 0x345}}, {0x1FA0, {0x3C9, 0x313, 0x345}}, {0x1FA1, {0x3C9, 0x314, 0x345}}, {0x1FA2, {0x3C9, 0x313, 0x300, 0x345}}, {0x1FA3, {0x3C9, 0x314, 0x300, 0x345}}, +{0x1FA4, {0x3C9, 0x313, 0x301, 0x345}}, {0x1FA5, {0x3C9, 0x314, 0x301, 0x345}}, {0x1FA6, {0x3C9, 0x313, 0x342, 0x345}}, {0x1FA7, {0x3C9, 0x314, 0x342, 0x345}}, {0x1FA8, {0x3A9, 0x313, 0x345}}, +{0x1FA9, {0x3A9, 0x314, 0x345}}, {0x1FAA, {0x3A9, 0x313, 0x300, 0x345}}, {0x1FAB, {0x3A9, 0x314, 0x300, 0x345}}, {0x1FAC, {0x3A9, 0x313, 0x301, 0x345}}, {0x1FAD, {0x3A9, 0x314, 0x301, 0x345}}, +{0x1FAE, {0x3A9, 0x313, 0x342, 0x345}}, {0x1FAF, {0x3A9, 0x314, 0x342, 0x345}}, {0x1FB0, {0x3B1, 0x306}}, {0x1FB1, {0x3B1, 0x304}}, {0x1FB2, {0x3B1, 0x300, 0x345}}, {0x1FB3, {0x3B1, 0x345}}, +{0x1FB4, {0x3B1, 0x301, 0x345}}, {0x1FB6, {0x3B1, 0x342}}, {0x1FB7, {0x3B1, 0x342, 0x345}}, {0x1FB8, {0x391, 0x306}}, {0x1FB9, {0x391, 0x304}}, {0x1FBA, {0x391, 0x300}}, {0x1FBB, {0x391, 0x301}}, +{0x1FBC, {0x391, 0x345}}, {0x1FBE, {0x3B9}}, {0x1FC1, {0xA8, 0x342}}, {0x1FC2, {0x3B7, 0x300, 0x345}}, {0x1FC3, {0x3B7, 0x345}}, {0x1FC4, {0x3B7, 0x301, 0x345}}, {0x1FC6, {0x3B7, 0x342}}, +{0x1FC7, {0x3B7, 0x342, 0x345}}, {0x1FC8, {0x395, 0x300}}, {0x1FC9, {0x395, 0x301}}, {0x1FCA, {0x397, 0x300}}, {0x1FCB, {0x397, 0x301}}, {0x1FCC, {0x397, 0x345}}, {0x1FCD, {0x1FBF, 0x300}}, +{0x1FCE, {0x1FBF, 0x301}}, {0x1FCF, {0x1FBF, 0x342}}, {0x1FD0, {0x3B9, 0x306}}, {0x1FD1, {0x3B9, 0x304}}, {0x1FD2, {0x3B9, 0x308, 0x300}}, {0x1FD3, {0x3B9, 0x308, 0x301}}, {0x1FD6, {0x3B9, 0x342}}, +{0x1FD7, {0x3B9, 0x308, 0x342}}, {0x1FD8, {0x399, 0x306}}, {0x1FD9, {0x399, 0x304}}, {0x1FDA, {0x399, 0x300}}, {0x1FDB, {0x399, 0x301}}, {0x1FDD, {0x1FFE, 0x300}}, {0x1FDE, {0x1FFE, 0x301}}, +{0x1FDF, {0x1FFE, 0x342}}, {0x1FE0, {0x3C5, 0x306}}, {0x1FE1, {0x3C5, 0x304}}, {0x1FE2, {0x3C5, 0x308, 0x300}}, {0x1FE3, {0x3C5, 0x308, 0x301}}, {0x1FE4, {0x3C1, 0x313}}, {0x1FE5, {0x3C1, 0x314}}, +{0x1FE6, {0x3C5, 0x342}}, {0x1FE7, {0x3C5, 0x308, 0x342}}, {0x1FE8, {0x3A5, 0x306}}, {0x1FE9, {0x3A5, 0x304}}, {0x1FEA, {0x3A5, 0x300}}, {0x1FEB, {0x3A5, 
0x301}}, {0x1FEC, {0x3A1, 0x314}}, +{0x1FED, {0xA8, 0x300}}, {0x1FEE, {0xA8, 0x301}}, {0x1FEF, {0x60}}, {0x1FF2, {0x3C9, 0x300, 0x345}}, {0x1FF3, {0x3C9, 0x345}}, {0x1FF4, {0x3C9, 0x301, 0x345}}, {0x1FF6, {0x3C9, 0x342}}, +{0x1FF7, {0x3C9, 0x342, 0x345}}, {0x1FF8, {0x39F, 0x300}}, {0x1FF9, {0x39F, 0x301}}, {0x1FFA, {0x3A9, 0x300}}, {0x1FFB, {0x3A9, 0x301}}, {0x1FFC, {0x3A9, 0x345}}, {0x1FFD, {0xB4}}, {0x2000, {0x2002}}, +{0x2001, {0x2003}}, {0x2126, {0x3A9}}, {0x212A, {0x4B}}, {0x212B, {0x41, 0x30A}}, {0x219A, {0x2190, 0x338}}, {0x219B, {0x2192, 0x338}}, {0x21AE, {0x2194, 0x338}}, {0x21CD, {0x21D0, 0x338}}, +{0x21CE, {0x21D4, 0x338}}, {0x21CF, {0x21D2, 0x338}}, {0x2204, {0x2203, 0x338}}, {0x2209, {0x2208, 0x338}}, {0x220C, {0x220B, 0x338}}, {0x2224, {0x2223, 0x338}}, {0x2226, {0x2225, 0x338}}, +{0x2241, {0x223C, 0x338}}, {0x2244, {0x2243, 0x338}}, {0x2247, {0x2245, 0x338}}, {0x2249, {0x2248, 0x338}}, {0x2260, {0x3D, 0x338}}, {0x2262, {0x2261, 0x338}}, {0x226D, {0x224D, 0x338}}, +{0x226E, {0x3C, 0x338}}, {0x226F, {0x3E, 0x338}}, {0x2270, {0x2264, 0x338}}, {0x2271, {0x2265, 0x338}}, {0x2274, {0x2272, 0x338}}, {0x2275, {0x2273, 0x338}}, {0x2278, {0x2276, 0x338}}, +{0x2279, {0x2277, 0x338}}, {0x2280, {0x227A, 0x338}}, {0x2281, {0x227B, 0x338}}, {0x2284, {0x2282, 0x338}}, {0x2285, {0x2283, 0x338}}, {0x2288, {0x2286, 0x338}}, {0x2289, {0x2287, 0x338}}, +{0x22AC, {0x22A2, 0x338}}, {0x22AD, {0x22A8, 0x338}}, {0x22AE, {0x22A9, 0x338}}, {0x22AF, {0x22AB, 0x338}}, {0x22E0, {0x227C, 0x338}}, {0x22E1, {0x227D, 0x338}}, {0x22E2, {0x2291, 0x338}}, +{0x22E3, {0x2292, 0x338}}, {0x22EA, {0x22B2, 0x338}}, {0x22EB, {0x22B3, 0x338}}, {0x22EC, {0x22B4, 0x338}}, {0x22ED, {0x22B5, 0x338}}, {0x2329, {0x3008}}, {0x232A, {0x3009}}, +{0x2ADC, {0x2ADD, 0x338}}, {0x304C, {0x304B, 0x3099}}, {0x304E, {0x304D, 0x3099}}, {0x3050, {0x304F, 0x3099}}, {0x3052, {0x3051, 0x3099}}, {0x3054, {0x3053, 0x3099}}, {0x3056, {0x3055, 0x3099}}, +{0x3058, {0x3057, 0x3099}}, {0x305A, {0x3059, 0x3099}}, {0x305C, {0x305B, 0x3099}}, {0x305E, {0x305D, 0x3099}}, {0x3060, {0x305F, 0x3099}}, {0x3062, {0x3061, 0x3099}}, {0x3065, {0x3064, 0x3099}}, +{0x3067, {0x3066, 0x3099}}, {0x3069, {0x3068, 0x3099}}, {0x3070, {0x306F, 0x3099}}, {0x3071, {0x306F, 0x309A}}, {0x3073, {0x3072, 0x3099}}, {0x3074, {0x3072, 0x309A}}, {0x3076, {0x3075, 0x3099}}, +{0x3077, {0x3075, 0x309A}}, {0x3079, {0x3078, 0x3099}}, {0x307A, {0x3078, 0x309A}}, {0x307C, {0x307B, 0x3099}}, {0x307D, {0x307B, 0x309A}}, {0x3094, {0x3046, 0x3099}}, {0x309E, {0x309D, 0x3099}}, +{0x30AC, {0x30AB, 0x3099}}, {0x30AE, {0x30AD, 0x3099}}, {0x30B0, {0x30AF, 0x3099}}, {0x30B2, {0x30B1, 0x3099}}, {0x30B4, {0x30B3, 0x3099}}, {0x30B6, {0x30B5, 0x3099}}, {0x30B8, {0x30B7, 0x3099}}, +{0x30BA, {0x30B9, 0x3099}}, {0x30BC, {0x30BB, 0x3099}}, {0x30BE, {0x30BD, 0x3099}}, {0x30C0, {0x30BF, 0x3099}}, {0x30C2, {0x30C1, 0x3099}}, {0x30C5, {0x30C4, 0x3099}}, {0x30C7, {0x30C6, 0x3099}}, +{0x30C9, {0x30C8, 0x3099}}, {0x30D0, {0x30CF, 0x3099}}, {0x30D1, {0x30CF, 0x309A}}, {0x30D3, {0x30D2, 0x3099}}, {0x30D4, {0x30D2, 0x309A}}, {0x30D6, {0x30D5, 0x3099}}, {0x30D7, {0x30D5, 0x309A}}, +{0x30D9, {0x30D8, 0x3099}}, {0x30DA, {0x30D8, 0x309A}}, {0x30DC, {0x30DB, 0x3099}}, {0x30DD, {0x30DB, 0x309A}}, {0x30F4, {0x30A6, 0x3099}}, {0x30F7, {0x30EF, 0x3099}}, {0x30F8, {0x30F0, 0x3099}}, +{0x30F9, {0x30F1, 0x3099}}, {0x30FA, {0x30F2, 0x3099}}, {0x30FE, {0x30FD, 0x3099}}, {0xF900, {0x8C48}}, {0xF901, {0x66F4}}, {0xF902, {0x8ECA}}, {0xF903, {0x8CC8}}, {0xF904, {0x6ED1}}, +{0xF905, {0x4E32}}, {0xF906, {0x53E5}}, {0xF907, 
{0x9F9C}}, {0xF908, {0x9F9C}}, {0xF909, {0x5951}}, {0xF90A, {0x91D1}}, {0xF90B, {0x5587}}, {0xF90C, {0x5948}}, {0xF90D, {0x61F6}}, {0xF90E, {0x7669}}, +{0xF90F, {0x7F85}}, {0xF910, {0x863F}}, {0xF911, {0x87BA}}, {0xF912, {0x88F8}}, {0xF913, {0x908F}}, {0xF914, {0x6A02}}, {0xF915, {0x6D1B}}, {0xF916, {0x70D9}}, {0xF917, {0x73DE}}, {0xF918, {0x843D}}, +{0xF919, {0x916A}}, {0xF91A, {0x99F1}}, {0xF91B, {0x4E82}}, {0xF91C, {0x5375}}, {0xF91D, {0x6B04}}, {0xF91E, {0x721B}}, {0xF91F, {0x862D}}, {0xF920, {0x9E1E}}, {0xF921, {0x5D50}}, {0xF922, {0x6FEB}}, +{0xF923, {0x85CD}}, {0xF924, {0x8964}}, {0xF925, {0x62C9}}, {0xF926, {0x81D8}}, {0xF927, {0x881F}}, {0xF928, {0x5ECA}}, {0xF929, {0x6717}}, {0xF92A, {0x6D6A}}, {0xF92B, {0x72FC}}, {0xF92C, {0x90CE}}, +{0xF92D, {0x4F86}}, {0xF92E, {0x51B7}}, {0xF92F, {0x52DE}}, {0xF930, {0x64C4}}, {0xF931, {0x6AD3}}, {0xF932, {0x7210}}, {0xF933, {0x76E7}}, {0xF934, {0x8001}}, {0xF935, {0x8606}}, {0xF936, {0x865C}}, +{0xF937, {0x8DEF}}, {0xF938, {0x9732}}, {0xF939, {0x9B6F}}, {0xF93A, {0x9DFA}}, {0xF93B, {0x788C}}, {0xF93C, {0x797F}}, {0xF93D, {0x7DA0}}, {0xF93E, {0x83C9}}, {0xF93F, {0x9304}}, {0xF940, {0x9E7F}}, +{0xF941, {0x8AD6}}, {0xF942, {0x58DF}}, {0xF943, {0x5F04}}, {0xF944, {0x7C60}}, {0xF945, {0x807E}}, {0xF946, {0x7262}}, {0xF947, {0x78CA}}, {0xF948, {0x8CC2}}, {0xF949, {0x96F7}}, {0xF94A, {0x58D8}}, +{0xF94B, {0x5C62}}, {0xF94C, {0x6A13}}, {0xF94D, {0x6DDA}}, {0xF94E, {0x6F0F}}, {0xF94F, {0x7D2F}}, {0xF950, {0x7E37}}, {0xF951, {0x964B}}, {0xF952, {0x52D2}}, {0xF953, {0x808B}}, {0xF954, {0x51DC}}, +{0xF955, {0x51CC}}, {0xF956, {0x7A1C}}, {0xF957, {0x7DBE}}, {0xF958, {0x83F1}}, {0xF959, {0x9675}}, {0xF95A, {0x8B80}}, {0xF95B, {0x62CF}}, {0xF95C, {0x6A02}}, {0xF95D, {0x8AFE}}, {0xF95E, {0x4E39}}, +{0xF95F, {0x5BE7}}, {0xF960, {0x6012}}, {0xF961, {0x7387}}, {0xF962, {0x7570}}, {0xF963, {0x5317}}, {0xF964, {0x78FB}}, {0xF965, {0x4FBF}}, {0xF966, {0x5FA9}}, {0xF967, {0x4E0D}}, {0xF968, {0x6CCC}}, +{0xF969, {0x6578}}, {0xF96A, {0x7D22}}, {0xF96B, {0x53C3}}, {0xF96C, {0x585E}}, {0xF96D, {0x7701}}, {0xF96E, {0x8449}}, {0xF96F, {0x8AAA}}, {0xF970, {0x6BBA}}, {0xF971, {0x8FB0}}, {0xF972, {0x6C88}}, +{0xF973, {0x62FE}}, {0xF974, {0x82E5}}, {0xF975, {0x63A0}}, {0xF976, {0x7565}}, {0xF977, {0x4EAE}}, {0xF978, {0x5169}}, {0xF979, {0x51C9}}, {0xF97A, {0x6881}}, {0xF97B, {0x7CE7}}, {0xF97C, {0x826F}}, +{0xF97D, {0x8AD2}}, {0xF97E, {0x91CF}}, {0xF97F, {0x52F5}}, {0xF980, {0x5442}}, {0xF981, {0x5973}}, {0xF982, {0x5EEC}}, {0xF983, {0x65C5}}, {0xF984, {0x6FFE}}, {0xF985, {0x792A}}, {0xF986, {0x95AD}}, +{0xF987, {0x9A6A}}, {0xF988, {0x9E97}}, {0xF989, {0x9ECE}}, {0xF98A, {0x529B}}, {0xF98B, {0x66C6}}, {0xF98C, {0x6B77}}, {0xF98D, {0x8F62}}, {0xF98E, {0x5E74}}, {0xF98F, {0x6190}}, {0xF990, {0x6200}}, +{0xF991, {0x649A}}, {0xF992, {0x6F23}}, {0xF993, {0x7149}}, {0xF994, {0x7489}}, {0xF995, {0x79CA}}, {0xF996, {0x7DF4}}, {0xF997, {0x806F}}, {0xF998, {0x8F26}}, {0xF999, {0x84EE}}, {0xF99A, {0x9023}}, +{0xF99B, {0x934A}}, {0xF99C, {0x5217}}, {0xF99D, {0x52A3}}, {0xF99E, {0x54BD}}, {0xF99F, {0x70C8}}, {0xF9A0, {0x88C2}}, {0xF9A1, {0x8AAA}}, {0xF9A2, {0x5EC9}}, {0xF9A3, {0x5FF5}}, {0xF9A4, {0x637B}}, +{0xF9A5, {0x6BAE}}, {0xF9A6, {0x7C3E}}, {0xF9A7, {0x7375}}, {0xF9A8, {0x4EE4}}, {0xF9A9, {0x56F9}}, {0xF9AA, {0x5BE7}}, {0xF9AB, {0x5DBA}}, {0xF9AC, {0x601C}}, {0xF9AD, {0x73B2}}, {0xF9AE, {0x7469}}, +{0xF9AF, {0x7F9A}}, {0xF9B0, {0x8046}}, {0xF9B1, {0x9234}}, {0xF9B2, {0x96F6}}, {0xF9B3, {0x9748}}, {0xF9B4, {0x9818}}, {0xF9B5, {0x4F8B}}, {0xF9B6, {0x79AE}}, {0xF9B7, {0x91B4}}, 
{0xF9B8, {0x96B8}}, +{0xF9B9, {0x60E1}}, {0xF9BA, {0x4E86}}, {0xF9BB, {0x50DA}}, {0xF9BC, {0x5BEE}}, {0xF9BD, {0x5C3F}}, {0xF9BE, {0x6599}}, {0xF9BF, {0x6A02}}, {0xF9C0, {0x71CE}}, {0xF9C1, {0x7642}}, {0xF9C2, {0x84FC}}, +{0xF9C3, {0x907C}}, {0xF9C4, {0x9F8D}}, {0xF9C5, {0x6688}}, {0xF9C6, {0x962E}}, {0xF9C7, {0x5289}}, {0xF9C8, {0x677B}}, {0xF9C9, {0x67F3}}, {0xF9CA, {0x6D41}}, {0xF9CB, {0x6E9C}}, {0xF9CC, {0x7409}}, +{0xF9CD, {0x7559}}, {0xF9CE, {0x786B}}, {0xF9CF, {0x7D10}}, {0xF9D0, {0x985E}}, {0xF9D1, {0x516D}}, {0xF9D2, {0x622E}}, {0xF9D3, {0x9678}}, {0xF9D4, {0x502B}}, {0xF9D5, {0x5D19}}, {0xF9D6, {0x6DEA}}, +{0xF9D7, {0x8F2A}}, {0xF9D8, {0x5F8B}}, {0xF9D9, {0x6144}}, {0xF9DA, {0x6817}}, {0xF9DB, {0x7387}}, {0xF9DC, {0x9686}}, {0xF9DD, {0x5229}}, {0xF9DE, {0x540F}}, {0xF9DF, {0x5C65}}, {0xF9E0, {0x6613}}, +{0xF9E1, {0x674E}}, {0xF9E2, {0x68A8}}, {0xF9E3, {0x6CE5}}, {0xF9E4, {0x7406}}, {0xF9E5, {0x75E2}}, {0xF9E6, {0x7F79}}, {0xF9E7, {0x88CF}}, {0xF9E8, {0x88E1}}, {0xF9E9, {0x91CC}}, {0xF9EA, {0x96E2}}, +{0xF9EB, {0x533F}}, {0xF9EC, {0x6EBA}}, {0xF9ED, {0x541D}}, {0xF9EE, {0x71D0}}, {0xF9EF, {0x7498}}, {0xF9F0, {0x85FA}}, {0xF9F1, {0x96A3}}, {0xF9F2, {0x9C57}}, {0xF9F3, {0x9E9F}}, {0xF9F4, {0x6797}}, +{0xF9F5, {0x6DCB}}, {0xF9F6, {0x81E8}}, {0xF9F7, {0x7ACB}}, {0xF9F8, {0x7B20}}, {0xF9F9, {0x7C92}}, {0xF9FA, {0x72C0}}, {0xF9FB, {0x7099}}, {0xF9FC, {0x8B58}}, {0xF9FD, {0x4EC0}}, {0xF9FE, {0x8336}}, +{0xF9FF, {0x523A}}, {0xFA00, {0x5207}}, {0xFA01, {0x5EA6}}, {0xFA02, {0x62D3}}, {0xFA03, {0x7CD6}}, {0xFA04, {0x5B85}}, {0xFA05, {0x6D1E}}, {0xFA06, {0x66B4}}, {0xFA07, {0x8F3B}}, {0xFA08, {0x884C}}, +{0xFA09, {0x964D}}, {0xFA0A, {0x898B}}, {0xFA0B, {0x5ED3}}, {0xFA0C, {0x5140}}, {0xFA0D, {0x55C0}}, {0xFA10, {0x585A}}, {0xFA12, {0x6674}}, {0xFA15, {0x51DE}}, {0xFA16, {0x732A}}, {0xFA17, {0x76CA}}, +{0xFA18, {0x793C}}, {0xFA19, {0x795E}}, {0xFA1A, {0x7965}}, {0xFA1B, {0x798F}}, {0xFA1C, {0x9756}}, {0xFA1D, {0x7CBE}}, {0xFA1E, {0x7FBD}}, {0xFA20, {0x8612}}, {0xFA22, {0x8AF8}}, {0xFA25, {0x9038}}, +{0xFA26, {0x90FD}}, {0xFA2A, {0x98EF}}, {0xFA2B, {0x98FC}}, {0xFA2C, {0x9928}}, {0xFA2D, {0x9DB4}}, {0xFA2E, {0x90DE}}, {0xFA2F, {0x96B7}}, {0xFA30, {0x4FAE}}, {0xFA31, {0x50E7}}, {0xFA32, {0x514D}}, +{0xFA33, {0x52C9}}, {0xFA34, {0x52E4}}, {0xFA35, {0x5351}}, {0xFA36, {0x559D}}, {0xFA37, {0x5606}}, {0xFA38, {0x5668}}, {0xFA39, {0x5840}}, {0xFA3A, {0x58A8}}, {0xFA3B, {0x5C64}}, {0xFA3C, {0x5C6E}}, +{0xFA3D, {0x6094}}, {0xFA3E, {0x6168}}, {0xFA3F, {0x618E}}, {0xFA40, {0x61F2}}, {0xFA41, {0x654F}}, {0xFA42, {0x65E2}}, {0xFA43, {0x6691}}, {0xFA44, {0x6885}}, {0xFA45, {0x6D77}}, {0xFA46, {0x6E1A}}, +{0xFA47, {0x6F22}}, {0xFA48, {0x716E}}, {0xFA49, {0x722B}}, {0xFA4A, {0x7422}}, {0xFA4B, {0x7891}}, {0xFA4C, {0x793E}}, {0xFA4D, {0x7949}}, {0xFA4E, {0x7948}}, {0xFA4F, {0x7950}}, {0xFA50, {0x7956}}, +{0xFA51, {0x795D}}, {0xFA52, {0x798D}}, {0xFA53, {0x798E}}, {0xFA54, {0x7A40}}, {0xFA55, {0x7A81}}, {0xFA56, {0x7BC0}}, {0xFA57, {0x7DF4}}, {0xFA58, {0x7E09}}, {0xFA59, {0x7E41}}, {0xFA5A, {0x7F72}}, +{0xFA5B, {0x8005}}, {0xFA5C, {0x81ED}}, {0xFA5D, {0x8279}}, {0xFA5E, {0x8279}}, {0xFA5F, {0x8457}}, {0xFA60, {0x8910}}, {0xFA61, {0x8996}}, {0xFA62, {0x8B01}}, {0xFA63, {0x8B39}}, {0xFA64, {0x8CD3}}, +{0xFA65, {0x8D08}}, {0xFA66, {0x8FB6}}, {0xFA67, {0x9038}}, {0xFA68, {0x96E3}}, {0xFA69, {0x97FF}}, {0xFA6A, {0x983B}}, {0xFA6B, {0x6075}}, {0xFA6C, {0x242EE}}, {0xFA6D, {0x8218}}, {0xFA70, {0x4E26}}, +{0xFA71, {0x51B5}}, {0xFA72, {0x5168}}, {0xFA73, {0x4F80}}, {0xFA74, {0x5145}}, {0xFA75, {0x5180}}, {0xFA76, 
{0x52C7}}, {0xFA77, {0x52FA}}, {0xFA78, {0x559D}}, {0xFA79, {0x5555}}, {0xFA7A, {0x5599}}, +{0xFA7B, {0x55E2}}, {0xFA7C, {0x585A}}, {0xFA7D, {0x58B3}}, {0xFA7E, {0x5944}}, {0xFA7F, {0x5954}}, {0xFA80, {0x5A62}}, {0xFA81, {0x5B28}}, {0xFA82, {0x5ED2}}, {0xFA83, {0x5ED9}}, {0xFA84, {0x5F69}}, +{0xFA85, {0x5FAD}}, {0xFA86, {0x60D8}}, {0xFA87, {0x614E}}, {0xFA88, {0x6108}}, {0xFA89, {0x618E}}, {0xFA8A, {0x6160}}, {0xFA8B, {0x61F2}}, {0xFA8C, {0x6234}}, {0xFA8D, {0x63C4}}, {0xFA8E, {0x641C}}, +{0xFA8F, {0x6452}}, {0xFA90, {0x6556}}, {0xFA91, {0x6674}}, {0xFA92, {0x6717}}, {0xFA93, {0x671B}}, {0xFA94, {0x6756}}, {0xFA95, {0x6B79}}, {0xFA96, {0x6BBA}}, {0xFA97, {0x6D41}}, {0xFA98, {0x6EDB}}, +{0xFA99, {0x6ECB}}, {0xFA9A, {0x6F22}}, {0xFA9B, {0x701E}}, {0xFA9C, {0x716E}}, {0xFA9D, {0x77A7}}, {0xFA9E, {0x7235}}, {0xFA9F, {0x72AF}}, {0xFAA0, {0x732A}}, {0xFAA1, {0x7471}}, {0xFAA2, {0x7506}}, +{0xFAA3, {0x753B}}, {0xFAA4, {0x761D}}, {0xFAA5, {0x761F}}, {0xFAA6, {0x76CA}}, {0xFAA7, {0x76DB}}, {0xFAA8, {0x76F4}}, {0xFAA9, {0x774A}}, {0xFAAA, {0x7740}}, {0xFAAB, {0x78CC}}, {0xFAAC, {0x7AB1}}, +{0xFAAD, {0x7BC0}}, {0xFAAE, {0x7C7B}}, {0xFAAF, {0x7D5B}}, {0xFAB0, {0x7DF4}}, {0xFAB1, {0x7F3E}}, {0xFAB2, {0x8005}}, {0xFAB3, {0x8352}}, {0xFAB4, {0x83EF}}, {0xFAB5, {0x8779}}, {0xFAB6, {0x8941}}, +{0xFAB7, {0x8986}}, {0xFAB8, {0x8996}}, {0xFAB9, {0x8ABF}}, {0xFABA, {0x8AF8}}, {0xFABB, {0x8ACB}}, {0xFABC, {0x8B01}}, {0xFABD, {0x8AFE}}, {0xFABE, {0x8AED}}, {0xFABF, {0x8B39}}, {0xFAC0, {0x8B8A}}, +{0xFAC1, {0x8D08}}, {0xFAC2, {0x8F38}}, {0xFAC3, {0x9072}}, {0xFAC4, {0x9199}}, {0xFAC5, {0x9276}}, {0xFAC6, {0x967C}}, {0xFAC7, {0x96E3}}, {0xFAC8, {0x9756}}, {0xFAC9, {0x97DB}}, {0xFACA, {0x97FF}}, +{0xFACB, {0x980B}}, {0xFACC, {0x983B}}, {0xFACD, {0x9B12}}, {0xFACE, {0x9F9C}}, {0xFACF, {0x2284A}}, {0xFAD0, {0x22844}}, {0xFAD1, {0x233D5}}, {0xFAD2, {0x3B9D}}, {0xFAD3, {0x4018}}, +{0xFAD4, {0x4039}}, {0xFAD5, {0x25249}}, {0xFAD6, {0x25CD0}}, {0xFAD7, {0x27ED3}}, {0xFAD8, {0x9F43}}, {0xFAD9, {0x9F8E}}, {0xFB1D, {0x5D9, 0x5B4}}, {0xFB1F, {0x5F2, 0x5B7}}, {0xFB2A, {0x5E9, 0x5C1}}, +{0xFB2B, {0x5E9, 0x5C2}}, {0xFB2C, {0x5E9, 0x5BC, 0x5C1}}, {0xFB2D, {0x5E9, 0x5BC, 0x5C2}}, {0xFB2E, {0x5D0, 0x5B7}}, {0xFB2F, {0x5D0, 0x5B8}}, {0xFB30, {0x5D0, 0x5BC}}, {0xFB31, {0x5D1, 0x5BC}}, +{0xFB32, {0x5D2, 0x5BC}}, {0xFB33, {0x5D3, 0x5BC}}, {0xFB34, {0x5D4, 0x5BC}}, {0xFB35, {0x5D5, 0x5BC}}, {0xFB36, {0x5D6, 0x5BC}}, {0xFB38, {0x5D8, 0x5BC}}, {0xFB39, {0x5D9, 0x5BC}}, +{0xFB3A, {0x5DA, 0x5BC}}, {0xFB3B, {0x5DB, 0x5BC}}, {0xFB3C, {0x5DC, 0x5BC}}, {0xFB3E, {0x5DE, 0x5BC}}, {0xFB40, {0x5E0, 0x5BC}}, {0xFB41, {0x5E1, 0x5BC}}, {0xFB43, {0x5E3, 0x5BC}}, +{0xFB44, {0x5E4, 0x5BC}}, {0xFB46, {0x5E6, 0x5BC}}, {0xFB47, {0x5E7, 0x5BC}}, {0xFB48, {0x5E8, 0x5BC}}, {0xFB49, {0x5E9, 0x5BC}}, {0xFB4A, {0x5EA, 0x5BC}}, {0xFB4B, {0x5D5, 0x5B9}}, +{0xFB4C, {0x5D1, 0x5BF}}, {0xFB4D, {0x5DB, 0x5BF}}, {0xFB4E, {0x5E4, 0x5BF}}, {0x1109A, {0x11099, 0x110BA}}, {0x1109C, {0x1109B, 0x110BA}}, {0x110AB, {0x110A5, 0x110BA}}, +{0x1112E, {0x11131, 0x11127}}, {0x1112F, {0x11132, 0x11127}}, {0x1134B, {0x11347, 0x1133E}}, {0x1134C, {0x11347, 0x11357}}, {0x114BB, {0x114B9, 0x114BA}}, {0x114BC, {0x114B9, 0x114B0}}, +{0x114BE, {0x114B9, 0x114BD}}, {0x115BA, {0x115B8, 0x115AF}}, {0x115BB, {0x115B9, 0x115AF}}, {0x1D15E, {0x1D157, 0x1D165}}, {0x1D15F, {0x1D158, 0x1D165}}, {0x1D160, {0x1D158, 0x1D165, 0x1D16E}}, +{0x1D161, {0x1D158, 0x1D165, 0x1D16F}}, {0x1D162, {0x1D158, 0x1D165, 0x1D170}}, {0x1D163, {0x1D158, 0x1D165, 0x1D171}}, {0x1D164, {0x1D158, 0x1D165, 0x1D172}}, 
{0x1D1BB, {0x1D1B9, 0x1D165}}, +{0x1D1BC, {0x1D1BA, 0x1D165}}, {0x1D1BD, {0x1D1B9, 0x1D165, 0x1D16E}}, {0x1D1BE, {0x1D1BA, 0x1D165, 0x1D16E}}, {0x1D1BF, {0x1D1B9, 0x1D165, 0x1D16F}}, {0x1D1C0, {0x1D1BA, 0x1D165, 0x1D16F}}, +{0x2F800, {0x4E3D}}, {0x2F801, {0x4E38}}, {0x2F802, {0x4E41}}, {0x2F803, {0x20122}}, {0x2F804, {0x4F60}}, {0x2F805, {0x4FAE}}, {0x2F806, {0x4FBB}}, {0x2F807, {0x5002}}, {0x2F808, {0x507A}}, +{0x2F809, {0x5099}}, {0x2F80A, {0x50E7}}, {0x2F80B, {0x50CF}}, {0x2F80C, {0x349E}}, {0x2F80D, {0x2063A}}, {0x2F80E, {0x514D}}, {0x2F80F, {0x5154}}, {0x2F810, {0x5164}}, {0x2F811, {0x5177}}, +{0x2F812, {0x2051C}}, {0x2F813, {0x34B9}}, {0x2F814, {0x5167}}, {0x2F815, {0x518D}}, {0x2F816, {0x2054B}}, {0x2F817, {0x5197}}, {0x2F818, {0x51A4}}, {0x2F819, {0x4ECC}}, {0x2F81A, {0x51AC}}, +{0x2F81B, {0x51B5}}, {0x2F81C, {0x291DF}}, {0x2F81D, {0x51F5}}, {0x2F81E, {0x5203}}, {0x2F81F, {0x34DF}}, {0x2F820, {0x523B}}, {0x2F821, {0x5246}}, {0x2F822, {0x5272}}, {0x2F823, {0x5277}}, +{0x2F824, {0x3515}}, {0x2F825, {0x52C7}}, {0x2F826, {0x52C9}}, {0x2F827, {0x52E4}}, {0x2F828, {0x52FA}}, {0x2F829, {0x5305}}, {0x2F82A, {0x5306}}, {0x2F82B, {0x5317}}, {0x2F82C, {0x5349}}, +{0x2F82D, {0x5351}}, {0x2F82E, {0x535A}}, {0x2F82F, {0x5373}}, {0x2F830, {0x537D}}, {0x2F831, {0x537F}}, {0x2F832, {0x537F}}, {0x2F833, {0x537F}}, {0x2F834, {0x20A2C}}, {0x2F835, {0x7070}}, +{0x2F836, {0x53CA}}, {0x2F837, {0x53DF}}, {0x2F838, {0x20B63}}, {0x2F839, {0x53EB}}, {0x2F83A, {0x53F1}}, {0x2F83B, {0x5406}}, {0x2F83C, {0x549E}}, {0x2F83D, {0x5438}}, {0x2F83E, {0x5448}}, +{0x2F83F, {0x5468}}, {0x2F840, {0x54A2}}, {0x2F841, {0x54F6}}, {0x2F842, {0x5510}}, {0x2F843, {0x5553}}, {0x2F844, {0x5563}}, {0x2F845, {0x5584}}, {0x2F846, {0x5584}}, {0x2F847, {0x5599}}, +{0x2F848, {0x55AB}}, {0x2F849, {0x55B3}}, {0x2F84A, {0x55C2}}, {0x2F84B, {0x5716}}, {0x2F84C, {0x5606}}, {0x2F84D, {0x5717}}, {0x2F84E, {0x5651}}, {0x2F84F, {0x5674}}, {0x2F850, {0x5207}}, +{0x2F851, {0x58EE}}, {0x2F852, {0x57CE}}, {0x2F853, {0x57F4}}, {0x2F854, {0x580D}}, {0x2F855, {0x578B}}, {0x2F856, {0x5832}}, {0x2F857, {0x5831}}, {0x2F858, {0x58AC}}, {0x2F859, {0x214E4}}, +{0x2F85A, {0x58F2}}, {0x2F85B, {0x58F7}}, {0x2F85C, {0x5906}}, {0x2F85D, {0x591A}}, {0x2F85E, {0x5922}}, {0x2F85F, {0x5962}}, {0x2F860, {0x216A8}}, {0x2F861, {0x216EA}}, {0x2F862, {0x59EC}}, +{0x2F863, {0x5A1B}}, {0x2F864, {0x5A27}}, {0x2F865, {0x59D8}}, {0x2F866, {0x5A66}}, {0x2F867, {0x36EE}}, {0x2F868, {0x36FC}}, {0x2F869, {0x5B08}}, {0x2F86A, {0x5B3E}}, {0x2F86B, {0x5B3E}}, +{0x2F86C, {0x219C8}}, {0x2F86D, {0x5BC3}}, {0x2F86E, {0x5BD8}}, {0x2F86F, {0x5BE7}}, {0x2F870, {0x5BF3}}, {0x2F871, {0x21B18}}, {0x2F872, {0x5BFF}}, {0x2F873, {0x5C06}}, {0x2F874, {0x5F53}}, +{0x2F875, {0x5C22}}, {0x2F876, {0x3781}}, {0x2F877, {0x5C60}}, {0x2F878, {0x5C6E}}, {0x2F879, {0x5CC0}}, {0x2F87A, {0x5C8D}}, {0x2F87B, {0x21DE4}}, {0x2F87C, {0x5D43}}, {0x2F87D, {0x21DE6}}, +{0x2F87E, {0x5D6E}}, {0x2F87F, {0x5D6B}}, {0x2F880, {0x5D7C}}, {0x2F881, {0x5DE1}}, {0x2F882, {0x5DE2}}, {0x2F883, {0x382F}}, {0x2F884, {0x5DFD}}, {0x2F885, {0x5E28}}, {0x2F886, {0x5E3D}}, +{0x2F887, {0x5E69}}, {0x2F888, {0x3862}}, {0x2F889, {0x22183}}, {0x2F88A, {0x387C}}, {0x2F88B, {0x5EB0}}, {0x2F88C, {0x5EB3}}, {0x2F88D, {0x5EB6}}, {0x2F88E, {0x5ECA}}, {0x2F88F, {0x2A392}}, +{0x2F890, {0x5EFE}}, {0x2F891, {0x22331}}, {0x2F892, {0x22331}}, {0x2F893, {0x8201}}, {0x2F894, {0x5F22}}, {0x2F895, {0x5F22}}, {0x2F896, {0x38C7}}, {0x2F897, {0x232B8}}, {0x2F898, {0x261DA}}, +{0x2F899, {0x5F62}}, {0x2F89A, {0x5F6B}}, {0x2F89B, {0x38E3}}, {0x2F89C, 
{0x5F9A}}, {0x2F89D, {0x5FCD}}, {0x2F89E, {0x5FD7}}, {0x2F89F, {0x5FF9}}, {0x2F8A0, {0x6081}}, {0x2F8A1, {0x393A}}, +{0x2F8A2, {0x391C}}, {0x2F8A3, {0x6094}}, {0x2F8A4, {0x226D4}}, {0x2F8A5, {0x60C7}}, {0x2F8A6, {0x6148}}, {0x2F8A7, {0x614C}}, {0x2F8A8, {0x614E}}, {0x2F8A9, {0x614C}}, {0x2F8AA, {0x617A}}, +{0x2F8AB, {0x618E}}, {0x2F8AC, {0x61B2}}, {0x2F8AD, {0x61A4}}, {0x2F8AE, {0x61AF}}, {0x2F8AF, {0x61DE}}, {0x2F8B0, {0x61F2}}, {0x2F8B1, {0x61F6}}, {0x2F8B2, {0x6210}}, {0x2F8B3, {0x621B}}, +{0x2F8B4, {0x625D}}, {0x2F8B5, {0x62B1}}, {0x2F8B6, {0x62D4}}, {0x2F8B7, {0x6350}}, {0x2F8B8, {0x22B0C}}, {0x2F8B9, {0x633D}}, {0x2F8BA, {0x62FC}}, {0x2F8BB, {0x6368}}, {0x2F8BC, {0x6383}}, +{0x2F8BD, {0x63E4}}, {0x2F8BE, {0x22BF1}}, {0x2F8BF, {0x6422}}, {0x2F8C0, {0x63C5}}, {0x2F8C1, {0x63A9}}, {0x2F8C2, {0x3A2E}}, {0x2F8C3, {0x6469}}, {0x2F8C4, {0x647E}}, {0x2F8C5, {0x649D}}, +{0x2F8C6, {0x6477}}, {0x2F8C7, {0x3A6C}}, {0x2F8C8, {0x654F}}, {0x2F8C9, {0x656C}}, {0x2F8CA, {0x2300A}}, {0x2F8CB, {0x65E3}}, {0x2F8CC, {0x66F8}}, {0x2F8CD, {0x6649}}, {0x2F8CE, {0x3B19}}, +{0x2F8CF, {0x6691}}, {0x2F8D0, {0x3B08}}, {0x2F8D1, {0x3AE4}}, {0x2F8D2, {0x5192}}, {0x2F8D3, {0x5195}}, {0x2F8D4, {0x6700}}, {0x2F8D5, {0x669C}}, {0x2F8D6, {0x80AD}}, {0x2F8D7, {0x43D9}}, +{0x2F8D8, {0x6717}}, {0x2F8D9, {0x671B}}, {0x2F8DA, {0x6721}}, {0x2F8DB, {0x675E}}, {0x2F8DC, {0x6753}}, {0x2F8DD, {0x233C3}}, {0x2F8DE, {0x3B49}}, {0x2F8DF, {0x67FA}}, {0x2F8E0, {0x6785}}, +{0x2F8E1, {0x6852}}, {0x2F8E2, {0x6885}}, {0x2F8E3, {0x2346D}}, {0x2F8E4, {0x688E}}, {0x2F8E5, {0x681F}}, {0x2F8E6, {0x6914}}, {0x2F8E7, {0x3B9D}}, {0x2F8E8, {0x6942}}, {0x2F8E9, {0x69A3}}, +{0x2F8EA, {0x69EA}}, {0x2F8EB, {0x6AA8}}, {0x2F8EC, {0x236A3}}, {0x2F8ED, {0x6ADB}}, {0x2F8EE, {0x3C18}}, {0x2F8EF, {0x6B21}}, {0x2F8F0, {0x238A7}}, {0x2F8F1, {0x6B54}}, {0x2F8F2, {0x3C4E}}, +{0x2F8F3, {0x6B72}}, {0x2F8F4, {0x6B9F}}, {0x2F8F5, {0x6BBA}}, {0x2F8F6, {0x6BBB}}, {0x2F8F7, {0x23A8D}}, {0x2F8F8, {0x21D0B}}, {0x2F8F9, {0x23AFA}}, {0x2F8FA, {0x6C4E}}, {0x2F8FB, {0x23CBC}}, +{0x2F8FC, {0x6CBF}}, {0x2F8FD, {0x6CCD}}, {0x2F8FE, {0x6C67}}, {0x2F8FF, {0x6D16}}, {0x2F900, {0x6D3E}}, {0x2F901, {0x6D77}}, {0x2F902, {0x6D41}}, {0x2F903, {0x6D69}}, {0x2F904, {0x6D78}}, +{0x2F905, {0x6D85}}, {0x2F906, {0x23D1E}}, {0x2F907, {0x6D34}}, {0x2F908, {0x6E2F}}, {0x2F909, {0x6E6E}}, {0x2F90A, {0x3D33}}, {0x2F90B, {0x6ECB}}, {0x2F90C, {0x6EC7}}, {0x2F90D, {0x23ED1}}, +{0x2F90E, {0x6DF9}}, {0x2F90F, {0x6F6E}}, {0x2F910, {0x23F5E}}, {0x2F911, {0x23F8E}}, {0x2F912, {0x6FC6}}, {0x2F913, {0x7039}}, {0x2F914, {0x701E}}, {0x2F915, {0x701B}}, {0x2F916, {0x3D96}}, +{0x2F917, {0x704A}}, {0x2F918, {0x707D}}, {0x2F919, {0x7077}}, {0x2F91A, {0x70AD}}, {0x2F91B, {0x20525}}, {0x2F91C, {0x7145}}, {0x2F91D, {0x24263}}, {0x2F91E, {0x719C}}, {0x2F91F, {0x243AB}}, +{0x2F920, {0x7228}}, {0x2F921, {0x7235}}, {0x2F922, {0x7250}}, {0x2F923, {0x24608}}, {0x2F924, {0x7280}}, {0x2F925, {0x7295}}, {0x2F926, {0x24735}}, {0x2F927, {0x24814}}, {0x2F928, {0x737A}}, +{0x2F929, {0x738B}}, {0x2F92A, {0x3EAC}}, {0x2F92B, {0x73A5}}, {0x2F92C, {0x3EB8}}, {0x2F92D, {0x3EB8}}, {0x2F92E, {0x7447}}, {0x2F92F, {0x745C}}, {0x2F930, {0x7471}}, {0x2F931, {0x7485}}, +{0x2F932, {0x74CA}}, {0x2F933, {0x3F1B}}, {0x2F934, {0x7524}}, {0x2F935, {0x24C36}}, {0x2F936, {0x753E}}, {0x2F937, {0x24C92}}, {0x2F938, {0x7570}}, {0x2F939, {0x2219F}}, {0x2F93A, {0x7610}}, +{0x2F93B, {0x24FA1}}, {0x2F93C, {0x24FB8}}, {0x2F93D, {0x25044}}, {0x2F93E, {0x3FFC}}, {0x2F93F, {0x4008}}, {0x2F940, {0x76F4}}, {0x2F941, {0x250F3}}, {0x2F942, {0x250F2}}, {0x2F943, 
{0x25119}}, +{0x2F944, {0x25133}}, {0x2F945, {0x771E}}, {0x2F946, {0x771F}}, {0x2F947, {0x771F}}, {0x2F948, {0x774A}}, {0x2F949, {0x4039}}, {0x2F94A, {0x778B}}, {0x2F94B, {0x4046}}, {0x2F94C, {0x4096}}, +{0x2F94D, {0x2541D}}, {0x2F94E, {0x784E}}, {0x2F94F, {0x788C}}, {0x2F950, {0x78CC}}, {0x2F951, {0x40E3}}, {0x2F952, {0x25626}}, {0x2F953, {0x7956}}, {0x2F954, {0x2569A}}, {0x2F955, {0x256C5}}, +{0x2F956, {0x798F}}, {0x2F957, {0x79EB}}, {0x2F958, {0x412F}}, {0x2F959, {0x7A40}}, {0x2F95A, {0x7A4A}}, {0x2F95B, {0x7A4F}}, {0x2F95C, {0x2597C}}, {0x2F95D, {0x25AA7}}, {0x2F95E, {0x25AA7}}, +{0x2F95F, {0x7AEE}}, {0x2F960, {0x4202}}, {0x2F961, {0x25BAB}}, {0x2F962, {0x7BC6}}, {0x2F963, {0x7BC9}}, {0x2F964, {0x4227}}, {0x2F965, {0x25C80}}, {0x2F966, {0x7CD2}}, {0x2F967, {0x42A0}}, +{0x2F968, {0x7CE8}}, {0x2F969, {0x7CE3}}, {0x2F96A, {0x7D00}}, {0x2F96B, {0x25F86}}, {0x2F96C, {0x7D63}}, {0x2F96D, {0x4301}}, {0x2F96E, {0x7DC7}}, {0x2F96F, {0x7E02}}, {0x2F970, {0x7E45}}, +{0x2F971, {0x4334}}, {0x2F972, {0x26228}}, {0x2F973, {0x26247}}, {0x2F974, {0x4359}}, {0x2F975, {0x262D9}}, {0x2F976, {0x7F7A}}, {0x2F977, {0x2633E}}, {0x2F978, {0x7F95}}, {0x2F979, {0x7FFA}}, +{0x2F97A, {0x8005}}, {0x2F97B, {0x264DA}}, {0x2F97C, {0x26523}}, {0x2F97D, {0x8060}}, {0x2F97E, {0x265A8}}, {0x2F97F, {0x8070}}, {0x2F980, {0x2335F}}, {0x2F981, {0x43D5}}, {0x2F982, {0x80B2}}, +{0x2F983, {0x8103}}, {0x2F984, {0x440B}}, {0x2F985, {0x813E}}, {0x2F986, {0x5AB5}}, {0x2F987, {0x267A7}}, {0x2F988, {0x267B5}}, {0x2F989, {0x23393}}, {0x2F98A, {0x2339C}}, {0x2F98B, {0x8201}}, +{0x2F98C, {0x8204}}, {0x2F98D, {0x8F9E}}, {0x2F98E, {0x446B}}, {0x2F98F, {0x8291}}, {0x2F990, {0x828B}}, {0x2F991, {0x829D}}, {0x2F992, {0x52B3}}, {0x2F993, {0x82B1}}, {0x2F994, {0x82B3}}, +{0x2F995, {0x82BD}}, {0x2F996, {0x82E6}}, {0x2F997, {0x26B3C}}, {0x2F998, {0x82E5}}, {0x2F999, {0x831D}}, {0x2F99A, {0x8363}}, {0x2F99B, {0x83AD}}, {0x2F99C, {0x8323}}, {0x2F99D, {0x83BD}}, +{0x2F99E, {0x83E7}}, {0x2F99F, {0x8457}}, {0x2F9A0, {0x8353}}, {0x2F9A1, {0x83CA}}, {0x2F9A2, {0x83CC}}, {0x2F9A3, {0x83DC}}, {0x2F9A4, {0x26C36}}, {0x2F9A5, {0x26D6B}}, {0x2F9A6, {0x26CD5}}, +{0x2F9A7, {0x452B}}, {0x2F9A8, {0x84F1}}, {0x2F9A9, {0x84F3}}, {0x2F9AA, {0x8516}}, {0x2F9AB, {0x273CA}}, {0x2F9AC, {0x8564}}, {0x2F9AD, {0x26F2C}}, {0x2F9AE, {0x455D}}, {0x2F9AF, {0x4561}}, +{0x2F9B0, {0x26FB1}}, {0x2F9B1, {0x270D2}}, {0x2F9B2, {0x456B}}, {0x2F9B3, {0x8650}}, {0x2F9B4, {0x865C}}, {0x2F9B5, {0x8667}}, {0x2F9B6, {0x8669}}, {0x2F9B7, {0x86A9}}, {0x2F9B8, {0x8688}}, +{0x2F9B9, {0x870E}}, {0x2F9BA, {0x86E2}}, {0x2F9BB, {0x8779}}, {0x2F9BC, {0x8728}}, {0x2F9BD, {0x876B}}, {0x2F9BE, {0x8786}}, {0x2F9BF, {0x45D7}}, {0x2F9C0, {0x87E1}}, {0x2F9C1, {0x8801}}, +{0x2F9C2, {0x45F9}}, {0x2F9C3, {0x8860}}, {0x2F9C4, {0x8863}}, {0x2F9C5, {0x27667}}, {0x2F9C6, {0x88D7}}, {0x2F9C7, {0x88DE}}, {0x2F9C8, {0x4635}}, {0x2F9C9, {0x88FA}}, {0x2F9CA, {0x34BB}}, +{0x2F9CB, {0x278AE}}, {0x2F9CC, {0x27966}}, {0x2F9CD, {0x46BE}}, {0x2F9CE, {0x46C7}}, {0x2F9CF, {0x8AA0}}, {0x2F9D0, {0x8AED}}, {0x2F9D1, {0x8B8A}}, {0x2F9D2, {0x8C55}}, {0x2F9D3, {0x27CA8}}, +{0x2F9D4, {0x8CAB}}, {0x2F9D5, {0x8CC1}}, {0x2F9D6, {0x8D1B}}, {0x2F9D7, {0x8D77}}, {0x2F9D8, {0x27F2F}}, {0x2F9D9, {0x20804}}, {0x2F9DA, {0x8DCB}}, {0x2F9DB, {0x8DBC}}, {0x2F9DC, {0x8DF0}}, +{0x2F9DD, {0x208DE}}, {0x2F9DE, {0x8ED4}}, {0x2F9DF, {0x8F38}}, {0x2F9E0, {0x285D2}}, {0x2F9E1, {0x285ED}}, {0x2F9E2, {0x9094}}, {0x2F9E3, {0x90F1}}, {0x2F9E4, {0x9111}}, {0x2F9E5, {0x2872E}}, +{0x2F9E6, {0x911B}}, {0x2F9E7, {0x9238}}, {0x2F9E8, {0x92D7}}, {0x2F9E9, 
{0x92D8}}, {0x2F9EA, {0x927C}}, {0x2F9EB, {0x93F9}}, {0x2F9EC, {0x9415}}, {0x2F9ED, {0x28BFA}}, {0x2F9EE, {0x958B}}, +{0x2F9EF, {0x4995}}, {0x2F9F0, {0x95B7}}, {0x2F9F1, {0x28D77}}, {0x2F9F2, {0x49E6}}, {0x2F9F3, {0x96C3}}, {0x2F9F4, {0x5DB2}}, {0x2F9F5, {0x9723}}, {0x2F9F6, {0x29145}}, {0x2F9F7, {0x2921A}}, +{0x2F9F8, {0x4A6E}}, {0x2F9F9, {0x4A76}}, {0x2F9FA, {0x97E0}}, {0x2F9FB, {0x2940A}}, {0x2F9FC, {0x4AB2}}, {0x2F9FD, {0x29496}}, {0x2F9FE, {0x980B}}, {0x2F9FF, {0x980B}}, {0x2FA00, {0x9829}}, +{0x2FA01, {0x295B6}}, {0x2FA02, {0x98E2}}, {0x2FA03, {0x4B33}}, {0x2FA04, {0x9929}}, {0x2FA05, {0x99A7}}, {0x2FA06, {0x99C2}}, {0x2FA07, {0x99FE}}, {0x2FA08, {0x4BCE}}, {0x2FA09, {0x29B30}}, +{0x2FA0A, {0x9B12}}, {0x2FA0B, {0x9C40}}, {0x2FA0C, {0x9CFD}}, {0x2FA0D, {0x4CCE}}, {0x2FA0E, {0x4CED}}, {0x2FA0F, {0x9D67}}, {0x2FA10, {0x2A0CE}}, {0x2FA11, {0x4CF8}}, {0x2FA12, {0x2A105}}, +{0x2FA13, {0x2A20E}}, {0x2FA14, {0x2A291}}, {0x2FA15, {0x9EBB}}, {0x2FA16, {0x4D56}}, {0x2FA17, {0x9EF9}}, {0x2FA18, {0x9EFE}}, {0x2FA19, {0x9F05}}, {0x2FA1A, {0x9F0F}}, {0x2FA1B, {0x9F16}}, +{0x2FA1D, {0x2A600}}, +}; + static std::string codepoint_to_utf8(uint32_t cp) { std::string result; if (/* 0x00 <= cp && */ cp <= 0x7f) { From adcb12a9bad87bc96f2f158c95892b3d04aa7ffb Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Wed, 28 Feb 2024 03:52:56 -0500 Subject: [PATCH 732/811] llama : fix non-quantization of expert gating tensors (#5754) This reverts a single line from #5475 --- llama.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 356ca1076..893bcdbc0 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11162,7 +11162,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s quantize &= !params->only_copy; // do not quantize expert gating tensors - quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_FFN_GATE_INP, "weight"); + // NOTE: can't use LLM_TN here because the layer number is not known + quantize &= name.find("ffn_gate_inp.weight") == std::string::npos; // do not quantize positional embeddings and token types (BERT) quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_POS_EMBD, "weight"); From a693bea1e6762a17b78b6ddf4611e54136941ea2 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Wed, 28 Feb 2024 09:55:37 +0100 Subject: [PATCH 733/811] server : hit Ctrl+C twice to exit (#5734) * server: twice ctrl+C to exit * std::atomic_flag * sigint: message * sigint: stderr * Update examples/server/server.cpp Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- examples/server/server.cpp | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6b3ee531c..080fa9bd5 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2772,7 +2772,16 @@ static void append_to_generated_text_from_generated_token_probs(llama_server_con } std::function shutdown_handler; -inline void signal_handler(int signal) { shutdown_handler(signal); } +std::atomic_flag is_terminating = ATOMIC_FLAG_INIT; +inline void signal_handler(int signal) { + if (is_terminating.test_and_set()) { + // in case it hangs, we can force terminate the server by hitting Ctrl+C twice + // this is for better developer experience, we can remove when the server is stable enough + fprintf(stderr, "Received second interrupt, terminating immediately.\n"); + exit(1); + } + shutdown_handler(signal); +} int main(int argc, char **argv) { From 
5f706718566e3a5147916dc381f3b99de0ffad47 Mon Sep 17 00:00:00 2001 From: "UEXTM.com" <84163508+uextm@users.noreply.github.com> Date: Sat, 24 Feb 2024 11:27:36 -0500 Subject: [PATCH 734/811] Introduce backend GUIDs (ggml/743) * Introduce backend GUIDs Initial proposed implementation of backend GUIDs (Discussed in https://github.com/ggerganov/ggml/pull/741) Hardcoded CPU backend GUID (for now) Change ggml_backend_is_cpu logic to use GUID * Remove redundant functions Remove redundant functions `ggml_backend_i::get_name` and `ggml_backend_guid` which are not desired for future expansion * Add spaces to match style Co-authored-by: slaren * Fix brace style to match Co-authored-by: slaren * Add void to () in function signature Co-authored-by: slaren * Add back ggml_backend_guid and make CPU_GUID a local static in ggml_backend_cpu_guid * add guids to all backends ggml-ci --------- Co-authored-by: slaren --- ggml-backend-impl.h | 2 ++ ggml-backend.c | 16 ++++++++++++++-- ggml-backend.h | 2 +- ggml-cuda.cu | 8 +++++++- ggml-kompute.cpp | 8 +++++++- ggml-metal.m | 8 +++++++- ggml-sycl.cpp | 8 +++++++- ggml-vulkan.cpp | 8 +++++++- ggml.c | 4 ++++ ggml.h | 10 ++++++++++ 10 files changed, 66 insertions(+), 8 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index f95df47f7..0e5bf0ae1 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -104,6 +104,8 @@ extern "C" { }; struct ggml_backend { + ggml_guid_t guid; + struct ggml_backend_i iface; ggml_backend_context_t context; diff --git a/ggml-backend.c b/ggml-backend.c index 5076d9e5e..c86673b04 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -12,7 +12,6 @@ #define MAX(a, b) ((a) > (b) ? (a) : (b)) - // backend buffer type const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { @@ -159,6 +158,13 @@ bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml // backend +ggml_guid_t ggml_backend_guid(ggml_backend_t backend) { + if (backend == NULL) { + return NULL; + } + return backend->guid; +} + const char * ggml_backend_name(ggml_backend_t backend) { if (backend == NULL) { return "NULL"; @@ -781,6 +787,11 @@ static struct ggml_backend_i cpu_backend_i = { /* .supports_op = */ ggml_backend_cpu_supports_op, }; +static ggml_guid_t ggml_backend_cpu_guid(void) { + static ggml_guid guid = { 0xaa, 0x67, 0xc7, 0x43, 0x96, 0xe6, 0xa3, 0x8a, 0xe3, 0xaf, 0xea, 0x92, 0x36, 0xbc, 0xfc, 0x89 }; + return &guid; +} + ggml_backend_t ggml_backend_cpu_init(void) { struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context)); if (ctx == NULL) { @@ -800,6 +811,7 @@ ggml_backend_t ggml_backend_cpu_init(void) { } *cpu_backend = (struct ggml_backend) { + /* .guid = */ ggml_backend_cpu_guid(), /* .interface = */ cpu_backend_i, /* .context = */ ctx }; @@ -807,7 +819,7 @@ ggml_backend_t ggml_backend_cpu_init(void) { } GGML_CALL bool ggml_backend_is_cpu(ggml_backend_t backend) { - return backend && backend->iface.get_name == ggml_backend_cpu_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_cpu_guid()); } void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { diff --git a/ggml-backend.h b/ggml-backend.h index f13c69bff..8fb54bd92 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -49,7 +49,7 @@ extern "C" { // Backend // - + GGML_API ggml_guid_t ggml_backend_guid(ggml_backend_t backend); GGML_API const char * ggml_backend_name(ggml_backend_t backend); GGML_API void ggml_backend_free(ggml_backend_t backend); diff --git a/ggml-cuda.cu 
b/ggml-cuda.cu index 831c84efb..0c6501e98 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -12277,6 +12277,11 @@ static ggml_backend_i ggml_backend_cuda_interface = { /* .supports_op = */ ggml_backend_cuda_supports_op, }; +static ggml_guid_t ggml_backend_cuda_guid() { + static ggml_guid guid = { 0x2c, 0xdd, 0xe8, 0x1c, 0x65, 0xb3, 0x65, 0x73, 0x6a, 0x12, 0x88, 0x61, 0x1c, 0xc9, 0xdc, 0x25 }; + return &guid; +} + GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device) { ggml_init_cublas(); // TODO: remove from ggml.c @@ -12294,6 +12299,7 @@ GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device) { }; ggml_backend_t cuda_backend = new ggml_backend { + /* .guid = */ ggml_backend_cuda_guid(), /* .interface = */ ggml_backend_cuda_interface, /* .context = */ ctx }; @@ -12302,7 +12308,7 @@ GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device) { } GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend) { - return backend && backend->iface.get_name == ggml_backend_cuda_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_cuda_guid()); } GGML_CALL int ggml_backend_cuda_get_device_count() { diff --git a/ggml-kompute.cpp b/ggml-kompute.cpp index 51c5af8ec..e740a76d1 100644 --- a/ggml-kompute.cpp +++ b/ggml-kompute.cpp @@ -1953,11 +1953,17 @@ static struct ggml_backend_i kompute_backend_i = { /* .supports_op = */ ggml_backend_kompute_supports_op, }; +static ggml_guid_t ggml_backend_kompute_guid() { + static ggml_guid guid = { 0x7b, 0x57, 0xdc, 0xaf, 0xde, 0x12, 0x1d, 0x49, 0xfb, 0x35, 0xfa, 0x9b, 0x18, 0x31, 0x1d, 0xca }; + return &guid; +} + ggml_backend_t ggml_backend_kompute_init(int device) { GGML_ASSERT(s_kompute_context == nullptr); s_kompute_context = new ggml_kompute_context(device); ggml_backend_t kompute_backend = new ggml_backend { + /* .guid = */ ggml_backend_kompute_guid(), /* .interface = */ kompute_backend_i, /* .context = */ s_kompute_context, }; @@ -1966,7 +1972,7 @@ ggml_backend_t ggml_backend_kompute_init(int device) { } bool ggml_backend_is_kompute(ggml_backend_t backend) { - return backend && backend->iface.get_name == ggml_backend_kompute_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_kompute_guid()); } static ggml_backend_t ggml_backend_reg_kompute_init(const char * params, void * user_data) { diff --git a/ggml-metal.m b/ggml-metal.m index 9eba2f5d2..71fcca560 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2771,6 +2771,11 @@ void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * ggml_metal_log_user_data = user_data; } +static ggml_guid_t ggml_backend_metal_guid(void) { + static ggml_guid guid = { 0x81, 0xa1, 0x8b, 0x1e, 0x71, 0xec, 0x79, 0xed, 0x2b, 0x85, 0xdc, 0x8a, 0x61, 0x98, 0x30, 0xe6 }; + return &guid; +} + ggml_backend_t ggml_backend_metal_init(void) { struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); @@ -2781,6 +2786,7 @@ ggml_backend_t ggml_backend_metal_init(void) { ggml_backend_t metal_backend = malloc(sizeof(struct ggml_backend)); *metal_backend = (struct ggml_backend) { + /* .guid = */ ggml_backend_metal_guid(), /* .interface = */ ggml_backend_metal_i, /* .context = */ ctx, }; @@ -2789,7 +2795,7 @@ ggml_backend_t ggml_backend_metal_init(void) { } bool ggml_backend_is_metal(ggml_backend_t backend) { - return backend && backend->iface.get_name == ggml_backend_metal_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_metal_guid()); } void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { diff --git 
a/ggml-sycl.cpp b/ggml-sycl.cpp index 835967fb6..a054ec8b9 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -15162,6 +15162,11 @@ static ggml_backend_i ggml_backend_sycl_interface = { /* .supports_op = */ ggml_backend_sycl_supports_op, }; +static ggml_guid_t ggml_backend_sycl_guid() { + static ggml_guid guid = { 0x58, 0x05, 0x13, 0x8f, 0xcd, 0x3a, 0x61, 0x9d, 0xe7, 0xcd, 0x98, 0xa9, 0x03, 0xfd, 0x7c, 0x53 }; + return &guid; +} + ggml_backend_t ggml_backend_sycl_init(int device) { ggml_init_sycl(); // TODO: remove from ggml.c @@ -15179,6 +15184,7 @@ ggml_backend_t ggml_backend_sycl_init(int device) { }; ggml_backend_t sycl_backend = new ggml_backend { + /* .guid = */ ggml_backend_sycl_guid(), /* .interface = */ ggml_backend_sycl_interface, /* .context = */ ctx }; @@ -15187,7 +15193,7 @@ ggml_backend_t ggml_backend_sycl_init(int device) { } bool ggml_backend_is_sycl(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_sycl_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_sycl_guid()); } static ggml_backend_t ggml_backend_reg_sycl_init(const char * params, void * user_data) { diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 6caafb822..314e3d7a9 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -5244,6 +5244,11 @@ static ggml_backend_i ggml_backend_vk_interface = { /* .supports_op = */ ggml_backend_vk_supports_op, }; +static ggml_guid_t ggml_backend_vk_guid() { + static ggml_guid guid = { 0xb8, 0xf7, 0x4f, 0x86, 0x40, 0x3c, 0xe1, 0x02, 0x91, 0xc8, 0xdd, 0xe9, 0x02, 0x3f, 0xc0, 0x2b }; + return &guid; +} + GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t idx) { if (vk_instance.initialized[idx]) { return vk_instance.backends[idx]; @@ -5262,6 +5267,7 @@ GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t idx) { vk_instance.initialized[idx] = true; ggml_backend_t vk_backend = new ggml_backend { + /* .guid = */ ggml_backend_vk_guid(), /* .interface = */ ggml_backend_vk_interface, /* .context = */ &vk_instance.contexts[ctx->idx], }; @@ -5272,7 +5278,7 @@ GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t idx) { } GGML_CALL bool ggml_backend_is_vk(ggml_backend_t backend) { - return backend && backend->iface.get_name == ggml_backend_vk_name; + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_vk_guid()); } GGML_CALL int ggml_backend_vk_get_device_count() { diff --git a/ggml.c b/ggml.c index 4591644ad..68ac6201e 100644 --- a/ggml.c +++ b/ggml.c @@ -355,6 +355,10 @@ void ggml_fp32_to_fp16_row(const float * x, ggml_fp16_t * y, int n) { } } +bool ggml_guid_matches(ggml_guid_t guid_a, ggml_guid_t guid_b) { + return memcmp(guid_a, guid_b, sizeof(ggml_guid)) == 0; +} + // // timing // diff --git a/ggml.h b/ggml.h index 23b768640..0a6d3c051 100644 --- a/ggml.h +++ b/ggml.h @@ -672,6 +672,16 @@ extern "C" { GGML_NUMA_STRATEGY_COUNT }; + // + // GUID + // + + // GUID types + typedef uint8_t ggml_guid[16]; + typedef ggml_guid * ggml_guid_t; + + GGML_API bool ggml_guid_matches(ggml_guid_t guid_a, ggml_guid_t guid_b); + // misc GGML_API void ggml_time_init(void); // call this once at the beginning of the program From 2774b0c97427ee3ad3e2ee121354d078794e89d9 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 25 Feb 2024 20:41:35 +0100 Subject: [PATCH 735/811] add google magika inference example (ggml/748) * add magika inference example * ggml : fix unaligned accesses in custom ops * ggml : fix FP32 GELU for values that exceed the FP16 range * use ggml_pool_1d * add README * Update README.md * pad inputs if the files are too 
small * cleanup ggml-ci --- ggml.c | 54 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 34 insertions(+), 20 deletions(-) diff --git a/ggml.c b/ggml.c index 68ac6201e..f29b9f13f 100644 --- a/ggml.c +++ b/ggml.c @@ -1608,9 +1608,15 @@ inline static void ggml_vec_gelu_f16(const int n, ggml_fp16_t * y, const ggml_fp inline static void ggml_vec_gelu_f32(const int n, float * y, const float * x) { uint16_t t; for (int i = 0; i < n; ++i) { - ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); - memcpy(&t, &fp16, sizeof(uint16_t)); - y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_f16[t]); + if (x[i] <= -10.0f) { + y[i] = 0.0f; + } else if (x[i] >= 10.0f) { + y[i] = x[i]; + } else { + ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); + memcpy(&t, &fp16, sizeof(uint16_t)); + y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_f16[t]); + } } } #else @@ -5780,11 +5786,13 @@ struct ggml_tensor * ggml_pool_1d( is_node = true; } - const int64_t ne[2] = { + const int64_t ne[4] = { ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), a->ne[1], + a->ne[2], + a->ne[3], }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 2, ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); int32_t params[] = { op, k0, s0, p0 }; ggml_set_op_params(result, params, sizeof(params)); @@ -15081,9 +15089,10 @@ static void ggml_compute_forward_map_custom1( return; } - struct ggml_map_custom1_op_params * p = (struct ggml_map_custom1_op_params *) dst->op_params; + struct ggml_map_custom1_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); - p->fun(dst, a, params->ith, params->nth, p->userdata); + p.fun(dst, a, params->ith, params->nth, p.userdata); } // ggml_compute_forward_map_custom2 @@ -15099,9 +15108,10 @@ static void ggml_compute_forward_map_custom2( return; } - struct ggml_map_custom2_op_params * p = (struct ggml_map_custom2_op_params *) dst->op_params; + struct ggml_map_custom2_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); - p->fun(dst, a, b, params->ith, params->nth, p->userdata); + p.fun(dst, a, b, params->ith, params->nth, p.userdata); } // ggml_compute_forward_map_custom3 @@ -15118,9 +15128,10 @@ static void ggml_compute_forward_map_custom3( return; } - struct ggml_map_custom3_op_params * p = (struct ggml_map_custom3_op_params *) dst->op_params; + struct ggml_map_custom3_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); - p->fun(dst, a, b, c, params->ith, params->nth, p->userdata); + p.fun(dst, a, b, c, params->ith, params->nth, p.userdata); } // ggml_compute_forward_cross_entropy_loss @@ -17386,29 +17397,32 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } break; case GGML_OP_MAP_CUSTOM1: { - struct ggml_map_custom1_op_params * p = (struct ggml_map_custom1_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { + struct ggml_map_custom1_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { n_tasks = n_threads; } else { - n_tasks = MIN(p->n_tasks, n_threads); + n_tasks = MIN(p.n_tasks, n_threads); } } break; case GGML_OP_MAP_CUSTOM2: { - struct ggml_map_custom2_op_params * p = (struct ggml_map_custom2_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { + struct ggml_map_custom2_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { n_tasks = n_threads; } else { - n_tasks = MIN(p->n_tasks, n_threads); + n_tasks = MIN(p.n_tasks, n_threads); } } break; case GGML_OP_MAP_CUSTOM3: { - struct ggml_map_custom3_op_params * p = (struct 
ggml_map_custom3_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { + struct ggml_map_custom3_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { n_tasks = n_threads; } else { - n_tasks = MIN(p->n_tasks, n_threads); + n_tasks = MIN(p.n_tasks, n_threads); } } break; case GGML_OP_CROSS_ENTROPY_LOSS: From 8c0e8f4e73e275756ad69f9c99b26ead085ca9f0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 28 Feb 2024 11:17:32 +0200 Subject: [PATCH 736/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 59de34370..389c0bdfe 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -8cdf783f288a98eddf521b0ab1b4d405be9e18ba +b458250b736a7473f7ff3560d47c93f1644f3290 From 78aacf36344df724cdca9f1e1af849b2d2519cb8 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 28 Feb 2024 17:36:53 +0200 Subject: [PATCH 737/811] awq-py : remove (#5768) --- awq-py/README.md | 116 ------------------ awq-py/awq/apply_awq.py | 254 ---------------------------------------- awq-py/requirements.txt | 2 - 3 files changed, 372 deletions(-) delete mode 100644 awq-py/README.md delete mode 100644 awq-py/awq/apply_awq.py delete mode 100644 awq-py/requirements.txt diff --git a/awq-py/README.md b/awq-py/README.md deleted file mode 100644 index 16e68d027..000000000 --- a/awq-py/README.md +++ /dev/null @@ -1,116 +0,0 @@ -# AWQ: Activation-aware Weight Quantization for LLM - version apply to llamacpp -[[Paper](https://arxiv.org/abs/2306.00978)][[Original Repo](https://github.com/mit-han-lab/llm-awq)][[Easy-to-use Repo](https://github.com/casper-hansen/AutoAWQ)] - -**Supported models:** - -- [X] LLaMA -- [x] LLaMA 2 -- [X] MPT -- [X] Mistral AI v0.1 -- [ ] Bloom -- [ ] Mixtral MoE - -**TODO:** -- [x] Update version work with both MPT and MPT-AWQ model -- [ ] Add OPT model -- [ ] Add Bloom model -- [ ] Add Mixtral MoE -- [ ] Support w3, w2 - - -## Contents - -- [Install](##Install) -- [Convert](##Convert) -- [Quantize](##Quantize) -- [Test](##Test) -- [Benchmark](##Benchmark) -- [Results](##Results) - -## Install -Install requirements -```bash -pip install -r requirements.txt -``` -Get the pre-computed AWQ search results for multiple model families, including LLaMA, LLaMA2, MPT, OPT -```bash -git clone https://huggingface.co/datasets/mit-han-lab/awq-model-zoo awq_cache -``` - -## Convert -Example for llama model -```bash -# For llama7b and llama2 models -python convert.py models/llama-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/llama_7b_fp16.gguf -# For mistral and mpt models -python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/mpt-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf -``` - -## Quantize -```bash -# We only benchmark and confirm the results on q4_0, q4_1, and q2_k types. -./quantize models/llama_7b_fp16.gguf models/llama_7b_q4_0.gguf q4_0 -``` - -## Test -```bash -# For all models. -./build/bin/main -m models/llama_7b_q4_0.gguf -n 128 --prompt "Once upon a time" -``` - -## Benchmark -The perplexity measurements in table above are done against the `wikitext2` test dataset (https://paperswithcode.com/dataset/wikitext-2), with context length of 512. -```bash -# For llama and llama2, and mistral models. 
-./perplexity -m models/llama_7b_q4_0.gguf -f datasets/wikitext-2-raw/wiki.test.raw -``` - -## Results -Results are run on OpenBLAS (CPU) and CuBLAS (GPU) for fair comparison -We use three types of llamacpp quantization methods to work with our version, including q4_0, q4_1, and q2_k - -### Llama 7B (Build with OpenBLAS) - -| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | -|-----------:|--------------|-------:|-------:|-------:|-------:| -|Llama 7B | perplexity | 5.9066 | 6.1214 | 6.0643 | 6.5808 | -|Llama 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G | -|Llama 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | -|AWQ-LLama 7B| perplexity | 5.9175 | 6.0252 | 5.9987 | 6.3692 | -|AWQ-LLama 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G | -|AWQ-LLama 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | - - -### Llama2 7B (Build with CuBLAS) - -| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | -|------------:|--------------|-------:|-------:|-------:|-------:| -|Llama2 7B | perplexity | 5.8664 | 6.0260 | 6.0656 | 6.4496 | -|Llama2 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G | -|Llama2 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | -|AWQ-LLama2 7B| perplexity | 5.8801 | 6.0054 | 5.9849 | 6.3650 | -|AWQ-LLama2 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G | -|AWQ-LLama2 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | - - -### Mistral 7B v0.1 (Build with CuBLAS) - -| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | -|-------------:|--------------|-------:|-------:|-------:|-------:| -|Mistral 7B | perplexity | 5.6931 | 5.8202 | 5.8268 | 6.1645 | -|Mistral 7B | file size | 14.5G | 4.1G | 4.5G | 3.1G | -|Mistral 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | -|AWQ-Mistral 7B| perplexity | 5.6934 | 5.8020 | 5.7691 | 6.0426 | -|AWQ-Mistral 7B| file size | 14.5G | 4.1G | 4.5G | 3.1G | -|AWQ-Mistral 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | - -### MPT 7B (Build with OpenBLAS) - -| Model | Measure | F16 | Q4_0 | Q4_1 | Q2_K | -|---------:|--------------|-------:|-------:|-------:|--------:| -|MPT 7B | perplexity | 8.4369 | 8.7956 | 8.6265 | 11.4913 | -|MPT 7B | file size | 13.7G | 3.9G | 4.3G | 2.8G | -|MPT 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | -|AWQ-MPT 7B| perplexity | 8.4944 | 8.7053 | 8.6750 | 10.2873| -|AWQ-MPT 7B| file size | 13.7G | 3.9G | 4.3G | 2.8G | -|AWQ-MPT 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 | diff --git a/awq-py/awq/apply_awq.py b/awq-py/awq/apply_awq.py deleted file mode 100644 index 11132c5d2..000000000 --- a/awq-py/awq/apply_awq.py +++ /dev/null @@ -1,254 +0,0 @@ -""" -Implements the AWQ for llama.cpp use cases. -Original paper: https://arxiv.org/abs/2306.00978 - -This code is based on versions of the AWQ implementation found in the following repositories: -* https://github.com/mit-han-lab/llm-awq -* https://github.com/casper-hansen/AutoAWQ -""" - -import os -import torch -import torch.nn as nn - -from transformers import AutoModelForCausalLM, AutoConfig -from transformers.models.bloom.modeling_bloom import BloomGelu -from transformers.models.llama.modeling_llama import LlamaRMSNorm -from transformers.activations import GELUActivation - - -class ScaledActivation(nn.Module): - """ - ScaledActivation module wraps an existing activation function and applies a - scale factor to its output. - - Args: - module (nn.Module): The activation function to be scaled. - scales (torch.Tensor): A tensor of size (num_features,) containing the initial - scale factors for each feature. - - Returns: - torch.Tensor: The scaled output of the activation function. 
- """ - - def __init__(self, module, scales): - super().__init__() - self.act = module - self.scales = nn.Parameter(scales.data) - - def forward(self, x): - return self.act(x) / self.scales.view(1, 1, -1).to(x.device) - - -def set_op_by_name(layer, name, new_module): - """ - Set the new module for given module's name. - - Args: - layer (nn.Module): The layer in which to replace the submodule. - name (str): The path to the submodule to be replaced, using dot notation - to access nested modules. - new_module (nn.Module): The new module to replace the existing one. - """ - levels = name.split(".") - if len(levels) > 1: - mod_ = layer - for l_idx in range(len(levels) - 1): - if levels[l_idx].isdigit(): - mod_ = mod_[int(levels[l_idx])] - else: - mod_ = getattr(mod_, levels[l_idx]) - setattr(mod_, levels[-1], new_module) - else: - setattr(layer, name, new_module) - - -def get_op_by_name(module, op_name): - """ - Retrieves a submodule within a given layer based on its name. - - Args: - module (nn.Module): The layer containing the submodule to find. - op_name (str): The name of the submodule. - - Returns: - nn.Module: The requested submodule found within the given layer. - - Raises: - ValueError: If the specified submodule cannot be found within the layer. - """ - for name, m in module.named_modules(): - if name == op_name: - return m - raise ValueError(f"Cannot find op {op_name} in module {module}") - - -@torch.no_grad() -def scale_ln_fcs(ln, fcs, scales): - """ - Scales the weights of a LayerNorm and a list of fully-connected layers proportionally. - - Args: - ln (nn.LayerNorm): The LayerNorm module to be scaled. - fcs (List[nn.Linear]): A list of fully-connected layers to be scaled. - scales (torch.Tensor): A 1D tensor of size (num_features,). - """ - - if not isinstance(fcs, list): - fcs = [fcs] - - scales = scales.to(ln.weight.device) - - ln.weight.div_(scales) - if hasattr(ln, "bias") and ln.bias is not None: - ln.bias.div_(scales) - - for fc in fcs: - fc.weight.mul_(scales.view(1, -1)) - - for p in ln.parameters(): - assert torch.isnan(p).sum() == 0 - for fc in fcs: - for p in fc.parameters(): - assert torch.isnan(p).sum() == 0 - - -@torch.no_grad() -def scale_fc_fc(fc1, fc2, scales): - """ - Scales the weights of two fully-connected layers in a specific pattern. - - Args: - fc1 (nn.Linear): The first fully-connected layer to be scaled. - fc2 (nn.Linear): The second fully-connected layer to be scaled. - scales (torch.Tensor): A 1D tensor of size (num_features,). - """ - assert isinstance(fc1, nn.Linear) - assert isinstance(fc2, nn.Linear) - - scales = scales.to(fc1.weight.device) - - fc1.weight[-scales.size(0):].div_(scales.view(-1, 1)) - if fc1.bias is not None: - fc1.bias.div_(scales.view(-1)) - - fc2.weight.mul_(scales.view(1, -1)) - - for p in fc1.parameters(): - assert torch.isnan(p).sum() == 0 - for p in fc2.parameters(): - assert torch.isnan(p).sum() == 0 - - -@torch.no_grad() -def scale_gelu_fc(gelu, fc, scales): - """ - Scales the weight of a GELU activation and a fully-connected layer proportionally. - - Args: - gelu (Union[nn.GELU, BloomGelu, GELUActivation]): The GELU activation module to be scaled. - fc (nn.Linear): The fully-connected layer to be scaled. - scales (torch.Tensor): A 1D tensor of size (num_features,). - - Raises: - TypeError: If the `gelu` module is not of type `nn.GELU`, `BloomGelu`, or `GELUActivation`. - TypeError: If the `fc` module is not of type `nn.Linear`. 
- """ - assert isinstance(gelu, (nn.GELU, BloomGelu, GELUActivation)) - assert isinstance(fc, nn.Linear) - - fc.weight.mul_(scales.view(1, -1).to(fc.weight.device)) - - for p in fc.parameters(): - assert torch.isnan(p).sum() == 0 - - -def apply_scale(module, scales_list, input_feat_dict=None): - """ - Applies different scaling strategies to layers based on their type and hierarchy within a given module. - - Args: - module (nn.Module): The module containing the layers to be scaled. - scales_list (List[Tuple[str, List[str], torch.Tensor]]): A list of tuples containing: - * prev_op_name (str): The name of the preceding operation or module, - relative to which the layers to be scaled are located. - * layer_names (List[str]): A list of names of the layers to be scaled, relative to the preceding operation. - * scales (torch.Tensor): A 1D tensor of size (num_features,) containing the scaling factors for each feature. - input_feat_dict (Optional[Dict[str, torch.Tensor]]): A dictionary mapping layer names to their corresponding - input features (optional). - """ - for prev_op_name, layer_names, scales in scales_list: - prev_op = get_op_by_name(module, prev_op_name) - layers = [get_op_by_name(module, name) for name in layer_names] - - prev_op.cuda() - for layer in layers: - layer.cuda() - scales.cuda() - - if isinstance(prev_op, nn.Linear): - assert len(layers) == 1 - scale_fc_fc(prev_op, layers[0], scales) - elif isinstance(prev_op, (nn.LayerNorm, LlamaRMSNorm)) or "rmsnorm" in str(prev_op.__class__).lower(): - scale_ln_fcs(prev_op, layers, scales) - elif isinstance(prev_op, (nn.GELU, BloomGelu, GELUActivation)): - new_module = ScaledActivation(prev_op, scales) - set_op_by_name(module, prev_op_name, new_module) - scale_gelu_fc(prev_op, layers[0], scales) - else: - raise NotImplementedError(f"prev_op {type(prev_op)} not supported yet!") - - # apply the scaling to input feat if given; prepare it for clipping - if input_feat_dict is not None: - for layer_name in layer_names: - inp = input_feat_dict[layer_name] - inp.div_(scales.view(1, -1).to(inp.device)) - - prev_op.cpu() - for layer in layers: - layer.cpu() - scales.cpu() - - -@torch.no_grad() -def apply_clip(module, clip_list): - """ - Applies element-wise clipping to the weight of a specific layer within a given module. - - Args: - module (nn.Module): The module containing the layer to be clipped. - clip_list (List[Tuple[str, torch.Tensor]]): A list of tuples containing: - * name (str): The name of the layer to be clipped, relative to the root of the module. - * max_val (torch.Tensor): A 1D or 2D tensor defining the upper bound for each element of the layer's weight. - """ - for name, max_val in clip_list: - layer = get_op_by_name(module, name) - layer.cuda() - max_val = max_val.to(layer.weight.device) - org_shape = layer.weight.shape - layer.weight.data = layer.weight.data.reshape(*max_val.shape[:2], -1) - layer.weight.data = torch.clamp(layer.weight.data, -max_val, max_val) - layer.weight.data = layer.weight.data.reshape(org_shape) - layer.cpu() - - -def add_scale_weights(model_path, scale_path, tmp_path): - """ - Adds pre-computed Activation Weight Quantization (AWQ) results to a model, - including scaling factors and clipping bounds. - - Args: - model_path (str): Path to the pre-trained model to be equipped with AWQ. - scale_path (str): Path to the AWQ scale factors (.pt file). - tmp_path (str): Path to the temporary directory where the equipped model will be saved. 
- """ - config = AutoConfig.from_pretrained(model_path, trust_remote_code=True) - model = AutoModelForCausalLM.from_pretrained( - model_path, config=config, trust_remote_code=True - ) - model.eval() - awq_results = torch.load(str(scale_path), map_location="cpu") - apply_scale(model, awq_results["scale"]) - apply_clip(model, awq_results["clip"]) - model.save_pretrained(str(tmp_path)) - os.system(f"cp {str(model_path)}/tokenizer* {str(tmp_path)}") diff --git a/awq-py/requirements.txt b/awq-py/requirements.txt deleted file mode 100644 index 991896116..000000000 --- a/awq-py/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -torch>=2.1.1 -transformers>=4.32.0 From 08c5ee87e4cceb603ecceac90734fcdade57311b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 28 Feb 2024 18:43:38 +0200 Subject: [PATCH 738/811] llama : remove deprecated API (#5770) ggml-ci --- llama.cpp | 88 +------------------------------------------------------ llama.h | 45 ---------------------------- 2 files changed, 1 insertion(+), 132 deletions(-) diff --git a/llama.cpp b/llama.cpp index 893bcdbc0..30d5eb32d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7894,9 +7894,9 @@ static int llama_decode_internal( const auto n_batch = cparams.n_batch; GGML_ASSERT(n_tokens <= n_batch); + GGML_ASSERT((!batch.token && batch.embd) || (batch.token && !batch.embd)); // NOLINT int n_threads = n_tokens == 1 ? cparams.n_threads : cparams.n_threads_batch; - GGML_ASSERT((!batch.token && batch.embd) || (batch.token && !batch.embd)); // NOLINT const int64_t t_start_us = ggml_time_us(); @@ -10062,10 +10062,6 @@ void llama_sample_temp(struct llama_context * ctx, llama_token_data_array * cand } } -void llama_sample_temperature(struct llama_context * ctx, llama_token_data_array * candidates_p, float temp) { - llama_sample_temp(ctx, candidates_p, temp); -} - void llama_sample_repetition_penalties( struct llama_context * ctx, llama_token_data_array * candidates, @@ -10192,38 +10188,6 @@ void llama_sample_apply_guidance( ctx->t_sample_us += ggml_time_us() - t_start_sample_us; } -void llama_sample_classifier_free_guidance( - struct llama_context * ctx, - llama_token_data_array * candidates, - struct llama_context * guidance_ctx, - float scale) { - GGML_ASSERT(ctx); - int64_t t_start_sample_us; - - t_start_sample_us = ggml_time_us(); - const size_t n_vocab = llama_n_vocab(llama_get_model(ctx)); - - GGML_ASSERT(n_vocab == candidates->size); - GGML_ASSERT(!candidates->sorted); - - std::vector logits_base(n_vocab); - for (size_t i = 0; i < n_vocab; ++i) { - logits_base[i] = candidates->data[i].logit; - } - - float * logits_guidance = llama_get_logits(guidance_ctx); - - ctx->t_sample_us += ggml_time_us() - t_start_sample_us; - llama_sample_apply_guidance(ctx, logits_base.data(), logits_guidance, scale); - t_start_sample_us = ggml_time_us(); - - for (size_t i = 0; i < n_vocab; ++i) { - candidates->data[i].logit = logits_base[i]; - } - - ctx->t_sample_us += ggml_time_us() - t_start_sample_us; -} - llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int32_t m, float * mu) { GGML_ASSERT(ctx); @@ -11724,15 +11688,6 @@ bool llama_supports_gpu_offload(void) { #endif } -// deprecated: -bool llama_mmap_supported(void) { - return llama_supports_mmap(); -} - -bool llama_mlock_supported(void) { - return llama_supports_mlock(); -} - void llama_backend_init(void) { ggml_time_init(); @@ -12244,15 +12199,6 @@ uint32_t llama_model_quantize( } } -int32_t llama_apply_lora_from_file(struct llama_context * 
ctx, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { - try { - return llama_apply_lora_from_file_internal(ctx->model, path_lora, scale, path_base_model, n_threads); - } catch (const std::exception & err) { - LLAMA_LOG_ERROR("%s: failed to apply lora adapter: %s\n", __func__, err.what()); - return 1; - } -} - int32_t llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { try { return llama_apply_lora_from_file_internal(*model, path_lora, scale, path_base_model, n_threads); @@ -12802,38 +12748,6 @@ bool llama_save_session_file(struct llama_context * ctx, const char * path_sessi return true; } -int llama_eval( - struct llama_context * ctx, - llama_token * tokens, - int32_t n_tokens, - int32_t n_past) { - llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); - - const int ret = llama_decode_internal(*ctx, llama_batch_get_one(tokens, n_tokens, n_past, 0)); - if (ret < 0) { - LLAMA_LOG_ERROR("%s: failed to decode, ret = %d\n", __func__, ret); - } - - return ret; -} - -int llama_eval_embd( - struct llama_context * ctx, - float * embd, - int32_t n_tokens, - int32_t n_past) { - llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); - - llama_batch batch = { n_tokens, nullptr, embd, nullptr, nullptr, nullptr, nullptr, n_past, 1, 0, }; - - const int ret = llama_decode_internal(*ctx, batch); - if (ret < 0) { - LLAMA_LOG_ERROR("%s: failed to decode, ret = %d\n", __func__, ret); - } - - return ret; -} - void llama_set_n_threads(struct llama_context * ctx, uint32_t n_threads, uint32_t n_threads_batch) { ctx->cparams.n_threads = n_threads; ctx->cparams.n_threads_batch = n_threads_batch; diff --git a/llama.h b/llama.h index 16e28e91d..a6823bb2b 100644 --- a/llama.h +++ b/llama.h @@ -364,9 +364,6 @@ extern "C" { LLAMA_API bool llama_supports_mlock (void); LLAMA_API bool llama_supports_gpu_offload(void); - LLAMA_API DEPRECATED(bool llama_mmap_supported (void), "use llama_supports_mmap() instead"); - LLAMA_API DEPRECATED(bool llama_mlock_supported(void), "use llama_supports_mlock() instead"); - LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); @@ -423,14 +420,6 @@ extern "C" { // The model needs to be reloaded before applying a new adapter, otherwise the adapter // will be applied on top of the previous one // Returns 0 on success - LLAMA_API DEPRECATED(int32_t llama_apply_lora_from_file( - struct llama_context * ctx, - const char * path_lora, - float scale, - const char * path_base_model, - int32_t n_threads), - "use llama_model_apply_lora_from_file instead"); - LLAMA_API int32_t llama_model_apply_lora_from_file( const struct llama_model * model, const char * path_lora, @@ -606,27 +595,6 @@ extern "C" { // Decoding // - // Run the llama inference to obtain the logits and probabilities for the next token(s). - // tokens + n_tokens is the provided batch of new tokens to process - // n_past is the number of tokens to use from previous eval calls - // Returns 0 on success - // DEPRECATED: use llama_decode() instead - LLAMA_API DEPRECATED(int llama_eval( - struct llama_context * ctx, - llama_token * tokens, - int32_t n_tokens, - int32_t n_past), - "use llama_decode() instead"); - - // Same as llama_eval, but use float matrix input directly. 
- // DEPRECATED: use llama_decode() instead - LLAMA_API DEPRECATED(int llama_eval_embd( - struct llama_context * ctx, - float * embd, - int32_t n_tokens, - int32_t n_past), - "use llama_decode() instead"); - // Return batch for single sequence of tokens starting at pos_0 // // NOTE: this is a helper function to facilitate transition to the new batch API - avoid using it @@ -800,13 +768,6 @@ extern "C" { float * logits_guidance, float scale); - LLAMA_API DEPRECATED(void llama_sample_classifier_free_guidance( - struct llama_context * ctx, - llama_token_data_array * candidates, - struct llama_context * guidance_ctx, - float scale), - "use llama_sample_apply_guidance() instead"); - /// @details Sorts candidate tokens by their logits in descending order and calculate probabilities based on logits. LLAMA_API void llama_sample_softmax( struct llama_context * ctx, @@ -860,12 +821,6 @@ extern "C" { llama_token_data_array * candidates, float temp); - LLAMA_API DEPRECATED(void llama_sample_temperature( - struct llama_context * ctx, - llama_token_data_array * candidates, - float temp), - "use llama_sample_temp instead"); - /// @details Apply constraints from grammar LLAMA_API void llama_sample_grammar( struct llama_context * ctx, From 317709b2a81dbaf87850202686ec5bb2602a504e Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Wed, 28 Feb 2024 19:33:37 +0000 Subject: [PATCH 739/811] make portability_enumeration_ext apple only (#5757) --- ggml-vulkan.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 314e3d7a9..896c290b2 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1106,7 +1106,9 @@ void ggml_vk_instance_init() { const std::vector instance_extensions = vk::enumerateInstanceExtensionProperties(); const bool validation_ext = ggml_vk_instance_validation_ext_available(instance_extensions); +#ifdef __APPLE__ const bool portability_enumeration_ext = ggml_vk_instance_portability_enumeration_ext_available(instance_extensions); +#endif std::vector layers; @@ -1117,13 +1119,17 @@ void ggml_vk_instance_init() { if (validation_ext) { extensions.push_back("VK_EXT_validation_features"); } +#ifdef __APPLE__ if (portability_enumeration_ext) { extensions.push_back("VK_KHR_portability_enumeration"); } +#endif vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags{}, &app_info, layers, extensions); +#ifdef __APPLE__ if (portability_enumeration_ext) { instance_create_info.flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; } +#endif std::vector features_enable; vk::ValidationFeaturesEXT validation_features; From 87c91c07663b707e831c59ec373b5e665ff9d64a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 28 Feb 2024 21:44:21 +0200 Subject: [PATCH 740/811] ci : reduce 3b ppl chunks to 1 to avoid timeout (#5771) ggml-ci --- ci/run.sh | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/ci/run.sh b/ci/run.sh index f3a29c2e9..35eb3c7aa 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -272,19 +272,19 @@ function gg_run_open_llama_3b_v2 { (time ./bin/main --model ${model_q5_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log (time ./bin/main --model ${model_q6_k} -s 1234 -n 64 --ignore-eos -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/perplexity --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log - (time 
./bin/perplexity --model ${model_q8_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log - (time ./bin/perplexity --model ${model_q4_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log - (time ./bin/perplexity --model ${model_q4_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log - (time ./bin/perplexity --model ${model_q5_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log - (time ./bin/perplexity --model ${model_q5_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log - (time ./bin/perplexity --model ${model_q2_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log - (time ./bin/perplexity --model ${model_q3_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log - (time ./bin/perplexity --model ${model_q4_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log - (time ./bin/perplexity --model ${model_q5_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log - (time ./bin/perplexity --model ${model_q6_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log + (time ./bin/perplexity --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/perplexity --model ${model_q8_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + (time ./bin/perplexity --model ${model_q4_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_0.log + (time ./bin/perplexity --model ${model_q4_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_1.log + (time ./bin/perplexity --model ${model_q5_0} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_0.log + (time ./bin/perplexity --model ${model_q5_1} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_1.log + (time ./bin/perplexity --model ${model_q2_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q2_k.log + (time ./bin/perplexity --model ${model_q3_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q3_k.log + (time ./bin/perplexity --model ${model_q4_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q4_k.log + (time ./bin/perplexity --model ${model_q5_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q5_k.log + (time ./bin/perplexity --model ${model_q6_k} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-tg-q6_k.log - (time ./bin/imatrix --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log + (time ./bin/imatrix --model ${model_f16} -f ${wiki_test_60} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-imatrix.log (time ./bin/save-load-state --model ${model_q4_0} ) 2>&1 | tee -a $OUT/${ci}-save-load-state.log @@ -343,17 +343,17 @@ function gg_run_open_llama_3b_v2 { python3 ../convert-lora-to-ggml.py ${path_lora} # f16 - (time ./bin/perplexity --model ${model_f16} -f ${shakespeare} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-f16.log - (time ./bin/perplexity --model ${model_f16} -f ${shakespeare} --lora ${lora_shakespeare} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-f16.log + (time ./bin/perplexity --model ${model_f16} -f 
${shakespeare} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-f16.log + (time ./bin/perplexity --model ${model_f16} -f ${shakespeare} --lora ${lora_shakespeare} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-f16.log compare_ppl "f16 shakespeare" "$(cat $OUT/${ci}-ppl-shakespeare-f16.log | grep "^\[1\]")" "$(cat $OUT/${ci}-ppl-shakespeare-lora-f16.log | grep "^\[1\]")" | tee -a $OUT/${ci}-lora-ppl.log # q8_0 - (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-q8_0.log - (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} --lora ${lora_shakespeare} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-q8_0.log + (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-q8_0.log + (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} --lora ${lora_shakespeare} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-q8_0.log compare_ppl "q8_0 shakespeare" "$(cat $OUT/${ci}-ppl-shakespeare-q8_0.log | grep "^\[1\]")" "$(cat $OUT/${ci}-ppl-shakespeare-lora-q8_0.log | grep "^\[1\]")" | tee -a $OUT/${ci}-lora-ppl.log # q8_0 + f16 lora-base - (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} --lora ${lora_shakespeare} --lora-base ${model_f16} -c 128 -b 128 --chunks 2 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-q8_0-f16.log + (time ./bin/perplexity --model ${model_q8_0} -f ${shakespeare} --lora ${lora_shakespeare} --lora-base ${model_f16} -c 128 -b 128 --chunks 1 ) 2>&1 | tee -a $OUT/${ci}-ppl-shakespeare-lora-q8_0-f16.log compare_ppl "q8_0 / f16 base shakespeare" "$(cat $OUT/${ci}-ppl-shakespeare-q8_0.log | grep "^\[1\]")" "$(cat $OUT/${ci}-ppl-shakespeare-lora-q8_0-f16.log | grep "^\[1\]")" | tee -a $OUT/${ci}-lora-ppl.log set +e From d5ab29757ebc59a30f03e408294ec20628a6374e Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Thu, 29 Feb 2024 00:17:23 -0800 Subject: [PATCH 741/811] llama : constified `llama_set_state_data`'s `src` (#5774) --- llama.cpp | 6 +++--- llama.h | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/llama.cpp b/llama.cpp index 30d5eb32d..62699ce52 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12545,8 +12545,8 @@ size_t llama_copy_state_data(struct llama_context * ctx, uint8_t * dst) { } // Sets the state reading from the specified source address -size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { - uint8_t * inp = src; +size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { + const uint8_t * inp = src; // set rng { @@ -12555,7 +12555,7 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { GGML_ASSERT(rng_size <= LLAMA_MAX_RNG_STATE); - std::string rng_str((char *)inp, rng_size); inp += rng_size; + std::string rng_str((const char *)inp, rng_size); inp += rng_size; std::istringstream rng_ss(rng_str); rng_ss >> ctx->rng; diff --git a/llama.h b/llama.h index a6823bb2b..4d0ebe37d 100644 --- a/llama.h +++ b/llama.h @@ -575,7 +575,7 @@ extern "C" { // Returns the number of bytes read LLAMA_API size_t llama_set_state_data( struct llama_context * ctx, - uint8_t * src); + const uint8_t * src); // Save/load session file LLAMA_API bool llama_load_session_file( From 052051d8ae4639a1c3c61e7da3237bcc572469d4 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 29 Feb 2024 21:42:11 +0100 
Subject: [PATCH 742/811] Server: normalize naming (#5779) * server: normalize naming * fix spacing --- examples/server/server.cpp | 370 ++++++++++++++++--------------------- examples/server/utils.hpp | 186 ++++++++++++------- 2 files changed, 277 insertions(+), 279 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 080fa9bd5..bf20e0cf1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -33,8 +33,7 @@ using json = nlohmann::json; -struct server_params -{ +struct server_params { std::string hostname = "127.0.0.1"; std::vector api_keys; std::string public_path = "examples/server/public"; @@ -49,103 +48,50 @@ struct server_params bool server_verbose = false; bool server_log_json = true; -static size_t common_part(const std::vector &a, const std::vector &b) -{ - size_t i; - for (i = 0; i < a.size() && i < b.size() && a[i] == b[i]; i++) - { - } - return i; -} - -enum stop_type -{ +enum stop_type { STOP_FULL, STOP_PARTIAL, }; -static bool ends_with(const std::string &str, const std::string &suffix) -{ - return str.size() >= suffix.size() && - 0 == str.compare(str.size() - suffix.size(), suffix.size(), suffix); -} +// TODO: can become bool if we can't find use of more states +enum slot_state { + IDLE, + PROCESSING, +}; -static size_t find_partial_stop_string(const std::string &stop, - const std::string &text) -{ - if (!text.empty() && !stop.empty()) - { - const char text_last_char = text.back(); - for (int64_t char_index = stop.size() - 1; char_index >= 0; char_index--) - { - if (stop[char_index] == text_last_char) - { - const std::string current_partial = stop.substr(0, char_index + 1); - if (ends_with(text, current_partial)) - { - return text.size() - char_index - 1; - } - } - } - } - return std::string::npos; -} +enum slot_command { + NONE, + LOAD_PROMPT, + RELEASE, +}; -// TODO: reuse llama_detokenize -template -static std::string tokens_to_str(llama_context *ctx, Iter begin, Iter end) -{ - std::string ret; - for (; begin != end; ++begin) - { - ret += llama_token_to_piece(ctx, *begin); - } - return ret; -} +struct slot_params { + bool stream = true; + bool cache_prompt = false; // remember the prompt to avoid reprocessing all prompt -// format incomplete utf-8 multibyte character for output -static std::string tokens_to_output_formatted_string(const llama_context *ctx, const llama_token token) -{ - std::string out = token == -1 ? 
"" : llama_token_to_piece(ctx, token); - // if the size is 1 and first bit is 1, meaning it's a partial character - // (size > 1 meaning it's already a known token) - if (out.size() == 1 && (out[0] & 0x80) == 0x80) - { - std::stringstream ss; - ss << std::hex << (out[0] & 0xff); - std::string res(ss.str()); - out = "byte: \\x" + res; - } - return out; -} + uint32_t seed = -1; // RNG seed + int32_t n_keep = 0; // number of tokens to keep from initial prompt + int32_t n_predict = -1; // new tokens to predict -// convert a vector of completion_token_output to json -static json probs_vector_to_json(const llama_context *ctx, const std::vector &probs) -{ - json out = json::array(); - for (const auto &prob : probs) - { - json probs_for_token = json::array(); - for (const auto &p : prob.probs) - { - std::string tok_str = tokens_to_output_formatted_string(ctx, p.tok); - probs_for_token.push_back(json - { - {"tok_str", tok_str}, - {"prob", p.prob}, - }); - } - std::string tok_str = tokens_to_output_formatted_string(ctx, prob.tok); - out.push_back(json{ - {"content", tok_str}, - {"probs", probs_for_token}, - }); - } - return out; -} + std::vector antiprompt; -struct llama_client_slot -{ + json input_prefix; + json input_suffix; +}; + +struct slot_image { + int32_t id; + + bool request_encode_image = false; + float * image_embedding = nullptr; + int32_t image_tokens = 0; + + clip_image_u8 * img_data; + + std::string prefix_prompt; // before of this image +}; + +struct server_slot { int id; int task_id = -1; @@ -165,8 +111,8 @@ struct llama_client_slot int32_t i_batch = -1; int32_t n_predict = -1; - int32_t num_prompt_tokens = 0; - int32_t num_prompt_tokens_processed = 0; + int32_t n_prompt_tokens = 0; + int32_t n_prompt_tokens_processed = 0; json prompt; std::string generated_text; @@ -201,8 +147,8 @@ struct llama_client_slot std::vector images; // stats - size_t sent_count = 0; - size_t sent_token_probs_index = 0; + size_t n_sent_text = 0; // number of sent text character + size_t n_sent_token_probs = 0; int64_t t_start_process_prompt; int64_t t_start_genereration; @@ -214,7 +160,7 @@ struct llama_client_slot int multitask_id = -1; void reset() { - num_prompt_tokens = 0; + n_prompt_tokens = 0; generated_text = ""; truncated = false; stopped_eos = false; @@ -222,16 +168,15 @@ struct llama_client_slot stopped_limit = false; stopping_word = ""; n_past = 0; - sent_count = 0; - sent_token_probs_index = 0; + n_sent_text = 0; + n_sent_token_probs = 0; infill = false; ga_i = 0; n_past_se = 0; generated_token_probs.clear(); - for (slot_image & img : images) - { + for (slot_image & img : images) { free(img.image_embedding); if (img.img_data) { clip_image_u8_free(img.img_data); @@ -243,19 +188,15 @@ struct llama_client_slot } bool has_budget(gpt_params &global_params) { - if (params.n_predict == -1 && global_params.n_predict == -1) - { + if (params.n_predict == -1 && global_params.n_predict == -1) { return true; // limitless } n_remaining = -1; - if (params.n_predict != -1) - { + if (params.n_predict != -1) { n_remaining = params.n_predict - n_decoded; - } - else if (global_params.n_predict != -1) - { + } else if (global_params.n_predict != -1) { n_remaining = global_params.n_predict - n_decoded; } @@ -271,8 +212,7 @@ struct llama_client_slot } void add_token_string(const completion_token_output &token) { - if (command == RELEASE) - { + if (command == RELEASE) { return; } cache_tokens.push_back(token.tok); @@ -290,10 +230,10 @@ struct llama_client_slot json get_formated_timings() { return json { - {"prompt_n", 
num_prompt_tokens_processed}, + {"prompt_n", n_prompt_tokens_processed}, {"prompt_ms", t_prompt_processing}, - {"prompt_per_token_ms", t_prompt_processing / num_prompt_tokens_processed}, - {"prompt_per_second", 1e3 / t_prompt_processing * num_prompt_tokens_processed}, + {"prompt_per_token_ms", t_prompt_processing / n_prompt_tokens_processed}, + {"prompt_per_second", 1e3 / t_prompt_processing * n_prompt_tokens_processed}, {"predicted_n", n_decoded}, {"predicted_ms", t_token_generation}, @@ -304,18 +244,18 @@ struct llama_client_slot void print_timings() const { char buffer[512]; - double t_token = t_prompt_processing / num_prompt_tokens_processed; - double n_tokens_second = 1e3 / t_prompt_processing * num_prompt_tokens_processed; + double t_token = t_prompt_processing / n_prompt_tokens_processed; + double n_tokens_second = 1e3 / t_prompt_processing * n_prompt_tokens_processed; sprintf(buffer, "prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)", - t_prompt_processing, num_prompt_tokens_processed, + t_prompt_processing, n_prompt_tokens_processed, t_token, n_tokens_second); LOG_INFO(buffer, { - {"slot_id", id}, - {"task_id", task_id}, - {"t_prompt_processing", t_prompt_processing}, - {"num_prompt_tokens_processed", num_prompt_tokens_processed}, - {"t_token", t_token}, - {"n_tokens_second", n_tokens_second}, + {"slot_id", id}, + {"task_id", task_id}, + {"t_prompt_processing", t_prompt_processing}, + {"n_prompt_tokens_processed", n_prompt_tokens_processed}, + {"t_token", t_token}, + {"n_tokens_second", n_tokens_second}, }); t_token = t_token_generation / n_decoded; @@ -343,7 +283,7 @@ struct llama_client_slot } }; -struct llama_metrics { +struct server_metrics { uint64_t n_prompt_tokens_processed_total = 0; uint64_t n_tokens_predicted_total = 0; @@ -354,18 +294,16 @@ struct llama_metrics { uint64_t t_tokens_generation = 0; - void on_prompt_eval(const llama_client_slot &slot) { - n_prompt_tokens_processed_total += slot.num_prompt_tokens_processed; - - n_prompt_tokens_processed += slot.num_prompt_tokens_processed; - t_prompt_processing += slot.t_prompt_processing; + void on_prompt_eval(const server_slot &slot) { + n_prompt_tokens_processed_total += slot.n_prompt_tokens_processed; + n_prompt_tokens_processed += slot.n_prompt_tokens_processed; + t_prompt_processing += slot.t_prompt_processing; } - void on_prediction(const llama_client_slot &slot) { + void on_prediction(const server_slot &slot) { n_tokens_predicted_total += slot.n_decoded; - - n_tokens_predicted += slot.n_decoded; - t_tokens_generation += slot.t_token_generation; + n_tokens_predicted += slot.n_decoded; + t_tokens_generation += slot.t_token_generation; } void reset_bucket() { @@ -404,13 +342,13 @@ struct llama_server_context std::string name_assistant; // slots / clients - std::vector slots; + std::vector slots; json default_generation_settings_for_props; - llama_server_queue queue_tasks; + llama_server_queue queue_tasks; llama_server_response queue_results; - llama_metrics metrics; + server_metrics metrics; ~llama_server_context() { @@ -487,7 +425,7 @@ struct llama_server_context LOG_INFO("initializing slots", {{"n_slots", params.n_parallel}}); for (int i = 0; i < params.n_parallel; i++) { - llama_client_slot slot; + server_slot slot; slot.id = i; slot.n_ctx = n_ctx_slot; @@ -579,11 +517,11 @@ struct llama_server_context return prompt_tokens; } - llama_client_slot* get_slot(int id) { + server_slot* get_slot(int id) { int64_t t_last = ggml_time_us(); - llama_client_slot *last_used = nullptr; + 
server_slot *last_used = nullptr; - for (llama_client_slot & slot : slots) + for (server_slot & slot : slots) { if (slot.id == id && slot.available()) { @@ -600,7 +538,7 @@ struct llama_server_context return last_used; } - bool launch_slot_with_data(llama_client_slot* &slot, json data) { + bool launch_slot_with_data(server_slot* &slot, json data) { slot_params default_params; llama_sampling_params default_sparams; @@ -888,7 +826,7 @@ struct llama_server_context clean_kv_cache = false; } - void update_system_prompt() { + void system_prompt_update() { kv_cache_clear(); system_tokens.clear(); @@ -933,9 +871,9 @@ struct llama_server_context system_need_update = false; } - void notify_system_prompt_changed() { + void system_prompt_notify() { // release all slots - for (llama_client_slot &slot : slots) + for (server_slot &slot : slots) { slot.release(); } @@ -943,17 +881,17 @@ struct llama_server_context system_need_update = true; } - void process_system_prompt_data(const json &sys_props) { + void system_prompt_process(const json &sys_props) { system_prompt = sys_props.value("prompt", ""); name_user = sys_props.value("anti_prompt", ""); name_assistant = sys_props.value("assistant_name", ""); - notify_system_prompt_changed(); + system_prompt_notify(); } static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, - const stop_type type, llama_client_slot &slot) + const stop_type type, server_slot &slot) { size_t stop_pos = std::string::npos; @@ -975,8 +913,8 @@ struct llama_server_context { if (type == STOP_FULL) { - slot.stopped_word = true; - slot.stopping_word = word; + slot.stopped_word = true; + slot.stopping_word = word; slot.has_next_token = false; } stop_pos = pos; @@ -986,7 +924,7 @@ struct llama_server_context return stop_pos; } - bool process_token(completion_token_output &result, llama_client_slot &slot) { + bool process_token(completion_token_output &result, server_slot &slot) { // remember which tokens were sampled - used for repetition penalties during sampling const std::string token_str = llama_token_to_piece(ctx, result.tok); slot.sampled = result.tok; @@ -1032,7 +970,7 @@ struct llama_server_context if (!incomplete) { - size_t pos = std::min(slot.sent_count, slot.generated_text.size()); + size_t pos = std::min(slot.n_sent_text, slot.generated_text.size()); const std::string str_test = slot.generated_text.substr(pos); bool is_stop_full = false; size_t stop_pos = find_stopping_strings(str_test, token_str.size(), STOP_FULL, slot); @@ -1042,7 +980,7 @@ struct llama_server_context slot.generated_text.erase( slot.generated_text.begin() + pos + stop_pos, slot.generated_text.end()); - pos = std::min(slot.sent_count, slot.generated_text.size()); + pos = std::min(slot.n_sent_text, slot.generated_text.size()); } else { @@ -1055,7 +993,7 @@ struct llama_server_context { // no send the stop word in the response result.text_to_send = slot.generated_text.substr(pos, std::string::npos); - slot.sent_count += result.text_to_send.size(); + slot.n_sent_text += result.text_to_send.size(); // add the token to slot queue and cache } slot.add_token_string(result); @@ -1099,7 +1037,7 @@ struct llama_server_context return slot.has_next_token; // continue } - bool process_images(llama_client_slot &slot) const + bool process_images(server_slot &slot) const { for (slot_image &img : slot.images) { @@ -1132,7 +1070,7 @@ struct llama_server_context queue_results.send(res); } - json get_formated_generation(llama_client_slot &slot) + json get_formated_generation(server_slot &slot) 
{ const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && @@ -1179,7 +1117,7 @@ struct llama_server_context }; } - void send_partial_response(llama_client_slot &slot, completion_token_output tkn) + void send_partial_response(server_slot &slot, completion_token_output tkn) { task_result res; res.id = slot.task_id; @@ -1199,13 +1137,13 @@ struct llama_server_context { std::vector probs_output = {}; const std::vector to_send_toks = llama_tokenize(ctx, tkn.text_to_send, false); - size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); - size_t probs_stop_pos = std::min(slot.sent_token_probs_index + to_send_toks.size(), slot.generated_token_probs.size()); + size_t probs_pos = std::min(slot.n_sent_token_probs, slot.generated_token_probs.size()); + size_t probs_stop_pos = std::min(slot.n_sent_token_probs + to_send_toks.size(), slot.generated_token_probs.size()); if (probs_pos < probs_stop_pos) { probs_output = std::vector(slot.generated_token_probs.begin() + probs_pos, slot.generated_token_probs.begin() + probs_stop_pos); } - slot.sent_token_probs_index = probs_stop_pos; + slot.n_sent_token_probs = probs_stop_pos; res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); } @@ -1218,7 +1156,7 @@ struct llama_server_context queue_results.send(res); } - void send_final_response(llama_client_slot &slot) + void send_final_response(server_slot &slot) { task_result res; res.id = slot.task_id; @@ -1233,7 +1171,7 @@ struct llama_server_context {"stop", true}, {"model", params.model_alias}, {"tokens_predicted", slot.n_decoded}, - {"tokens_evaluated", slot.num_prompt_tokens}, + {"tokens_evaluated", slot.n_prompt_tokens}, {"generation_settings", get_formated_generation(slot)}, {"prompt", slot.prompt}, {"truncated", slot.truncated}, @@ -1271,7 +1209,7 @@ struct llama_server_context queue_results.send(res); } - void send_embedding(llama_client_slot &slot) + void send_embedding(server_slot &slot) { task_result res; res.id = slot.task_id; @@ -1282,9 +1220,7 @@ struct llama_server_context const int n_embd = llama_n_embd(model); if (!params.embedding) { - LOG_WARNING("embedding disabled", { - {"params.embedding", params.embedding}, - }); + LOG_WARNING("embedding disabled", {{"params.embedding", params.embedding}}); res.result_json = json { {"embedding", std::vector(n_embd, 0.0f)}, @@ -1296,7 +1232,7 @@ struct llama_server_context std::vector embedding(data, data + n_embd); res.result_json = json { - {"embedding", embedding }, + {"embedding", embedding}, }; } queue_results.send(res); @@ -1345,7 +1281,7 @@ struct llama_server_context } // for multiple images processing - bool ingest_images(llama_client_slot &slot, int n_batch) + bool ingest_images(server_slot &slot, int n_batch) { int image_idx = 0; @@ -1384,7 +1320,17 @@ struct llama_server_context } const int n_embd = llama_n_embd(model); - llama_batch batch_img = { n_eval, nullptr, (img.image_embedding + i * n_embd), nullptr, nullptr, nullptr, nullptr, slot.n_past, 1, 0, }; + llama_batch batch_img = { + n_eval, + nullptr, + (img.image_embedding + i * n_embd), + nullptr, + nullptr, + nullptr, + nullptr, + slot.n_past, + 1, 0 + }; if (llama_decode(ctx, batch_img)) { LOG_TEE("%s : failed to eval image\n", __func__); @@ -1454,7 +1400,7 @@ struct llama_server_context switch (task.type) { case TASK_TYPE_COMPLETION: { - llama_client_slot *slot = get_slot(json_value(task.data, "slot_id", -1)); + server_slot *slot = 
get_slot(json_value(task.data, "slot_id", -1)); if (slot == nullptr) { // if no slot is available, we defer this task for processing later @@ -1469,10 +1415,10 @@ struct llama_server_context send_error(task, "system prompt can only be updated when all slots are idle"); break; } - process_system_prompt_data(task.data["system_prompt"]); + system_prompt_process(task.data["system_prompt"]); // reset cache_tokens for all slots - for (llama_client_slot &slot : slots) + for (server_slot &slot : slots) { slot.cache_tokens.clear(); slot.n_past = 0; @@ -1512,20 +1458,20 @@ struct llama_server_context int n_idle_slots = 0; int n_processing_slots = 0; - for (llama_client_slot &slot: slots) { + for (server_slot &slot: slots) { json slot_data = get_formated_generation(slot); slot_data["id"] = slot.id; slot_data["task_id"] = slot.task_id; slot_data["state"] = slot.state; slot_data["prompt"] = slot.prompt; slot_data["next_token"] = { - {"has_next_token", slot.has_next_token}, - {"n_remain", slot.n_remaining}, + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, {"num_tokens_predicted", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, }; if (slot_data["state"] == IDLE) { n_idle_slots++; @@ -1563,10 +1509,10 @@ struct llama_server_context { "n_tokens_predicted", metrics.n_tokens_predicted}, { "t_tokens_generation", metrics.t_tokens_generation}, - { "kv_cache_tokens_count", llama_get_kv_cache_token_count(ctx)}, - { "kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, + { "kv_cache_tokens_count", llama_get_kv_cache_token_count(ctx)}, + { "kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, - { "slots", slots_data }, + { "slots", slots_data }, }; metrics.reset_bucket(); queue_results.send(res); @@ -1597,7 +1543,7 @@ struct llama_server_context if (system_need_update) { LOG_INFO("updating system prompt", {}); - update_system_prompt(); + system_prompt_update(); } llama_batch_clear(batch); @@ -1618,7 +1564,7 @@ struct llama_server_context task.target_id = -1; queue_tasks.post(task); - for (llama_client_slot &slot : slots) + for (server_slot &slot : slots) { if (slot.ga_n == 1) { @@ -1754,45 +1700,50 @@ struct llama_server_context prompt_tokens = tokenize(slot.prompt, system_prompt.empty() && add_bos_token); // add BOS if there isn't system prompt } - slot.num_prompt_tokens = prompt_tokens.size(); + slot.n_prompt_tokens = prompt_tokens.size(); if (slot.params.n_keep < 0) { - slot.params.n_keep = slot.num_prompt_tokens; + slot.params.n_keep = slot.n_prompt_tokens; } slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep); // if input prompt is too big, truncate it - if (slot.num_prompt_tokens >= slot.n_ctx) + if (slot.n_prompt_tokens >= slot.n_ctx) { const int n_left = slot.n_ctx - slot.params.n_keep; const int n_block_size = n_left / 2; - const int erased_blocks = (slot.num_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size; + const int erased_blocks = (slot.n_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size; - std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + slot.params.n_keep); - new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, prompt_tokens.end()); + std::vector 
new_tokens( + prompt_tokens.begin(), + prompt_tokens.begin() + slot.params.n_keep); + new_tokens.insert( + new_tokens.end(), + prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, + prompt_tokens.end()); LOG_VERBOSE("input truncated", { - {"n_ctx", slot.n_ctx}, - {"n_keep", slot.params.n_keep}, - {"n_left", n_left}, + {"n_ctx", slot.n_ctx}, + {"n_keep", slot.params.n_keep}, + {"n_left", n_left}, {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, }); slot.truncated = true; prompt_tokens = new_tokens; - slot.num_prompt_tokens = prompt_tokens.size(); - GGML_ASSERT(slot.num_prompt_tokens < slot.n_ctx); + slot.n_prompt_tokens = prompt_tokens.size(); + GGML_ASSERT(slot.n_prompt_tokens < slot.n_ctx); } if (!slot.params.cache_prompt) { llama_sampling_reset(slot.ctx_sampling); - slot.n_past = 0; + slot.n_past = 0; slot.n_past_se = 0; - slot.ga_i = 0; - slot.num_prompt_tokens_processed = slot.num_prompt_tokens; + slot.ga_i = 0; + slot.n_prompt_tokens_processed = slot.n_prompt_tokens; } else { @@ -1811,7 +1762,7 @@ struct llama_server_context slot.n_past -= 1; } - slot.num_prompt_tokens_processed = slot.num_prompt_tokens - slot.n_past; + slot.n_prompt_tokens_processed = slot.n_prompt_tokens - slot.n_past; if (slot.ga_n != 1) { @@ -1836,13 +1787,13 @@ struct llama_server_context { "slot_id", slot.id }, { "task_id", slot.task_id }, { "n_past", slot.n_past }, - { "num_prompt_tokens_processed", slot.num_prompt_tokens_processed } + { "n_prompt_tokens_processed", slot.n_prompt_tokens_processed } }); } slot.cache_tokens = prompt_tokens; - if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) + if (slot.n_past == slot.n_prompt_tokens && slot.n_past > 0) { // we have to evaluate at least 1 token to generate logits. 
LOG_INFO("we have to evaluate at least 1 token to generate logits", { @@ -1898,8 +1849,8 @@ struct llama_server_context if (has_images && !ingest_images(slot, n_batch)) { LOG_ERROR("failed processing images", { - "slot_id", slot.id, - "task_id", slot.task_id, + {"slot_id", slot.id}, + {"task_id", slot.task_id}, }); // FIXME @phymbert: to be properly tested // early returning without changing the slot state will block the slot for ever @@ -2049,10 +2000,6 @@ struct llama_server_context LOG_VERBOSE("slots updated", {}); return true; } - - void run_on_all_tasks_finished() { - update_slots(); - } }; static void server_print_usage(const char *argv0, const gpt_params ¶ms, @@ -2561,7 +2508,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, std::istreambuf_iterator(), std::back_inserter(systm_content) ); - llama.process_system_prompt_data(json::parse(systm_content)); + llama.system_prompt_process(json::parse(systm_content)); } else if (arg == "-ctk" || arg == "--cache-type-k") { params.cache_type_k = argv[++i]; @@ -2692,7 +2639,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, /* llama.cpp completion api semantics */ static json format_partial_response( - llama_server_context &llama, llama_client_slot *slot, const std::string &content, const std::vector &probs + llama_server_context &llama, server_slot *slot, const std::string &content, const std::vector &probs ) { json res = json { @@ -2748,14 +2695,7 @@ static void log_server_request(const httplib::Request &req, const httplib::Respo }); } -struct token_translator -{ - llama_context * ctx; - std::string operator()(llama_token tok) const { return llama_token_to_piece(ctx, tok); } - std::string operator()(const completion_token_output &cto) const { return (*this)(cto.tok); } -}; - -static void append_to_generated_text_from_generated_token_probs(llama_server_context &llama, llama_client_slot *slot) +static void append_to_generated_text_from_generated_token_probs(llama_server_context &llama, server_slot *slot) { auto & gtps = slot->generated_token_probs; auto translator = token_translator{llama.ctx}; @@ -3526,8 +3466,8 @@ int main(int argc, char **argv) &llama_server_context::process_single_task, &llama, std::placeholders::_1)); llama.queue_tasks.on_finish_multitask(std::bind( &llama_server_context::on_finish_multitask, &llama, std::placeholders::_1)); - llama.queue_tasks.on_all_tasks_finished(std::bind( - &llama_server_context::run_on_all_tasks_finished, &llama)); + llama.queue_tasks.on_run_slots(std::bind( + &llama_server_context::update_slots, &llama)); llama.queue_results.on_multitask_update(std::bind( &llama_server_queue::update_multitask, &llama.queue_tasks, diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index d7abd7cbb..d98541f26 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -37,10 +37,6 @@ extern bool server_log_json; #define LOG_WARNING(MSG, ...) server_log("WARN", __func__, __LINE__, MSG, __VA_ARGS__) #define LOG_INFO( MSG, ...) 
server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) -// -// parallel -// - enum server_state { SERVER_STATE_LOADING_MODEL, // Server is starting up, model not fully loaded yet SERVER_STATE_READY, // Server is ready and model is loaded @@ -78,51 +74,8 @@ struct task_multi { std::vector results{}; }; -// TODO: can become bool if we can't find use of more states -enum slot_state -{ - IDLE, - PROCESSING, -}; - -enum slot_command -{ - NONE, - LOAD_PROMPT, - RELEASE, -}; - -struct slot_params -{ - bool stream = true; - bool cache_prompt = false; // remember the prompt to avoid reprocessing all prompt - - uint32_t seed = -1; // RNG seed - int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_predict = -1; // new tokens to predict - - std::vector antiprompt; - - json input_prefix; - json input_suffix; -}; - -struct slot_image -{ - int32_t id; - - bool request_encode_image = false; - float * image_embedding = nullptr; - int32_t image_tokens = 0; - - clip_image_u8 * img_data; - - std::string prefix_prompt; // before of this image -}; - // completion token output with probabilities -struct completion_token_output -{ +struct completion_token_output { struct token_prob { llama_token tok; @@ -134,8 +87,13 @@ struct completion_token_output std::string text_to_send; }; -static inline void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) -{ +struct token_translator { + llama_context * ctx; + std::string operator()(llama_token tok) const { return llama_token_to_piece(ctx, tok); } + std::string operator()(const completion_token_output &cto) const { return (*this)(cto.tok); } +}; + +static inline void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) { std::stringstream ss_tid; ss_tid << std::this_thread::get_id(); json log = nlohmann::ordered_json{ @@ -183,8 +141,7 @@ static inline void server_log(const char *level, const char *function, int line, // template -static T json_value(const json &body, const std::string &key, const T &default_value) -{ +static T json_value(const json &body, const std::string &key, const T &default_value) { // Fallback null to default value return body.contains(key) && !body.at(key).is_null() ? body.value(key, default_value) @@ -200,8 +157,7 @@ inline bool verify_custom_template(const std::string & tmpl) { } // Format given chat. 
If tmpl is empty, we take the template from model metadata -inline std::string format_chat(const struct llama_model * model, const std::string & tmpl, const std::vector & messages) -{ +inline std::string format_chat(const struct llama_model * model, const std::string & tmpl, const std::vector & messages) { size_t alloc_size = 0; // vector holding all allocated string to be passed to llama_chat_apply_template std::vector str(messages.size() * 2); @@ -250,7 +206,7 @@ struct llama_server_queue { // callback functions std::function callback_new_task; std::function callback_finish_multitask; - std::function callback_all_task_finished; + std::function callback_run_slots; // Add a new task to the end of the queue int post(task_server task) { @@ -283,14 +239,14 @@ struct llama_server_queue { callback_new_task = callback; } - // Register function to process a multitask + // Register function to process a multitask when it is finished void on_finish_multitask(std::function callback) { callback_finish_multitask = callback; } - // Register the function to be called when the batch of tasks is finished - void on_all_tasks_finished(std::function callback) { - callback_all_task_finished = callback; + // Register the function to be called when all slots data is ready to be processed + void on_run_slots(std::function callback) { + callback_run_slots = callback; } // Call when the state of one slot is changed @@ -312,7 +268,13 @@ struct llama_server_queue { condition_tasks.notify_all(); } - // Start the main loop. + /** + * Main loop consists of these steps: + * - Wait until a new task arrives + * - Process the task (i.e. maybe copy data into slot) + * - Check if multitask is finished + * - Run all slots + */ void start_loop() { running = true; while (true) { @@ -331,8 +293,8 @@ struct llama_server_queue { LOG_VERBOSE("callback_new_task", {{"task_id", task.id}}); callback_new_task(task); } - LOG_VERBOSE("callback_all_task_finished", {}); - // process and update all the multitasks + LOG_VERBOSE("update_multitasks", {}); + // check if we have any finished multitasks auto queue_iterator = queue_multitasks.begin(); while (queue_iterator != queue_multitasks.end()) { @@ -349,8 +311,9 @@ struct llama_server_queue { ++queue_iterator; } } - // all tasks in the current loop is finished - callback_all_task_finished(); + // all tasks in the current loop is processed, slots data is now ready + LOG_VERBOSE("callback_run_slots", {}); + callback_run_slots(); } LOG_VERBOSE("wait for new task", {}); // wait for new task @@ -408,12 +371,14 @@ struct llama_server_response { std::mutex mutex_results; std::condition_variable condition_results; + // add the task_id to the list of tasks waiting for response void add_waiting_task_id(int task_id) { LOG_VERBOSE("waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); waiting_task_ids.insert(task_id); } + // when the request is finished, we can remove task associated with it void remove_waiting_task_id(int task_id) { LOG_VERBOSE("remove waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); @@ -574,3 +539,96 @@ static std::string gen_chatcmplid() chatcmplid << "chatcmpl-" << random_string(); return chatcmplid.str(); } + +// +// other common utils +// + +static size_t common_part(const std::vector &a, const std::vector &b) +{ + size_t i; + for (i = 0; i < a.size() && i < b.size() && a[i] == b[i]; i++) + { + } + return i; +} + +static bool ends_with(const std::string &str, const std::string &suffix) +{ + return str.size() >= 
suffix.size() && + 0 == str.compare(str.size() - suffix.size(), suffix.size(), suffix); +} + +static size_t find_partial_stop_string(const std::string &stop, + const std::string &text) +{ + if (!text.empty() && !stop.empty()) + { + const char text_last_char = text.back(); + for (int64_t char_index = stop.size() - 1; char_index >= 0; char_index--) + { + if (stop[char_index] == text_last_char) + { + const std::string current_partial = stop.substr(0, char_index + 1); + if (ends_with(text, current_partial)) + { + return text.size() - char_index - 1; + } + } + } + } + return std::string::npos; +} + +// TODO: reuse llama_detokenize +template +static std::string tokens_to_str(llama_context *ctx, Iter begin, Iter end) +{ + std::string ret; + for (; begin != end; ++begin) + { + ret += llama_token_to_piece(ctx, *begin); + } + return ret; +} + +// format incomplete utf-8 multibyte character for output +static std::string tokens_to_output_formatted_string(const llama_context *ctx, const llama_token token) +{ + std::string out = token == -1 ? "" : llama_token_to_piece(ctx, token); + // if the size is 1 and first bit is 1, meaning it's a partial character + // (size > 1 meaning it's already a known token) + if (out.size() == 1 && (out[0] & 0x80) == 0x80) + { + std::stringstream ss; + ss << std::hex << (out[0] & 0xff); + std::string res(ss.str()); + out = "byte: \\x" + res; + } + return out; +} + +// convert a vector of completion_token_output to json +static json probs_vector_to_json(const llama_context *ctx, const std::vector &probs) +{ + json out = json::array(); + for (const auto &prob : probs) + { + json probs_for_token = json::array(); + for (const auto &p : prob.probs) + { + std::string tok_str = tokens_to_output_formatted_string(ctx, p.tok); + probs_for_token.push_back(json + { + {"tok_str", tok_str}, + {"prob", p.prob}, + }); + } + std::string tok_str = tokens_to_output_formatted_string(ctx, prob.tok); + out.push_back(json{ + {"content", tok_str}, + {"probs", probs_for_token}, + }); + } + return out; +} From 38d152160898b0173ffe4dc7df5daadcbd2eceb0 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Fri, 1 Mar 2024 07:36:47 +0000 Subject: [PATCH 743/811] [SYCL] Use batched mul_mat pathway (#5591) * Use batched mul_mat pathway * rm extra line * Explicitly state scaled data type --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 107 +++++++++++++++++++++----------------------------- 1 file changed, 44 insertions(+), 63 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index a054ec8b9..6f391b0c6 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -12726,6 +12726,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, GGML_ASSERT(dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT(src1->type == GGML_TYPE_F32 || (src1->ne[2] == 1 && src1->ne[3] == 1)); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -13269,31 +13270,23 @@ static void k_compute_batched_ptrs(const sycl::half *src0_as_f16, int64_t i03 = i13 / r3; int64_t i02 = i12 / r2; - ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; - ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; - ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; + ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; + ptrs_src[1*ne23 + i12 + i13*ne12] = (const 
char *) src1_as_f16 + i12*nb12 + i13*nb13; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; } -static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, - const ggml_tensor *src1, - ggml_tensor *dst) try { +static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, + const ggml_tensor *src1, + ggml_tensor *dst) try { GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne); + GGML_TENSOR_BINARY_OP_LOCALS - GGML_TENSOR_LOCALS(int64_t, nb0, src0, nb); - - GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); - - GGML_TENSOR_LOCALS(int64_t, nb1, src1, nb); - - const int64_t ne1 = ggml_nelements(src1); - const int64_t ne = ggml_nelements(dst); + const int64_t ne_dst = ggml_nelements(dst); SYCL_CHECK(ggml_sycl_set_device(g_main_device)); dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; @@ -13312,11 +13305,16 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, float * dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; // convert src1 to fp16 - const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); - GGML_ASSERT(to_fp16_sycl != nullptr); - - sycl_pool_alloc src1_as_f16(ne1); - to_fp16_sycl(src1_ddf, src1_as_f16.get(), ne1, main_stream); + sycl_pool_alloc src1_f16_alloc; + if (src1->type != GGML_TYPE_F16) { + const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); + const int64_t ne_src1 = ggml_nelements(src1); + src1_f16_alloc.alloc(ne_src1); + GGML_ASSERT(to_fp16_sycl != nullptr); + to_fp16_sycl(src1_ddf, src1_f16_alloc.get(), ne_src1, main_stream); + } + sycl::half *src1_f16 = src1->type == GGML_TYPE_F16 ? (sycl::half *)src1_ddf + : src1_f16_alloc.get(); sycl_pool_alloc dst_f16; char * dst_t; @@ -13337,20 +13335,12 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, const void * alpha = &alpha_f16; const void * beta = &beta_f16; - if (dst->op_params[0] == GGML_PREC_DEFAULT) { - dst_t = (char *) dst_f16.alloc(ne); + // TODO: Renable (dst->op_params[0] =! 
GGML_PREC_DEFAULT) pathway + // once oneMKL open source supports half, half, float, float: datatypes + dst_t = (char *) dst_f16.alloc(ne_dst); - nbd2 /= sizeof(float) / sizeof(sycl::half); - nbd3 /= sizeof(float) / sizeof(sycl::half); - } else { - dst_t = (char *) dst_ddf; - - cu_compute_type = dpct::library_data_t::real_float; - cu_data_type = dpct::library_data_t::real_float; - - alpha = &alpha_f32; - beta = &beta_f32; - } + nbd2 /= sizeof(float) / sizeof(sycl::half); + nbd3 /= sizeof(float) / sizeof(sycl::half); GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -13386,10 +13376,10 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, *g_sycl_handles[g_main_device_index], oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, ne01, ne11, ne10, alpha, (const char *)src0_as_f16, dpct::library_data_t::real_half, - nb01 / sizeof(sycl::half), src0->nb[2] / sizeof(sycl::half), - (const char *)src1_as_f16.get(), dpct::library_data_t::real_half, - nb11 / sizeof(float), src1->nb[2] / sizeof(float), beta, - (char *)dst_t, cu_data_type, ne01, dst->nb[2] / sizeof(float), + nb01 / nb00, nb02 / nb00, + (const char *)src1_f16, dpct::library_data_t::real_half, + nb11 / nb10, nb12 / nb10, beta, + (char *)dst_t, cu_data_type, ne01, nb2 / nb0, ne12 * ne13, cu_compute_type))); } else { // use syclGemmBatchedEx @@ -13409,44 +13399,35 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, {sycl::aspect::fp16}); main_stream->submit([&](sycl::handler &cgh) { - const sycl::half *src1_as_f16_get_ct1 = src1_as_f16.get(); - const void **ptrs_src_get_ct3 = ptrs_src.get(); - void **ptrs_dst_get_ct4 = ptrs_dst.get(); - + const void **ptrs_src_get = ptrs_src.get(); + void **ptrs_dst_get = ptrs_dst.get(); + size_t nb12_scaled = src1->type == GGML_TYPE_F16 ? nb12 : nb12 / 2; + size_t nb13_scaled = src1->type == GGML_TYPE_F16 ? nb13 : nb13 / 2; cgh.parallel_for(sycl::nd_range<3>(block_dims, block_dims), [=](sycl::nd_item<3> item_ct1) { k_compute_batched_ptrs( - src0_as_f16, src1_as_f16_get_ct1, - dst_t, ptrs_src_get_ct3, - ptrs_dst_get_ct4, ne12, ne13, ne23, - nb02, nb03, nb12, nb13, nbd2, nbd3, r2, - r3, item_ct1); + src0_as_f16, src1_f16, + dst_t, ptrs_src_get, + ptrs_dst_get, ne12, ne13, ne23, + nb02, nb03, nb12_scaled, nb13_scaled, + nbd2, nbd3, r2, r3, item_ct1); }); }); } - /* - DPCT1010:95: SYCL uses exceptions to report errors and does not use the - error codes. The call was replaced with 0. You need to rewrite this - code. 
- */ - SYCL_CHECK(0); - SYCL_CHECK(CHECK_TRY_ERROR(dpct::gemm_batch( *g_sycl_handles[g_main_device_index], oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, ne01, ne11, ne10, alpha, (const void **)(ptrs_src.get() + 0 * ne23), - dpct::library_data_t::real_half, nb01 / sizeof(sycl::half), + dpct::library_data_t::real_half, nb01 / nb00, (const void **)(ptrs_src.get() + 1 * ne23), - dpct::library_data_t::real_half, nb11 / sizeof(float), beta, + dpct::library_data_t::real_half, nb11 / nb10, beta, (void **)(ptrs_dst.get() + 0 * ne23), cu_data_type, ne01, ne23, cu_compute_type))); } #endif - if (dst->op_params[0] == GGML_PREC_DEFAULT) { - const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(GGML_TYPE_F16); - to_fp32_sycl(dst_f16.get(), dst_ddf, ne, main_stream); - } + const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(GGML_TYPE_F16); + to_fp32_sycl(dst_f16.get(), dst_ddf, ne_dst, main_stream); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -13491,10 +13472,10 @@ static void ggml_sycl_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 // KQV single-batch // GGML_SYCL_DEBUG("ggml_sycl_mul_mat_vec_nc\n"); ggml_sycl_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && use_xmx && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && use_xmx && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch - // GGML_SYCL_DEBUG("ggml_sycl_mul_mat_mat_batched_sycl\n"); - ggml_sycl_mul_mat_mat_batched_sycl(src0, src1, dst); + // GGML_SYCL_DEBUG("ggml_sycl_mul_mat_batched_sycl\n"); + ggml_sycl_mul_mat_batched_sycl(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { // GGML_SYCL_DEBUG("ggml_sycl_op_mul_mat\n"); ggml_sycl_op_mul_mat(src0, src1, dst, ggml_sycl_op_mul_mat_sycl, false); From f105471ef6aa4727afac8240da398590d7277f45 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Mar 2024 09:59:43 +0200 Subject: [PATCH 744/811] server : fix newlines in help (#5785) --- examples/server/server.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index bf20e0cf1..45c4aec4d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2080,8 +2080,8 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); - printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); + printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`\n"); + printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`\n"); printf(" --chat-template JINJA_TEMPLATE\n"); printf(" set custom jinja chat template (default: template taken from model's metadata)\n"); printf(" Note: only commonly used templates are accepted, since we don't have jinja parser\n"); From 6ea0f010ff6967034528d9e0b8330b9b0f0b7c13 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Fri, 1 Mar 2024 08:54:53 +0000 Subject: [PATCH 745/811] ci : add Ubuntu 22 Vulkan CI run (#5789) --- .github/workflows/build.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 66ad85938..9144f9266 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -145,6 +145,28 @@ jobs: cd build ctest -L main --verbose + ubuntu-22-cmake-vulkan: + runs-on: ubuntu-22.04 + + steps: + - name: Clone + id: checkout + uses: actions/checkout@v3 + + - name: Dependencies + id: depends + run: | + sudo apt-get update + sudo apt-get install build-essential libvulkan-dev + + - name: Build + id: cmake_build + run: | + mkdir build + cd build + cmake -DLLAMA_VULKAN=ON .. + cmake --build . --config Release -j $(nproc) + ubuntu-22-cmake-sycl: runs-on: ubuntu-22.04 From 5cb02b4a012bb16c6c699c0c62c05ffa653eee0f Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 1 Mar 2024 10:08:08 +0100 Subject: [PATCH 746/811] server: allow to override threads server pool with --threads-http (#5794) --- examples/server/README.md | 1 + examples/server/server.cpp | 16 ++++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/examples/server/README.md b/examples/server/README.md index 0e9bd7fd4..ad35306c6 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -18,6 +18,7 @@ The project is under active development, and we are [looking for feedback and co - `--threads N`, `-t N`: Set the number of threads to use during generation. - `-tb N, --threads-batch N`: Set the number of threads to use during batch and prompt processing. If not specified, the number of threads will be set to the number of threads used for generation. +- `--threads-http N`: number of threads in the http server pool to process requests (default: `std::thread::hardware_concurrency()`) - `-m FNAME`, `--model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`). - `-a ALIAS`, `--alias ALIAS`: Set an alias for the model. The alias will be returned in API responses. - `-c N`, `--ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. 
The size may differ in other models, for example, baichuan models were build with a context of 4096. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 45c4aec4d..eea987966 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -43,6 +43,7 @@ struct server_params { int32_t write_timeout = 600; bool slots_endpoint = true; bool metrics_endpoint = false; + int n_threads_http = -1; }; bool server_verbose = false; @@ -2012,6 +2013,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? "enabled" : "disabled"); printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); printf(" -tb N, --threads-batch N number of threads to use during batch and prompt processing (default: same as --threads)\n"); + printf(" --threads-http N number of threads in the http server pool to process requests (default: hardware concurrency)\n"); printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); printf(" --rope-scaling {none,linear,yarn}\n"); printf(" RoPE frequency scaling method, defaults to linear unless specified by the model\n"); @@ -2298,6 +2300,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.n_threads_batch = std::stoi(argv[i]); } + else if (arg == "--threads-http") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + sparams.n_threads_http = std::stoi(argv[i]); + } else if (arg == "-b" || arg == "--batch-size") { if (++i >= argc) @@ -3449,6 +3460,11 @@ int main(int argc, char **argv) }*/ //); + if (sparams.n_threads_http > 0) { + log_data["n_threads_http"] = std::to_string(sparams.n_threads_http); + svr.new_task_queue = [&sparams] { return new httplib::ThreadPool(sparams.n_threads_http); }; + } + LOG_INFO("HTTP server listening", log_data); // run the HTTP server in a thread - see comment below std::thread t([&]() From 9600d59e010c18f5872580a21734ea1bf1968d04 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Fri, 1 Mar 2024 03:15:36 -0600 Subject: [PATCH 747/811] unicode : switch to multimap based nfd_map (#5799) * switch to multimap based nfd_map due to compile time issues * simplify multimap keys * dont construct new locale every time --- llama.cpp | 11 +- unicode.h | 566 +++++++++++++++++++++++++++++------------------------- 2 files changed, 312 insertions(+), 265 deletions(-) diff --git a/llama.cpp b/llama.cpp index 62699ce52..a35f07aa4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8947,10 +8947,10 @@ struct llm_tokenizer_wpm { std::vector codepoints = codepoints_from_utf8(text); std::vector nfd_codepoints; for (uint32_t code : codepoints) { - auto it = nfd_map.find(code); - if (it != nfd_map.end()) { - for (uint32_t c : it->second) { - nfd_codepoints.push_back(c); + auto it = nfd_map.equal_range(code); + if (it.first != it.second) { + for (auto jt = it.first; jt != it.second; jt++) { + nfd_codepoints.push_back(jt->second); } } else { nfd_codepoints.push_back(code); @@ -9001,12 +9001,13 @@ struct llm_tokenizer_wpm { } uint32_t to_lower(uint32_t code) { + static const std::locale locale("en_US.UTF-8"); #if defined(_WIN32) if (code > 0xFFFF) { return code; } #endif - return std::tolower(wchar_t(code), std::locale("en_US.UTF-8")); + return std::tolower(wchar_t(code), locale); } bool is_ascii_punct(uint32_t code) { diff --git a/unicode.h b/unicode.h index 620e2b580..f6be4549b 100644 --- a/unicode.h +++ b/unicode.h @@ -1,6 +1,7 @@ 
#pragma once #include +#include #include #include #include @@ -223,266 +224,311 @@ static const std::vector> control_ranges = { {0x2B81E, 0x2B81F}, {0x2CEA2, 0x2CEAF}, {0x2EBE1, 0x2F7FF}, {0x2FA1E, 0x2FFFF}, {0x3134B, 0xE00FF}, {0xE01F0, 0x10FFFF}, }; -static const std::unordered_map> nfd_map = { -{0xC0, {0x41, 0x300}}, {0xC1, {0x41, 0x301}}, {0xC2, {0x41, 0x302}}, {0xC3, {0x41, 0x303}}, {0xC4, {0x41, 0x308}}, {0xC5, {0x41, 0x30A}}, {0xC7, {0x43, 0x327}}, {0xC8, {0x45, 0x300}}, -{0xC9, {0x45, 0x301}}, {0xCA, {0x45, 0x302}}, {0xCB, {0x45, 0x308}}, {0xCC, {0x49, 0x300}}, {0xCD, {0x49, 0x301}}, {0xCE, {0x49, 0x302}}, {0xCF, {0x49, 0x308}}, {0xD1, {0x4E, 0x303}}, -{0xD2, {0x4F, 0x300}}, {0xD3, {0x4F, 0x301}}, {0xD4, {0x4F, 0x302}}, {0xD5, {0x4F, 0x303}}, {0xD6, {0x4F, 0x308}}, {0xD9, {0x55, 0x300}}, {0xDA, {0x55, 0x301}}, {0xDB, {0x55, 0x302}}, -{0xDC, {0x55, 0x308}}, {0xDD, {0x59, 0x301}}, {0xE0, {0x61, 0x300}}, {0xE1, {0x61, 0x301}}, {0xE2, {0x61, 0x302}}, {0xE3, {0x61, 0x303}}, {0xE4, {0x61, 0x308}}, {0xE5, {0x61, 0x30A}}, -{0xE7, {0x63, 0x327}}, {0xE8, {0x65, 0x300}}, {0xE9, {0x65, 0x301}}, {0xEA, {0x65, 0x302}}, {0xEB, {0x65, 0x308}}, {0xEC, {0x69, 0x300}}, {0xED, {0x69, 0x301}}, {0xEE, {0x69, 0x302}}, -{0xEF, {0x69, 0x308}}, {0xF1, {0x6E, 0x303}}, {0xF2, {0x6F, 0x300}}, {0xF3, {0x6F, 0x301}}, {0xF4, {0x6F, 0x302}}, {0xF5, {0x6F, 0x303}}, {0xF6, {0x6F, 0x308}}, {0xF9, {0x75, 0x300}}, -{0xFA, {0x75, 0x301}}, {0xFB, {0x75, 0x302}}, {0xFC, {0x75, 0x308}}, {0xFD, {0x79, 0x301}}, {0xFF, {0x79, 0x308}}, {0x100, {0x41, 0x304}}, {0x101, {0x61, 0x304}}, {0x102, {0x41, 0x306}}, -{0x103, {0x61, 0x306}}, {0x104, {0x41, 0x328}}, {0x105, {0x61, 0x328}}, {0x106, {0x43, 0x301}}, {0x107, {0x63, 0x301}}, {0x108, {0x43, 0x302}}, {0x109, {0x63, 0x302}}, {0x10A, {0x43, 0x307}}, -{0x10B, {0x63, 0x307}}, {0x10C, {0x43, 0x30C}}, {0x10D, {0x63, 0x30C}}, {0x10E, {0x44, 0x30C}}, {0x10F, {0x64, 0x30C}}, {0x112, {0x45, 0x304}}, {0x113, {0x65, 0x304}}, {0x114, {0x45, 0x306}}, -{0x115, {0x65, 0x306}}, {0x116, {0x45, 0x307}}, {0x117, {0x65, 0x307}}, {0x118, {0x45, 0x328}}, {0x119, {0x65, 0x328}}, {0x11A, {0x45, 0x30C}}, {0x11B, {0x65, 0x30C}}, {0x11C, {0x47, 0x302}}, -{0x11D, {0x67, 0x302}}, {0x11E, {0x47, 0x306}}, {0x11F, {0x67, 0x306}}, {0x120, {0x47, 0x307}}, {0x121, {0x67, 0x307}}, {0x122, {0x47, 0x327}}, {0x123, {0x67, 0x327}}, {0x124, {0x48, 0x302}}, -{0x125, {0x68, 0x302}}, {0x128, {0x49, 0x303}}, {0x129, {0x69, 0x303}}, {0x12A, {0x49, 0x304}}, {0x12B, {0x69, 0x304}}, {0x12C, {0x49, 0x306}}, {0x12D, {0x69, 0x306}}, {0x12E, {0x49, 0x328}}, -{0x12F, {0x69, 0x328}}, {0x130, {0x49, 0x307}}, {0x134, {0x4A, 0x302}}, {0x135, {0x6A, 0x302}}, {0x136, {0x4B, 0x327}}, {0x137, {0x6B, 0x327}}, {0x139, {0x4C, 0x301}}, {0x13A, {0x6C, 0x301}}, -{0x13B, {0x4C, 0x327}}, {0x13C, {0x6C, 0x327}}, {0x13D, {0x4C, 0x30C}}, {0x13E, {0x6C, 0x30C}}, {0x143, {0x4E, 0x301}}, {0x144, {0x6E, 0x301}}, {0x145, {0x4E, 0x327}}, {0x146, {0x6E, 0x327}}, -{0x147, {0x4E, 0x30C}}, {0x148, {0x6E, 0x30C}}, {0x14C, {0x4F, 0x304}}, {0x14D, {0x6F, 0x304}}, {0x14E, {0x4F, 0x306}}, {0x14F, {0x6F, 0x306}}, {0x150, {0x4F, 0x30B}}, {0x151, {0x6F, 0x30B}}, -{0x154, {0x52, 0x301}}, {0x155, {0x72, 0x301}}, {0x156, {0x52, 0x327}}, {0x157, {0x72, 0x327}}, {0x158, {0x52, 0x30C}}, {0x159, {0x72, 0x30C}}, {0x15A, {0x53, 0x301}}, {0x15B, {0x73, 0x301}}, -{0x15C, {0x53, 0x302}}, {0x15D, {0x73, 0x302}}, {0x15E, {0x53, 0x327}}, {0x15F, {0x73, 0x327}}, {0x160, {0x53, 0x30C}}, {0x161, {0x73, 0x30C}}, {0x162, {0x54, 0x327}}, {0x163, {0x74, 0x327}}, -{0x164, {0x54, 0x30C}}, 
{0x165, {0x74, 0x30C}}, {0x168, {0x55, 0x303}}, {0x169, {0x75, 0x303}}, {0x16A, {0x55, 0x304}}, {0x16B, {0x75, 0x304}}, {0x16C, {0x55, 0x306}}, {0x16D, {0x75, 0x306}}, -{0x16E, {0x55, 0x30A}}, {0x16F, {0x75, 0x30A}}, {0x170, {0x55, 0x30B}}, {0x171, {0x75, 0x30B}}, {0x172, {0x55, 0x328}}, {0x173, {0x75, 0x328}}, {0x174, {0x57, 0x302}}, {0x175, {0x77, 0x302}}, -{0x176, {0x59, 0x302}}, {0x177, {0x79, 0x302}}, {0x178, {0x59, 0x308}}, {0x179, {0x5A, 0x301}}, {0x17A, {0x7A, 0x301}}, {0x17B, {0x5A, 0x307}}, {0x17C, {0x7A, 0x307}}, {0x17D, {0x5A, 0x30C}}, -{0x17E, {0x7A, 0x30C}}, {0x1A0, {0x4F, 0x31B}}, {0x1A1, {0x6F, 0x31B}}, {0x1AF, {0x55, 0x31B}}, {0x1B0, {0x75, 0x31B}}, {0x1CD, {0x41, 0x30C}}, {0x1CE, {0x61, 0x30C}}, {0x1CF, {0x49, 0x30C}}, -{0x1D0, {0x69, 0x30C}}, {0x1D1, {0x4F, 0x30C}}, {0x1D2, {0x6F, 0x30C}}, {0x1D3, {0x55, 0x30C}}, {0x1D4, {0x75, 0x30C}}, {0x1D5, {0x55, 0x308, 0x304}}, {0x1D6, {0x75, 0x308, 0x304}}, -{0x1D7, {0x55, 0x308, 0x301}}, {0x1D8, {0x75, 0x308, 0x301}}, {0x1D9, {0x55, 0x308, 0x30C}}, {0x1DA, {0x75, 0x308, 0x30C}}, {0x1DB, {0x55, 0x308, 0x300}}, {0x1DC, {0x75, 0x308, 0x300}}, -{0x1DE, {0x41, 0x308, 0x304}}, {0x1DF, {0x61, 0x308, 0x304}}, {0x1E0, {0x41, 0x307, 0x304}}, {0x1E1, {0x61, 0x307, 0x304}}, {0x1E2, {0xC6, 0x304}}, {0x1E3, {0xE6, 0x304}}, {0x1E6, {0x47, 0x30C}}, -{0x1E7, {0x67, 0x30C}}, {0x1E8, {0x4B, 0x30C}}, {0x1E9, {0x6B, 0x30C}}, {0x1EA, {0x4F, 0x328}}, {0x1EB, {0x6F, 0x328}}, {0x1EC, {0x4F, 0x328, 0x304}}, {0x1ED, {0x6F, 0x328, 0x304}}, -{0x1EE, {0x1B7, 0x30C}}, {0x1EF, {0x292, 0x30C}}, {0x1F0, {0x6A, 0x30C}}, {0x1F4, {0x47, 0x301}}, {0x1F5, {0x67, 0x301}}, {0x1F8, {0x4E, 0x300}}, {0x1F9, {0x6E, 0x300}}, {0x1FA, {0x41, 0x30A, 0x301}}, -{0x1FB, {0x61, 0x30A, 0x301}}, {0x1FC, {0xC6, 0x301}}, {0x1FD, {0xE6, 0x301}}, {0x1FE, {0xD8, 0x301}}, {0x1FF, {0xF8, 0x301}}, {0x200, {0x41, 0x30F}}, {0x201, {0x61, 0x30F}}, {0x202, {0x41, 0x311}}, -{0x203, {0x61, 0x311}}, {0x204, {0x45, 0x30F}}, {0x205, {0x65, 0x30F}}, {0x206, {0x45, 0x311}}, {0x207, {0x65, 0x311}}, {0x208, {0x49, 0x30F}}, {0x209, {0x69, 0x30F}}, {0x20A, {0x49, 0x311}}, -{0x20B, {0x69, 0x311}}, {0x20C, {0x4F, 0x30F}}, {0x20D, {0x6F, 0x30F}}, {0x20E, {0x4F, 0x311}}, {0x20F, {0x6F, 0x311}}, {0x210, {0x52, 0x30F}}, {0x211, {0x72, 0x30F}}, {0x212, {0x52, 0x311}}, -{0x213, {0x72, 0x311}}, {0x214, {0x55, 0x30F}}, {0x215, {0x75, 0x30F}}, {0x216, {0x55, 0x311}}, {0x217, {0x75, 0x311}}, {0x218, {0x53, 0x326}}, {0x219, {0x73, 0x326}}, {0x21A, {0x54, 0x326}}, -{0x21B, {0x74, 0x326}}, {0x21E, {0x48, 0x30C}}, {0x21F, {0x68, 0x30C}}, {0x226, {0x41, 0x307}}, {0x227, {0x61, 0x307}}, {0x228, {0x45, 0x327}}, {0x229, {0x65, 0x327}}, {0x22A, {0x4F, 0x308, 0x304}}, -{0x22B, {0x6F, 0x308, 0x304}}, {0x22C, {0x4F, 0x303, 0x304}}, {0x22D, {0x6F, 0x303, 0x304}}, {0x22E, {0x4F, 0x307}}, {0x22F, {0x6F, 0x307}}, {0x230, {0x4F, 0x307, 0x304}}, -{0x231, {0x6F, 0x307, 0x304}}, {0x232, {0x59, 0x304}}, {0x233, {0x79, 0x304}}, {0x340, {0x300}}, {0x341, {0x301}}, {0x343, {0x313}}, {0x344, {0x308, 0x301}}, {0x374, {0x2B9}}, {0x37E, {0x3B}}, -{0x385, {0xA8, 0x301}}, {0x386, {0x391, 0x301}}, {0x387, {0xB7}}, {0x388, {0x395, 0x301}}, {0x389, {0x397, 0x301}}, {0x38A, {0x399, 0x301}}, {0x38C, {0x39F, 0x301}}, {0x38E, {0x3A5, 0x301}}, -{0x38F, {0x3A9, 0x301}}, {0x390, {0x3B9, 0x308, 0x301}}, {0x3AA, {0x399, 0x308}}, {0x3AB, {0x3A5, 0x308}}, {0x3AC, {0x3B1, 0x301}}, {0x3AD, {0x3B5, 0x301}}, {0x3AE, {0x3B7, 0x301}}, -{0x3AF, {0x3B9, 0x301}}, {0x3B0, {0x3C5, 0x308, 0x301}}, {0x3CA, {0x3B9, 0x308}}, {0x3CB, {0x3C5, 0x308}}, {0x3CC, {0x3BF, 0x301}}, 
{0x3CD, {0x3C5, 0x301}}, {0x3CE, {0x3C9, 0x301}}, -{0x3D3, {0x3D2, 0x301}}, {0x3D4, {0x3D2, 0x308}}, {0x400, {0x415, 0x300}}, {0x401, {0x415, 0x308}}, {0x403, {0x413, 0x301}}, {0x407, {0x406, 0x308}}, {0x40C, {0x41A, 0x301}}, {0x40D, {0x418, 0x300}}, -{0x40E, {0x423, 0x306}}, {0x419, {0x418, 0x306}}, {0x439, {0x438, 0x306}}, {0x450, {0x435, 0x300}}, {0x451, {0x435, 0x308}}, {0x453, {0x433, 0x301}}, {0x457, {0x456, 0x308}}, {0x45C, {0x43A, 0x301}}, -{0x45D, {0x438, 0x300}}, {0x45E, {0x443, 0x306}}, {0x476, {0x474, 0x30F}}, {0x477, {0x475, 0x30F}}, {0x4C1, {0x416, 0x306}}, {0x4C2, {0x436, 0x306}}, {0x4D0, {0x410, 0x306}}, {0x4D1, {0x430, 0x306}}, -{0x4D2, {0x410, 0x308}}, {0x4D3, {0x430, 0x308}}, {0x4D6, {0x415, 0x306}}, {0x4D7, {0x435, 0x306}}, {0x4DA, {0x4D8, 0x308}}, {0x4DB, {0x4D9, 0x308}}, {0x4DC, {0x416, 0x308}}, {0x4DD, {0x436, 0x308}}, -{0x4DE, {0x417, 0x308}}, {0x4DF, {0x437, 0x308}}, {0x4E2, {0x418, 0x304}}, {0x4E3, {0x438, 0x304}}, {0x4E4, {0x418, 0x308}}, {0x4E5, {0x438, 0x308}}, {0x4E6, {0x41E, 0x308}}, {0x4E7, {0x43E, 0x308}}, -{0x4EA, {0x4E8, 0x308}}, {0x4EB, {0x4E9, 0x308}}, {0x4EC, {0x42D, 0x308}}, {0x4ED, {0x44D, 0x308}}, {0x4EE, {0x423, 0x304}}, {0x4EF, {0x443, 0x304}}, {0x4F0, {0x423, 0x308}}, {0x4F1, {0x443, 0x308}}, -{0x4F2, {0x423, 0x30B}}, {0x4F3, {0x443, 0x30B}}, {0x4F4, {0x427, 0x308}}, {0x4F5, {0x447, 0x308}}, {0x4F8, {0x42B, 0x308}}, {0x4F9, {0x44B, 0x308}}, {0x622, {0x627, 0x653}}, {0x623, {0x627, 0x654}}, -{0x624, {0x648, 0x654}}, {0x625, {0x627, 0x655}}, {0x626, {0x64A, 0x654}}, {0x6C0, {0x6D5, 0x654}}, {0x6C2, {0x6C1, 0x654}}, {0x6D3, {0x6D2, 0x654}}, {0x929, {0x928, 0x93C}}, {0x931, {0x930, 0x93C}}, -{0x934, {0x933, 0x93C}}, {0x958, {0x915, 0x93C}}, {0x959, {0x916, 0x93C}}, {0x95A, {0x917, 0x93C}}, {0x95B, {0x91C, 0x93C}}, {0x95C, {0x921, 0x93C}}, {0x95D, {0x922, 0x93C}}, {0x95E, {0x92B, 0x93C}}, -{0x95F, {0x92F, 0x93C}}, {0x9CB, {0x9C7, 0x9BE}}, {0x9CC, {0x9C7, 0x9D7}}, {0x9DC, {0x9A1, 0x9BC}}, {0x9DD, {0x9A2, 0x9BC}}, {0x9DF, {0x9AF, 0x9BC}}, {0xA33, {0xA32, 0xA3C}}, {0xA36, {0xA38, 0xA3C}}, -{0xA59, {0xA16, 0xA3C}}, {0xA5A, {0xA17, 0xA3C}}, {0xA5B, {0xA1C, 0xA3C}}, {0xA5E, {0xA2B, 0xA3C}}, {0xB48, {0xB47, 0xB56}}, {0xB4B, {0xB47, 0xB3E}}, {0xB4C, {0xB47, 0xB57}}, {0xB5C, {0xB21, 0xB3C}}, -{0xB5D, {0xB22, 0xB3C}}, {0xB94, {0xB92, 0xBD7}}, {0xBCA, {0xBC6, 0xBBE}}, {0xBCB, {0xBC7, 0xBBE}}, {0xBCC, {0xBC6, 0xBD7}}, {0xC48, {0xC46, 0xC56}}, {0xCC0, {0xCBF, 0xCD5}}, {0xCC7, {0xCC6, 0xCD5}}, -{0xCC8, {0xCC6, 0xCD6}}, {0xCCA, {0xCC6, 0xCC2}}, {0xCCB, {0xCC6, 0xCC2, 0xCD5}}, {0xD4A, {0xD46, 0xD3E}}, {0xD4B, {0xD47, 0xD3E}}, {0xD4C, {0xD46, 0xD57}}, {0xDDA, {0xDD9, 0xDCA}}, -{0xDDC, {0xDD9, 0xDCF}}, {0xDDD, {0xDD9, 0xDCF, 0xDCA}}, {0xDDE, {0xDD9, 0xDDF}}, {0xF43, {0xF42, 0xFB7}}, {0xF4D, {0xF4C, 0xFB7}}, {0xF52, {0xF51, 0xFB7}}, {0xF57, {0xF56, 0xFB7}}, -{0xF5C, {0xF5B, 0xFB7}}, {0xF69, {0xF40, 0xFB5}}, {0xF73, {0xF71, 0xF72}}, {0xF75, {0xF71, 0xF74}}, {0xF76, {0xFB2, 0xF80}}, {0xF78, {0xFB3, 0xF80}}, {0xF81, {0xF71, 0xF80}}, {0xF93, {0xF92, 0xFB7}}, -{0xF9D, {0xF9C, 0xFB7}}, {0xFA2, {0xFA1, 0xFB7}}, {0xFA7, {0xFA6, 0xFB7}}, {0xFAC, {0xFAB, 0xFB7}}, {0xFB9, {0xF90, 0xFB5}}, {0x1026, {0x1025, 0x102E}}, {0x1B06, {0x1B05, 0x1B35}}, -{0x1B08, {0x1B07, 0x1B35}}, {0x1B0A, {0x1B09, 0x1B35}}, {0x1B0C, {0x1B0B, 0x1B35}}, {0x1B0E, {0x1B0D, 0x1B35}}, {0x1B12, {0x1B11, 0x1B35}}, {0x1B3B, {0x1B3A, 0x1B35}}, {0x1B3D, {0x1B3C, 0x1B35}}, -{0x1B40, {0x1B3E, 0x1B35}}, {0x1B41, {0x1B3F, 0x1B35}}, {0x1B43, {0x1B42, 0x1B35}}, {0x1E00, {0x41, 0x325}}, {0x1E01, {0x61, 0x325}}, {0x1E02, 
{0x42, 0x307}}, {0x1E03, {0x62, 0x307}}, -{0x1E04, {0x42, 0x323}}, {0x1E05, {0x62, 0x323}}, {0x1E06, {0x42, 0x331}}, {0x1E07, {0x62, 0x331}}, {0x1E08, {0x43, 0x327, 0x301}}, {0x1E09, {0x63, 0x327, 0x301}}, {0x1E0A, {0x44, 0x307}}, -{0x1E0B, {0x64, 0x307}}, {0x1E0C, {0x44, 0x323}}, {0x1E0D, {0x64, 0x323}}, {0x1E0E, {0x44, 0x331}}, {0x1E0F, {0x64, 0x331}}, {0x1E10, {0x44, 0x327}}, {0x1E11, {0x64, 0x327}}, {0x1E12, {0x44, 0x32D}}, -{0x1E13, {0x64, 0x32D}}, {0x1E14, {0x45, 0x304, 0x300}}, {0x1E15, {0x65, 0x304, 0x300}}, {0x1E16, {0x45, 0x304, 0x301}}, {0x1E17, {0x65, 0x304, 0x301}}, {0x1E18, {0x45, 0x32D}}, -{0x1E19, {0x65, 0x32D}}, {0x1E1A, {0x45, 0x330}}, {0x1E1B, {0x65, 0x330}}, {0x1E1C, {0x45, 0x327, 0x306}}, {0x1E1D, {0x65, 0x327, 0x306}}, {0x1E1E, {0x46, 0x307}}, {0x1E1F, {0x66, 0x307}}, -{0x1E20, {0x47, 0x304}}, {0x1E21, {0x67, 0x304}}, {0x1E22, {0x48, 0x307}}, {0x1E23, {0x68, 0x307}}, {0x1E24, {0x48, 0x323}}, {0x1E25, {0x68, 0x323}}, {0x1E26, {0x48, 0x308}}, {0x1E27, {0x68, 0x308}}, -{0x1E28, {0x48, 0x327}}, {0x1E29, {0x68, 0x327}}, {0x1E2A, {0x48, 0x32E}}, {0x1E2B, {0x68, 0x32E}}, {0x1E2C, {0x49, 0x330}}, {0x1E2D, {0x69, 0x330}}, {0x1E2E, {0x49, 0x308, 0x301}}, -{0x1E2F, {0x69, 0x308, 0x301}}, {0x1E30, {0x4B, 0x301}}, {0x1E31, {0x6B, 0x301}}, {0x1E32, {0x4B, 0x323}}, {0x1E33, {0x6B, 0x323}}, {0x1E34, {0x4B, 0x331}}, {0x1E35, {0x6B, 0x331}}, -{0x1E36, {0x4C, 0x323}}, {0x1E37, {0x6C, 0x323}}, {0x1E38, {0x4C, 0x323, 0x304}}, {0x1E39, {0x6C, 0x323, 0x304}}, {0x1E3A, {0x4C, 0x331}}, {0x1E3B, {0x6C, 0x331}}, {0x1E3C, {0x4C, 0x32D}}, -{0x1E3D, {0x6C, 0x32D}}, {0x1E3E, {0x4D, 0x301}}, {0x1E3F, {0x6D, 0x301}}, {0x1E40, {0x4D, 0x307}}, {0x1E41, {0x6D, 0x307}}, {0x1E42, {0x4D, 0x323}}, {0x1E43, {0x6D, 0x323}}, {0x1E44, {0x4E, 0x307}}, -{0x1E45, {0x6E, 0x307}}, {0x1E46, {0x4E, 0x323}}, {0x1E47, {0x6E, 0x323}}, {0x1E48, {0x4E, 0x331}}, {0x1E49, {0x6E, 0x331}}, {0x1E4A, {0x4E, 0x32D}}, {0x1E4B, {0x6E, 0x32D}}, -{0x1E4C, {0x4F, 0x303, 0x301}}, {0x1E4D, {0x6F, 0x303, 0x301}}, {0x1E4E, {0x4F, 0x303, 0x308}}, {0x1E4F, {0x6F, 0x303, 0x308}}, {0x1E50, {0x4F, 0x304, 0x300}}, {0x1E51, {0x6F, 0x304, 0x300}}, -{0x1E52, {0x4F, 0x304, 0x301}}, {0x1E53, {0x6F, 0x304, 0x301}}, {0x1E54, {0x50, 0x301}}, {0x1E55, {0x70, 0x301}}, {0x1E56, {0x50, 0x307}}, {0x1E57, {0x70, 0x307}}, {0x1E58, {0x52, 0x307}}, -{0x1E59, {0x72, 0x307}}, {0x1E5A, {0x52, 0x323}}, {0x1E5B, {0x72, 0x323}}, {0x1E5C, {0x52, 0x323, 0x304}}, {0x1E5D, {0x72, 0x323, 0x304}}, {0x1E5E, {0x52, 0x331}}, {0x1E5F, {0x72, 0x331}}, -{0x1E60, {0x53, 0x307}}, {0x1E61, {0x73, 0x307}}, {0x1E62, {0x53, 0x323}}, {0x1E63, {0x73, 0x323}}, {0x1E64, {0x53, 0x301, 0x307}}, {0x1E65, {0x73, 0x301, 0x307}}, {0x1E66, {0x53, 0x30C, 0x307}}, -{0x1E67, {0x73, 0x30C, 0x307}}, {0x1E68, {0x53, 0x323, 0x307}}, {0x1E69, {0x73, 0x323, 0x307}}, {0x1E6A, {0x54, 0x307}}, {0x1E6B, {0x74, 0x307}}, {0x1E6C, {0x54, 0x323}}, {0x1E6D, {0x74, 0x323}}, -{0x1E6E, {0x54, 0x331}}, {0x1E6F, {0x74, 0x331}}, {0x1E70, {0x54, 0x32D}}, {0x1E71, {0x74, 0x32D}}, {0x1E72, {0x55, 0x324}}, {0x1E73, {0x75, 0x324}}, {0x1E74, {0x55, 0x330}}, {0x1E75, {0x75, 0x330}}, -{0x1E76, {0x55, 0x32D}}, {0x1E77, {0x75, 0x32D}}, {0x1E78, {0x55, 0x303, 0x301}}, {0x1E79, {0x75, 0x303, 0x301}}, {0x1E7A, {0x55, 0x304, 0x308}}, {0x1E7B, {0x75, 0x304, 0x308}}, -{0x1E7C, {0x56, 0x303}}, {0x1E7D, {0x76, 0x303}}, {0x1E7E, {0x56, 0x323}}, {0x1E7F, {0x76, 0x323}}, {0x1E80, {0x57, 0x300}}, {0x1E81, {0x77, 0x300}}, {0x1E82, {0x57, 0x301}}, {0x1E83, {0x77, 0x301}}, -{0x1E84, {0x57, 0x308}}, {0x1E85, {0x77, 0x308}}, {0x1E86, {0x57, 
0x307}}, {0x1E87, {0x77, 0x307}}, {0x1E88, {0x57, 0x323}}, {0x1E89, {0x77, 0x323}}, {0x1E8A, {0x58, 0x307}}, {0x1E8B, {0x78, 0x307}}, -{0x1E8C, {0x58, 0x308}}, {0x1E8D, {0x78, 0x308}}, {0x1E8E, {0x59, 0x307}}, {0x1E8F, {0x79, 0x307}}, {0x1E90, {0x5A, 0x302}}, {0x1E91, {0x7A, 0x302}}, {0x1E92, {0x5A, 0x323}}, {0x1E93, {0x7A, 0x323}}, -{0x1E94, {0x5A, 0x331}}, {0x1E95, {0x7A, 0x331}}, {0x1E96, {0x68, 0x331}}, {0x1E97, {0x74, 0x308}}, {0x1E98, {0x77, 0x30A}}, {0x1E99, {0x79, 0x30A}}, {0x1E9B, {0x17F, 0x307}}, {0x1EA0, {0x41, 0x323}}, -{0x1EA1, {0x61, 0x323}}, {0x1EA2, {0x41, 0x309}}, {0x1EA3, {0x61, 0x309}}, {0x1EA4, {0x41, 0x302, 0x301}}, {0x1EA5, {0x61, 0x302, 0x301}}, {0x1EA6, {0x41, 0x302, 0x300}}, -{0x1EA7, {0x61, 0x302, 0x300}}, {0x1EA8, {0x41, 0x302, 0x309}}, {0x1EA9, {0x61, 0x302, 0x309}}, {0x1EAA, {0x41, 0x302, 0x303}}, {0x1EAB, {0x61, 0x302, 0x303}}, {0x1EAC, {0x41, 0x323, 0x302}}, -{0x1EAD, {0x61, 0x323, 0x302}}, {0x1EAE, {0x41, 0x306, 0x301}}, {0x1EAF, {0x61, 0x306, 0x301}}, {0x1EB0, {0x41, 0x306, 0x300}}, {0x1EB1, {0x61, 0x306, 0x300}}, {0x1EB2, {0x41, 0x306, 0x309}}, -{0x1EB3, {0x61, 0x306, 0x309}}, {0x1EB4, {0x41, 0x306, 0x303}}, {0x1EB5, {0x61, 0x306, 0x303}}, {0x1EB6, {0x41, 0x323, 0x306}}, {0x1EB7, {0x61, 0x323, 0x306}}, {0x1EB8, {0x45, 0x323}}, -{0x1EB9, {0x65, 0x323}}, {0x1EBA, {0x45, 0x309}}, {0x1EBB, {0x65, 0x309}}, {0x1EBC, {0x45, 0x303}}, {0x1EBD, {0x65, 0x303}}, {0x1EBE, {0x45, 0x302, 0x301}}, {0x1EBF, {0x65, 0x302, 0x301}}, -{0x1EC0, {0x45, 0x302, 0x300}}, {0x1EC1, {0x65, 0x302, 0x300}}, {0x1EC2, {0x45, 0x302, 0x309}}, {0x1EC3, {0x65, 0x302, 0x309}}, {0x1EC4, {0x45, 0x302, 0x303}}, {0x1EC5, {0x65, 0x302, 0x303}}, -{0x1EC6, {0x45, 0x323, 0x302}}, {0x1EC7, {0x65, 0x323, 0x302}}, {0x1EC8, {0x49, 0x309}}, {0x1EC9, {0x69, 0x309}}, {0x1ECA, {0x49, 0x323}}, {0x1ECB, {0x69, 0x323}}, {0x1ECC, {0x4F, 0x323}}, -{0x1ECD, {0x6F, 0x323}}, {0x1ECE, {0x4F, 0x309}}, {0x1ECF, {0x6F, 0x309}}, {0x1ED0, {0x4F, 0x302, 0x301}}, {0x1ED1, {0x6F, 0x302, 0x301}}, {0x1ED2, {0x4F, 0x302, 0x300}}, -{0x1ED3, {0x6F, 0x302, 0x300}}, {0x1ED4, {0x4F, 0x302, 0x309}}, {0x1ED5, {0x6F, 0x302, 0x309}}, {0x1ED6, {0x4F, 0x302, 0x303}}, {0x1ED7, {0x6F, 0x302, 0x303}}, {0x1ED8, {0x4F, 0x323, 0x302}}, -{0x1ED9, {0x6F, 0x323, 0x302}}, {0x1EDA, {0x4F, 0x31B, 0x301}}, {0x1EDB, {0x6F, 0x31B, 0x301}}, {0x1EDC, {0x4F, 0x31B, 0x300}}, {0x1EDD, {0x6F, 0x31B, 0x300}}, {0x1EDE, {0x4F, 0x31B, 0x309}}, -{0x1EDF, {0x6F, 0x31B, 0x309}}, {0x1EE0, {0x4F, 0x31B, 0x303}}, {0x1EE1, {0x6F, 0x31B, 0x303}}, {0x1EE2, {0x4F, 0x31B, 0x323}}, {0x1EE3, {0x6F, 0x31B, 0x323}}, {0x1EE4, {0x55, 0x323}}, -{0x1EE5, {0x75, 0x323}}, {0x1EE6, {0x55, 0x309}}, {0x1EE7, {0x75, 0x309}}, {0x1EE8, {0x55, 0x31B, 0x301}}, {0x1EE9, {0x75, 0x31B, 0x301}}, {0x1EEA, {0x55, 0x31B, 0x300}}, -{0x1EEB, {0x75, 0x31B, 0x300}}, {0x1EEC, {0x55, 0x31B, 0x309}}, {0x1EED, {0x75, 0x31B, 0x309}}, {0x1EEE, {0x55, 0x31B, 0x303}}, {0x1EEF, {0x75, 0x31B, 0x303}}, {0x1EF0, {0x55, 0x31B, 0x323}}, -{0x1EF1, {0x75, 0x31B, 0x323}}, {0x1EF2, {0x59, 0x300}}, {0x1EF3, {0x79, 0x300}}, {0x1EF4, {0x59, 0x323}}, {0x1EF5, {0x79, 0x323}}, {0x1EF6, {0x59, 0x309}}, {0x1EF7, {0x79, 0x309}}, -{0x1EF8, {0x59, 0x303}}, {0x1EF9, {0x79, 0x303}}, {0x1F00, {0x3B1, 0x313}}, {0x1F01, {0x3B1, 0x314}}, {0x1F02, {0x3B1, 0x313, 0x300}}, {0x1F03, {0x3B1, 0x314, 0x300}}, {0x1F04, {0x3B1, 0x313, 0x301}}, -{0x1F05, {0x3B1, 0x314, 0x301}}, {0x1F06, {0x3B1, 0x313, 0x342}}, {0x1F07, {0x3B1, 0x314, 0x342}}, {0x1F08, {0x391, 0x313}}, {0x1F09, {0x391, 0x314}}, {0x1F0A, {0x391, 0x313, 0x300}}, -{0x1F0B, {0x391, 
0x314, 0x300}}, {0x1F0C, {0x391, 0x313, 0x301}}, {0x1F0D, {0x391, 0x314, 0x301}}, {0x1F0E, {0x391, 0x313, 0x342}}, {0x1F0F, {0x391, 0x314, 0x342}}, {0x1F10, {0x3B5, 0x313}}, -{0x1F11, {0x3B5, 0x314}}, {0x1F12, {0x3B5, 0x313, 0x300}}, {0x1F13, {0x3B5, 0x314, 0x300}}, {0x1F14, {0x3B5, 0x313, 0x301}}, {0x1F15, {0x3B5, 0x314, 0x301}}, {0x1F18, {0x395, 0x313}}, -{0x1F19, {0x395, 0x314}}, {0x1F1A, {0x395, 0x313, 0x300}}, {0x1F1B, {0x395, 0x314, 0x300}}, {0x1F1C, {0x395, 0x313, 0x301}}, {0x1F1D, {0x395, 0x314, 0x301}}, {0x1F20, {0x3B7, 0x313}}, -{0x1F21, {0x3B7, 0x314}}, {0x1F22, {0x3B7, 0x313, 0x300}}, {0x1F23, {0x3B7, 0x314, 0x300}}, {0x1F24, {0x3B7, 0x313, 0x301}}, {0x1F25, {0x3B7, 0x314, 0x301}}, {0x1F26, {0x3B7, 0x313, 0x342}}, -{0x1F27, {0x3B7, 0x314, 0x342}}, {0x1F28, {0x397, 0x313}}, {0x1F29, {0x397, 0x314}}, {0x1F2A, {0x397, 0x313, 0x300}}, {0x1F2B, {0x397, 0x314, 0x300}}, {0x1F2C, {0x397, 0x313, 0x301}}, -{0x1F2D, {0x397, 0x314, 0x301}}, {0x1F2E, {0x397, 0x313, 0x342}}, {0x1F2F, {0x397, 0x314, 0x342}}, {0x1F30, {0x3B9, 0x313}}, {0x1F31, {0x3B9, 0x314}}, {0x1F32, {0x3B9, 0x313, 0x300}}, -{0x1F33, {0x3B9, 0x314, 0x300}}, {0x1F34, {0x3B9, 0x313, 0x301}}, {0x1F35, {0x3B9, 0x314, 0x301}}, {0x1F36, {0x3B9, 0x313, 0x342}}, {0x1F37, {0x3B9, 0x314, 0x342}}, {0x1F38, {0x399, 0x313}}, -{0x1F39, {0x399, 0x314}}, {0x1F3A, {0x399, 0x313, 0x300}}, {0x1F3B, {0x399, 0x314, 0x300}}, {0x1F3C, {0x399, 0x313, 0x301}}, {0x1F3D, {0x399, 0x314, 0x301}}, {0x1F3E, {0x399, 0x313, 0x342}}, -{0x1F3F, {0x399, 0x314, 0x342}}, {0x1F40, {0x3BF, 0x313}}, {0x1F41, {0x3BF, 0x314}}, {0x1F42, {0x3BF, 0x313, 0x300}}, {0x1F43, {0x3BF, 0x314, 0x300}}, {0x1F44, {0x3BF, 0x313, 0x301}}, -{0x1F45, {0x3BF, 0x314, 0x301}}, {0x1F48, {0x39F, 0x313}}, {0x1F49, {0x39F, 0x314}}, {0x1F4A, {0x39F, 0x313, 0x300}}, {0x1F4B, {0x39F, 0x314, 0x300}}, {0x1F4C, {0x39F, 0x313, 0x301}}, -{0x1F4D, {0x39F, 0x314, 0x301}}, {0x1F50, {0x3C5, 0x313}}, {0x1F51, {0x3C5, 0x314}}, {0x1F52, {0x3C5, 0x313, 0x300}}, {0x1F53, {0x3C5, 0x314, 0x300}}, {0x1F54, {0x3C5, 0x313, 0x301}}, -{0x1F55, {0x3C5, 0x314, 0x301}}, {0x1F56, {0x3C5, 0x313, 0x342}}, {0x1F57, {0x3C5, 0x314, 0x342}}, {0x1F59, {0x3A5, 0x314}}, {0x1F5B, {0x3A5, 0x314, 0x300}}, {0x1F5D, {0x3A5, 0x314, 0x301}}, -{0x1F5F, {0x3A5, 0x314, 0x342}}, {0x1F60, {0x3C9, 0x313}}, {0x1F61, {0x3C9, 0x314}}, {0x1F62, {0x3C9, 0x313, 0x300}}, {0x1F63, {0x3C9, 0x314, 0x300}}, {0x1F64, {0x3C9, 0x313, 0x301}}, -{0x1F65, {0x3C9, 0x314, 0x301}}, {0x1F66, {0x3C9, 0x313, 0x342}}, {0x1F67, {0x3C9, 0x314, 0x342}}, {0x1F68, {0x3A9, 0x313}}, {0x1F69, {0x3A9, 0x314}}, {0x1F6A, {0x3A9, 0x313, 0x300}}, -{0x1F6B, {0x3A9, 0x314, 0x300}}, {0x1F6C, {0x3A9, 0x313, 0x301}}, {0x1F6D, {0x3A9, 0x314, 0x301}}, {0x1F6E, {0x3A9, 0x313, 0x342}}, {0x1F6F, {0x3A9, 0x314, 0x342}}, {0x1F70, {0x3B1, 0x300}}, -{0x1F71, {0x3B1, 0x301}}, {0x1F72, {0x3B5, 0x300}}, {0x1F73, {0x3B5, 0x301}}, {0x1F74, {0x3B7, 0x300}}, {0x1F75, {0x3B7, 0x301}}, {0x1F76, {0x3B9, 0x300}}, {0x1F77, {0x3B9, 0x301}}, -{0x1F78, {0x3BF, 0x300}}, {0x1F79, {0x3BF, 0x301}}, {0x1F7A, {0x3C5, 0x300}}, {0x1F7B, {0x3C5, 0x301}}, {0x1F7C, {0x3C9, 0x300}}, {0x1F7D, {0x3C9, 0x301}}, {0x1F80, {0x3B1, 0x313, 0x345}}, -{0x1F81, {0x3B1, 0x314, 0x345}}, {0x1F82, {0x3B1, 0x313, 0x300, 0x345}}, {0x1F83, {0x3B1, 0x314, 0x300, 0x345}}, {0x1F84, {0x3B1, 0x313, 0x301, 0x345}}, {0x1F85, {0x3B1, 0x314, 0x301, 0x345}}, -{0x1F86, {0x3B1, 0x313, 0x342, 0x345}}, {0x1F87, {0x3B1, 0x314, 0x342, 0x345}}, {0x1F88, {0x391, 0x313, 0x345}}, {0x1F89, {0x391, 0x314, 0x345}}, {0x1F8A, {0x391, 0x313, 0x300, 0x345}}, 
-{0x1F8B, {0x391, 0x314, 0x300, 0x345}}, {0x1F8C, {0x391, 0x313, 0x301, 0x345}}, {0x1F8D, {0x391, 0x314, 0x301, 0x345}}, {0x1F8E, {0x391, 0x313, 0x342, 0x345}}, {0x1F8F, {0x391, 0x314, 0x342, 0x345}}, -{0x1F90, {0x3B7, 0x313, 0x345}}, {0x1F91, {0x3B7, 0x314, 0x345}}, {0x1F92, {0x3B7, 0x313, 0x300, 0x345}}, {0x1F93, {0x3B7, 0x314, 0x300, 0x345}}, {0x1F94, {0x3B7, 0x313, 0x301, 0x345}}, -{0x1F95, {0x3B7, 0x314, 0x301, 0x345}}, {0x1F96, {0x3B7, 0x313, 0x342, 0x345}}, {0x1F97, {0x3B7, 0x314, 0x342, 0x345}}, {0x1F98, {0x397, 0x313, 0x345}}, {0x1F99, {0x397, 0x314, 0x345}}, -{0x1F9A, {0x397, 0x313, 0x300, 0x345}}, {0x1F9B, {0x397, 0x314, 0x300, 0x345}}, {0x1F9C, {0x397, 0x313, 0x301, 0x345}}, {0x1F9D, {0x397, 0x314, 0x301, 0x345}}, {0x1F9E, {0x397, 0x313, 0x342, 0x345}}, -{0x1F9F, {0x397, 0x314, 0x342, 0x345}}, {0x1FA0, {0x3C9, 0x313, 0x345}}, {0x1FA1, {0x3C9, 0x314, 0x345}}, {0x1FA2, {0x3C9, 0x313, 0x300, 0x345}}, {0x1FA3, {0x3C9, 0x314, 0x300, 0x345}}, -{0x1FA4, {0x3C9, 0x313, 0x301, 0x345}}, {0x1FA5, {0x3C9, 0x314, 0x301, 0x345}}, {0x1FA6, {0x3C9, 0x313, 0x342, 0x345}}, {0x1FA7, {0x3C9, 0x314, 0x342, 0x345}}, {0x1FA8, {0x3A9, 0x313, 0x345}}, -{0x1FA9, {0x3A9, 0x314, 0x345}}, {0x1FAA, {0x3A9, 0x313, 0x300, 0x345}}, {0x1FAB, {0x3A9, 0x314, 0x300, 0x345}}, {0x1FAC, {0x3A9, 0x313, 0x301, 0x345}}, {0x1FAD, {0x3A9, 0x314, 0x301, 0x345}}, -{0x1FAE, {0x3A9, 0x313, 0x342, 0x345}}, {0x1FAF, {0x3A9, 0x314, 0x342, 0x345}}, {0x1FB0, {0x3B1, 0x306}}, {0x1FB1, {0x3B1, 0x304}}, {0x1FB2, {0x3B1, 0x300, 0x345}}, {0x1FB3, {0x3B1, 0x345}}, -{0x1FB4, {0x3B1, 0x301, 0x345}}, {0x1FB6, {0x3B1, 0x342}}, {0x1FB7, {0x3B1, 0x342, 0x345}}, {0x1FB8, {0x391, 0x306}}, {0x1FB9, {0x391, 0x304}}, {0x1FBA, {0x391, 0x300}}, {0x1FBB, {0x391, 0x301}}, -{0x1FBC, {0x391, 0x345}}, {0x1FBE, {0x3B9}}, {0x1FC1, {0xA8, 0x342}}, {0x1FC2, {0x3B7, 0x300, 0x345}}, {0x1FC3, {0x3B7, 0x345}}, {0x1FC4, {0x3B7, 0x301, 0x345}}, {0x1FC6, {0x3B7, 0x342}}, -{0x1FC7, {0x3B7, 0x342, 0x345}}, {0x1FC8, {0x395, 0x300}}, {0x1FC9, {0x395, 0x301}}, {0x1FCA, {0x397, 0x300}}, {0x1FCB, {0x397, 0x301}}, {0x1FCC, {0x397, 0x345}}, {0x1FCD, {0x1FBF, 0x300}}, -{0x1FCE, {0x1FBF, 0x301}}, {0x1FCF, {0x1FBF, 0x342}}, {0x1FD0, {0x3B9, 0x306}}, {0x1FD1, {0x3B9, 0x304}}, {0x1FD2, {0x3B9, 0x308, 0x300}}, {0x1FD3, {0x3B9, 0x308, 0x301}}, {0x1FD6, {0x3B9, 0x342}}, -{0x1FD7, {0x3B9, 0x308, 0x342}}, {0x1FD8, {0x399, 0x306}}, {0x1FD9, {0x399, 0x304}}, {0x1FDA, {0x399, 0x300}}, {0x1FDB, {0x399, 0x301}}, {0x1FDD, {0x1FFE, 0x300}}, {0x1FDE, {0x1FFE, 0x301}}, -{0x1FDF, {0x1FFE, 0x342}}, {0x1FE0, {0x3C5, 0x306}}, {0x1FE1, {0x3C5, 0x304}}, {0x1FE2, {0x3C5, 0x308, 0x300}}, {0x1FE3, {0x3C5, 0x308, 0x301}}, {0x1FE4, {0x3C1, 0x313}}, {0x1FE5, {0x3C1, 0x314}}, -{0x1FE6, {0x3C5, 0x342}}, {0x1FE7, {0x3C5, 0x308, 0x342}}, {0x1FE8, {0x3A5, 0x306}}, {0x1FE9, {0x3A5, 0x304}}, {0x1FEA, {0x3A5, 0x300}}, {0x1FEB, {0x3A5, 0x301}}, {0x1FEC, {0x3A1, 0x314}}, -{0x1FED, {0xA8, 0x300}}, {0x1FEE, {0xA8, 0x301}}, {0x1FEF, {0x60}}, {0x1FF2, {0x3C9, 0x300, 0x345}}, {0x1FF3, {0x3C9, 0x345}}, {0x1FF4, {0x3C9, 0x301, 0x345}}, {0x1FF6, {0x3C9, 0x342}}, -{0x1FF7, {0x3C9, 0x342, 0x345}}, {0x1FF8, {0x39F, 0x300}}, {0x1FF9, {0x39F, 0x301}}, {0x1FFA, {0x3A9, 0x300}}, {0x1FFB, {0x3A9, 0x301}}, {0x1FFC, {0x3A9, 0x345}}, {0x1FFD, {0xB4}}, {0x2000, {0x2002}}, -{0x2001, {0x2003}}, {0x2126, {0x3A9}}, {0x212A, {0x4B}}, {0x212B, {0x41, 0x30A}}, {0x219A, {0x2190, 0x338}}, {0x219B, {0x2192, 0x338}}, {0x21AE, {0x2194, 0x338}}, {0x21CD, {0x21D0, 0x338}}, -{0x21CE, {0x21D4, 0x338}}, {0x21CF, {0x21D2, 0x338}}, {0x2204, {0x2203, 
0x338}}, {0x2209, {0x2208, 0x338}}, {0x220C, {0x220B, 0x338}}, {0x2224, {0x2223, 0x338}}, {0x2226, {0x2225, 0x338}}, -{0x2241, {0x223C, 0x338}}, {0x2244, {0x2243, 0x338}}, {0x2247, {0x2245, 0x338}}, {0x2249, {0x2248, 0x338}}, {0x2260, {0x3D, 0x338}}, {0x2262, {0x2261, 0x338}}, {0x226D, {0x224D, 0x338}}, -{0x226E, {0x3C, 0x338}}, {0x226F, {0x3E, 0x338}}, {0x2270, {0x2264, 0x338}}, {0x2271, {0x2265, 0x338}}, {0x2274, {0x2272, 0x338}}, {0x2275, {0x2273, 0x338}}, {0x2278, {0x2276, 0x338}}, -{0x2279, {0x2277, 0x338}}, {0x2280, {0x227A, 0x338}}, {0x2281, {0x227B, 0x338}}, {0x2284, {0x2282, 0x338}}, {0x2285, {0x2283, 0x338}}, {0x2288, {0x2286, 0x338}}, {0x2289, {0x2287, 0x338}}, -{0x22AC, {0x22A2, 0x338}}, {0x22AD, {0x22A8, 0x338}}, {0x22AE, {0x22A9, 0x338}}, {0x22AF, {0x22AB, 0x338}}, {0x22E0, {0x227C, 0x338}}, {0x22E1, {0x227D, 0x338}}, {0x22E2, {0x2291, 0x338}}, -{0x22E3, {0x2292, 0x338}}, {0x22EA, {0x22B2, 0x338}}, {0x22EB, {0x22B3, 0x338}}, {0x22EC, {0x22B4, 0x338}}, {0x22ED, {0x22B5, 0x338}}, {0x2329, {0x3008}}, {0x232A, {0x3009}}, -{0x2ADC, {0x2ADD, 0x338}}, {0x304C, {0x304B, 0x3099}}, {0x304E, {0x304D, 0x3099}}, {0x3050, {0x304F, 0x3099}}, {0x3052, {0x3051, 0x3099}}, {0x3054, {0x3053, 0x3099}}, {0x3056, {0x3055, 0x3099}}, -{0x3058, {0x3057, 0x3099}}, {0x305A, {0x3059, 0x3099}}, {0x305C, {0x305B, 0x3099}}, {0x305E, {0x305D, 0x3099}}, {0x3060, {0x305F, 0x3099}}, {0x3062, {0x3061, 0x3099}}, {0x3065, {0x3064, 0x3099}}, -{0x3067, {0x3066, 0x3099}}, {0x3069, {0x3068, 0x3099}}, {0x3070, {0x306F, 0x3099}}, {0x3071, {0x306F, 0x309A}}, {0x3073, {0x3072, 0x3099}}, {0x3074, {0x3072, 0x309A}}, {0x3076, {0x3075, 0x3099}}, -{0x3077, {0x3075, 0x309A}}, {0x3079, {0x3078, 0x3099}}, {0x307A, {0x3078, 0x309A}}, {0x307C, {0x307B, 0x3099}}, {0x307D, {0x307B, 0x309A}}, {0x3094, {0x3046, 0x3099}}, {0x309E, {0x309D, 0x3099}}, -{0x30AC, {0x30AB, 0x3099}}, {0x30AE, {0x30AD, 0x3099}}, {0x30B0, {0x30AF, 0x3099}}, {0x30B2, {0x30B1, 0x3099}}, {0x30B4, {0x30B3, 0x3099}}, {0x30B6, {0x30B5, 0x3099}}, {0x30B8, {0x30B7, 0x3099}}, -{0x30BA, {0x30B9, 0x3099}}, {0x30BC, {0x30BB, 0x3099}}, {0x30BE, {0x30BD, 0x3099}}, {0x30C0, {0x30BF, 0x3099}}, {0x30C2, {0x30C1, 0x3099}}, {0x30C5, {0x30C4, 0x3099}}, {0x30C7, {0x30C6, 0x3099}}, -{0x30C9, {0x30C8, 0x3099}}, {0x30D0, {0x30CF, 0x3099}}, {0x30D1, {0x30CF, 0x309A}}, {0x30D3, {0x30D2, 0x3099}}, {0x30D4, {0x30D2, 0x309A}}, {0x30D6, {0x30D5, 0x3099}}, {0x30D7, {0x30D5, 0x309A}}, -{0x30D9, {0x30D8, 0x3099}}, {0x30DA, {0x30D8, 0x309A}}, {0x30DC, {0x30DB, 0x3099}}, {0x30DD, {0x30DB, 0x309A}}, {0x30F4, {0x30A6, 0x3099}}, {0x30F7, {0x30EF, 0x3099}}, {0x30F8, {0x30F0, 0x3099}}, -{0x30F9, {0x30F1, 0x3099}}, {0x30FA, {0x30F2, 0x3099}}, {0x30FE, {0x30FD, 0x3099}}, {0xF900, {0x8C48}}, {0xF901, {0x66F4}}, {0xF902, {0x8ECA}}, {0xF903, {0x8CC8}}, {0xF904, {0x6ED1}}, -{0xF905, {0x4E32}}, {0xF906, {0x53E5}}, {0xF907, {0x9F9C}}, {0xF908, {0x9F9C}}, {0xF909, {0x5951}}, {0xF90A, {0x91D1}}, {0xF90B, {0x5587}}, {0xF90C, {0x5948}}, {0xF90D, {0x61F6}}, {0xF90E, {0x7669}}, -{0xF90F, {0x7F85}}, {0xF910, {0x863F}}, {0xF911, {0x87BA}}, {0xF912, {0x88F8}}, {0xF913, {0x908F}}, {0xF914, {0x6A02}}, {0xF915, {0x6D1B}}, {0xF916, {0x70D9}}, {0xF917, {0x73DE}}, {0xF918, {0x843D}}, -{0xF919, {0x916A}}, {0xF91A, {0x99F1}}, {0xF91B, {0x4E82}}, {0xF91C, {0x5375}}, {0xF91D, {0x6B04}}, {0xF91E, {0x721B}}, {0xF91F, {0x862D}}, {0xF920, {0x9E1E}}, {0xF921, {0x5D50}}, {0xF922, {0x6FEB}}, -{0xF923, {0x85CD}}, {0xF924, {0x8964}}, {0xF925, {0x62C9}}, {0xF926, {0x81D8}}, {0xF927, {0x881F}}, {0xF928, {0x5ECA}}, {0xF929, {0x6717}}, 
{0xF92A, {0x6D6A}}, {0xF92B, {0x72FC}}, {0xF92C, {0x90CE}}, -{0xF92D, {0x4F86}}, {0xF92E, {0x51B7}}, {0xF92F, {0x52DE}}, {0xF930, {0x64C4}}, {0xF931, {0x6AD3}}, {0xF932, {0x7210}}, {0xF933, {0x76E7}}, {0xF934, {0x8001}}, {0xF935, {0x8606}}, {0xF936, {0x865C}}, -{0xF937, {0x8DEF}}, {0xF938, {0x9732}}, {0xF939, {0x9B6F}}, {0xF93A, {0x9DFA}}, {0xF93B, {0x788C}}, {0xF93C, {0x797F}}, {0xF93D, {0x7DA0}}, {0xF93E, {0x83C9}}, {0xF93F, {0x9304}}, {0xF940, {0x9E7F}}, -{0xF941, {0x8AD6}}, {0xF942, {0x58DF}}, {0xF943, {0x5F04}}, {0xF944, {0x7C60}}, {0xF945, {0x807E}}, {0xF946, {0x7262}}, {0xF947, {0x78CA}}, {0xF948, {0x8CC2}}, {0xF949, {0x96F7}}, {0xF94A, {0x58D8}}, -{0xF94B, {0x5C62}}, {0xF94C, {0x6A13}}, {0xF94D, {0x6DDA}}, {0xF94E, {0x6F0F}}, {0xF94F, {0x7D2F}}, {0xF950, {0x7E37}}, {0xF951, {0x964B}}, {0xF952, {0x52D2}}, {0xF953, {0x808B}}, {0xF954, {0x51DC}}, -{0xF955, {0x51CC}}, {0xF956, {0x7A1C}}, {0xF957, {0x7DBE}}, {0xF958, {0x83F1}}, {0xF959, {0x9675}}, {0xF95A, {0x8B80}}, {0xF95B, {0x62CF}}, {0xF95C, {0x6A02}}, {0xF95D, {0x8AFE}}, {0xF95E, {0x4E39}}, -{0xF95F, {0x5BE7}}, {0xF960, {0x6012}}, {0xF961, {0x7387}}, {0xF962, {0x7570}}, {0xF963, {0x5317}}, {0xF964, {0x78FB}}, {0xF965, {0x4FBF}}, {0xF966, {0x5FA9}}, {0xF967, {0x4E0D}}, {0xF968, {0x6CCC}}, -{0xF969, {0x6578}}, {0xF96A, {0x7D22}}, {0xF96B, {0x53C3}}, {0xF96C, {0x585E}}, {0xF96D, {0x7701}}, {0xF96E, {0x8449}}, {0xF96F, {0x8AAA}}, {0xF970, {0x6BBA}}, {0xF971, {0x8FB0}}, {0xF972, {0x6C88}}, -{0xF973, {0x62FE}}, {0xF974, {0x82E5}}, {0xF975, {0x63A0}}, {0xF976, {0x7565}}, {0xF977, {0x4EAE}}, {0xF978, {0x5169}}, {0xF979, {0x51C9}}, {0xF97A, {0x6881}}, {0xF97B, {0x7CE7}}, {0xF97C, {0x826F}}, -{0xF97D, {0x8AD2}}, {0xF97E, {0x91CF}}, {0xF97F, {0x52F5}}, {0xF980, {0x5442}}, {0xF981, {0x5973}}, {0xF982, {0x5EEC}}, {0xF983, {0x65C5}}, {0xF984, {0x6FFE}}, {0xF985, {0x792A}}, {0xF986, {0x95AD}}, -{0xF987, {0x9A6A}}, {0xF988, {0x9E97}}, {0xF989, {0x9ECE}}, {0xF98A, {0x529B}}, {0xF98B, {0x66C6}}, {0xF98C, {0x6B77}}, {0xF98D, {0x8F62}}, {0xF98E, {0x5E74}}, {0xF98F, {0x6190}}, {0xF990, {0x6200}}, -{0xF991, {0x649A}}, {0xF992, {0x6F23}}, {0xF993, {0x7149}}, {0xF994, {0x7489}}, {0xF995, {0x79CA}}, {0xF996, {0x7DF4}}, {0xF997, {0x806F}}, {0xF998, {0x8F26}}, {0xF999, {0x84EE}}, {0xF99A, {0x9023}}, -{0xF99B, {0x934A}}, {0xF99C, {0x5217}}, {0xF99D, {0x52A3}}, {0xF99E, {0x54BD}}, {0xF99F, {0x70C8}}, {0xF9A0, {0x88C2}}, {0xF9A1, {0x8AAA}}, {0xF9A2, {0x5EC9}}, {0xF9A3, {0x5FF5}}, {0xF9A4, {0x637B}}, -{0xF9A5, {0x6BAE}}, {0xF9A6, {0x7C3E}}, {0xF9A7, {0x7375}}, {0xF9A8, {0x4EE4}}, {0xF9A9, {0x56F9}}, {0xF9AA, {0x5BE7}}, {0xF9AB, {0x5DBA}}, {0xF9AC, {0x601C}}, {0xF9AD, {0x73B2}}, {0xF9AE, {0x7469}}, -{0xF9AF, {0x7F9A}}, {0xF9B0, {0x8046}}, {0xF9B1, {0x9234}}, {0xF9B2, {0x96F6}}, {0xF9B3, {0x9748}}, {0xF9B4, {0x9818}}, {0xF9B5, {0x4F8B}}, {0xF9B6, {0x79AE}}, {0xF9B7, {0x91B4}}, {0xF9B8, {0x96B8}}, -{0xF9B9, {0x60E1}}, {0xF9BA, {0x4E86}}, {0xF9BB, {0x50DA}}, {0xF9BC, {0x5BEE}}, {0xF9BD, {0x5C3F}}, {0xF9BE, {0x6599}}, {0xF9BF, {0x6A02}}, {0xF9C0, {0x71CE}}, {0xF9C1, {0x7642}}, {0xF9C2, {0x84FC}}, -{0xF9C3, {0x907C}}, {0xF9C4, {0x9F8D}}, {0xF9C5, {0x6688}}, {0xF9C6, {0x962E}}, {0xF9C7, {0x5289}}, {0xF9C8, {0x677B}}, {0xF9C9, {0x67F3}}, {0xF9CA, {0x6D41}}, {0xF9CB, {0x6E9C}}, {0xF9CC, {0x7409}}, -{0xF9CD, {0x7559}}, {0xF9CE, {0x786B}}, {0xF9CF, {0x7D10}}, {0xF9D0, {0x985E}}, {0xF9D1, {0x516D}}, {0xF9D2, {0x622E}}, {0xF9D3, {0x9678}}, {0xF9D4, {0x502B}}, {0xF9D5, {0x5D19}}, {0xF9D6, {0x6DEA}}, -{0xF9D7, {0x8F2A}}, {0xF9D8, {0x5F8B}}, {0xF9D9, {0x6144}}, {0xF9DA, 
{0x6817}}, {0xF9DB, {0x7387}}, {0xF9DC, {0x9686}}, {0xF9DD, {0x5229}}, {0xF9DE, {0x540F}}, {0xF9DF, {0x5C65}}, {0xF9E0, {0x6613}}, -{0xF9E1, {0x674E}}, {0xF9E2, {0x68A8}}, {0xF9E3, {0x6CE5}}, {0xF9E4, {0x7406}}, {0xF9E5, {0x75E2}}, {0xF9E6, {0x7F79}}, {0xF9E7, {0x88CF}}, {0xF9E8, {0x88E1}}, {0xF9E9, {0x91CC}}, {0xF9EA, {0x96E2}}, -{0xF9EB, {0x533F}}, {0xF9EC, {0x6EBA}}, {0xF9ED, {0x541D}}, {0xF9EE, {0x71D0}}, {0xF9EF, {0x7498}}, {0xF9F0, {0x85FA}}, {0xF9F1, {0x96A3}}, {0xF9F2, {0x9C57}}, {0xF9F3, {0x9E9F}}, {0xF9F4, {0x6797}}, -{0xF9F5, {0x6DCB}}, {0xF9F6, {0x81E8}}, {0xF9F7, {0x7ACB}}, {0xF9F8, {0x7B20}}, {0xF9F9, {0x7C92}}, {0xF9FA, {0x72C0}}, {0xF9FB, {0x7099}}, {0xF9FC, {0x8B58}}, {0xF9FD, {0x4EC0}}, {0xF9FE, {0x8336}}, -{0xF9FF, {0x523A}}, {0xFA00, {0x5207}}, {0xFA01, {0x5EA6}}, {0xFA02, {0x62D3}}, {0xFA03, {0x7CD6}}, {0xFA04, {0x5B85}}, {0xFA05, {0x6D1E}}, {0xFA06, {0x66B4}}, {0xFA07, {0x8F3B}}, {0xFA08, {0x884C}}, -{0xFA09, {0x964D}}, {0xFA0A, {0x898B}}, {0xFA0B, {0x5ED3}}, {0xFA0C, {0x5140}}, {0xFA0D, {0x55C0}}, {0xFA10, {0x585A}}, {0xFA12, {0x6674}}, {0xFA15, {0x51DE}}, {0xFA16, {0x732A}}, {0xFA17, {0x76CA}}, -{0xFA18, {0x793C}}, {0xFA19, {0x795E}}, {0xFA1A, {0x7965}}, {0xFA1B, {0x798F}}, {0xFA1C, {0x9756}}, {0xFA1D, {0x7CBE}}, {0xFA1E, {0x7FBD}}, {0xFA20, {0x8612}}, {0xFA22, {0x8AF8}}, {0xFA25, {0x9038}}, -{0xFA26, {0x90FD}}, {0xFA2A, {0x98EF}}, {0xFA2B, {0x98FC}}, {0xFA2C, {0x9928}}, {0xFA2D, {0x9DB4}}, {0xFA2E, {0x90DE}}, {0xFA2F, {0x96B7}}, {0xFA30, {0x4FAE}}, {0xFA31, {0x50E7}}, {0xFA32, {0x514D}}, -{0xFA33, {0x52C9}}, {0xFA34, {0x52E4}}, {0xFA35, {0x5351}}, {0xFA36, {0x559D}}, {0xFA37, {0x5606}}, {0xFA38, {0x5668}}, {0xFA39, {0x5840}}, {0xFA3A, {0x58A8}}, {0xFA3B, {0x5C64}}, {0xFA3C, {0x5C6E}}, -{0xFA3D, {0x6094}}, {0xFA3E, {0x6168}}, {0xFA3F, {0x618E}}, {0xFA40, {0x61F2}}, {0xFA41, {0x654F}}, {0xFA42, {0x65E2}}, {0xFA43, {0x6691}}, {0xFA44, {0x6885}}, {0xFA45, {0x6D77}}, {0xFA46, {0x6E1A}}, -{0xFA47, {0x6F22}}, {0xFA48, {0x716E}}, {0xFA49, {0x722B}}, {0xFA4A, {0x7422}}, {0xFA4B, {0x7891}}, {0xFA4C, {0x793E}}, {0xFA4D, {0x7949}}, {0xFA4E, {0x7948}}, {0xFA4F, {0x7950}}, {0xFA50, {0x7956}}, -{0xFA51, {0x795D}}, {0xFA52, {0x798D}}, {0xFA53, {0x798E}}, {0xFA54, {0x7A40}}, {0xFA55, {0x7A81}}, {0xFA56, {0x7BC0}}, {0xFA57, {0x7DF4}}, {0xFA58, {0x7E09}}, {0xFA59, {0x7E41}}, {0xFA5A, {0x7F72}}, -{0xFA5B, {0x8005}}, {0xFA5C, {0x81ED}}, {0xFA5D, {0x8279}}, {0xFA5E, {0x8279}}, {0xFA5F, {0x8457}}, {0xFA60, {0x8910}}, {0xFA61, {0x8996}}, {0xFA62, {0x8B01}}, {0xFA63, {0x8B39}}, {0xFA64, {0x8CD3}}, -{0xFA65, {0x8D08}}, {0xFA66, {0x8FB6}}, {0xFA67, {0x9038}}, {0xFA68, {0x96E3}}, {0xFA69, {0x97FF}}, {0xFA6A, {0x983B}}, {0xFA6B, {0x6075}}, {0xFA6C, {0x242EE}}, {0xFA6D, {0x8218}}, {0xFA70, {0x4E26}}, -{0xFA71, {0x51B5}}, {0xFA72, {0x5168}}, {0xFA73, {0x4F80}}, {0xFA74, {0x5145}}, {0xFA75, {0x5180}}, {0xFA76, {0x52C7}}, {0xFA77, {0x52FA}}, {0xFA78, {0x559D}}, {0xFA79, {0x5555}}, {0xFA7A, {0x5599}}, -{0xFA7B, {0x55E2}}, {0xFA7C, {0x585A}}, {0xFA7D, {0x58B3}}, {0xFA7E, {0x5944}}, {0xFA7F, {0x5954}}, {0xFA80, {0x5A62}}, {0xFA81, {0x5B28}}, {0xFA82, {0x5ED2}}, {0xFA83, {0x5ED9}}, {0xFA84, {0x5F69}}, -{0xFA85, {0x5FAD}}, {0xFA86, {0x60D8}}, {0xFA87, {0x614E}}, {0xFA88, {0x6108}}, {0xFA89, {0x618E}}, {0xFA8A, {0x6160}}, {0xFA8B, {0x61F2}}, {0xFA8C, {0x6234}}, {0xFA8D, {0x63C4}}, {0xFA8E, {0x641C}}, -{0xFA8F, {0x6452}}, {0xFA90, {0x6556}}, {0xFA91, {0x6674}}, {0xFA92, {0x6717}}, {0xFA93, {0x671B}}, {0xFA94, {0x6756}}, {0xFA95, {0x6B79}}, {0xFA96, {0x6BBA}}, {0xFA97, {0x6D41}}, {0xFA98, {0x6EDB}}, 
-{0xFA99, {0x6ECB}}, {0xFA9A, {0x6F22}}, {0xFA9B, {0x701E}}, {0xFA9C, {0x716E}}, {0xFA9D, {0x77A7}}, {0xFA9E, {0x7235}}, {0xFA9F, {0x72AF}}, {0xFAA0, {0x732A}}, {0xFAA1, {0x7471}}, {0xFAA2, {0x7506}}, -{0xFAA3, {0x753B}}, {0xFAA4, {0x761D}}, {0xFAA5, {0x761F}}, {0xFAA6, {0x76CA}}, {0xFAA7, {0x76DB}}, {0xFAA8, {0x76F4}}, {0xFAA9, {0x774A}}, {0xFAAA, {0x7740}}, {0xFAAB, {0x78CC}}, {0xFAAC, {0x7AB1}}, -{0xFAAD, {0x7BC0}}, {0xFAAE, {0x7C7B}}, {0xFAAF, {0x7D5B}}, {0xFAB0, {0x7DF4}}, {0xFAB1, {0x7F3E}}, {0xFAB2, {0x8005}}, {0xFAB3, {0x8352}}, {0xFAB4, {0x83EF}}, {0xFAB5, {0x8779}}, {0xFAB6, {0x8941}}, -{0xFAB7, {0x8986}}, {0xFAB8, {0x8996}}, {0xFAB9, {0x8ABF}}, {0xFABA, {0x8AF8}}, {0xFABB, {0x8ACB}}, {0xFABC, {0x8B01}}, {0xFABD, {0x8AFE}}, {0xFABE, {0x8AED}}, {0xFABF, {0x8B39}}, {0xFAC0, {0x8B8A}}, -{0xFAC1, {0x8D08}}, {0xFAC2, {0x8F38}}, {0xFAC3, {0x9072}}, {0xFAC4, {0x9199}}, {0xFAC5, {0x9276}}, {0xFAC6, {0x967C}}, {0xFAC7, {0x96E3}}, {0xFAC8, {0x9756}}, {0xFAC9, {0x97DB}}, {0xFACA, {0x97FF}}, -{0xFACB, {0x980B}}, {0xFACC, {0x983B}}, {0xFACD, {0x9B12}}, {0xFACE, {0x9F9C}}, {0xFACF, {0x2284A}}, {0xFAD0, {0x22844}}, {0xFAD1, {0x233D5}}, {0xFAD2, {0x3B9D}}, {0xFAD3, {0x4018}}, -{0xFAD4, {0x4039}}, {0xFAD5, {0x25249}}, {0xFAD6, {0x25CD0}}, {0xFAD7, {0x27ED3}}, {0xFAD8, {0x9F43}}, {0xFAD9, {0x9F8E}}, {0xFB1D, {0x5D9, 0x5B4}}, {0xFB1F, {0x5F2, 0x5B7}}, {0xFB2A, {0x5E9, 0x5C1}}, -{0xFB2B, {0x5E9, 0x5C2}}, {0xFB2C, {0x5E9, 0x5BC, 0x5C1}}, {0xFB2D, {0x5E9, 0x5BC, 0x5C2}}, {0xFB2E, {0x5D0, 0x5B7}}, {0xFB2F, {0x5D0, 0x5B8}}, {0xFB30, {0x5D0, 0x5BC}}, {0xFB31, {0x5D1, 0x5BC}}, -{0xFB32, {0x5D2, 0x5BC}}, {0xFB33, {0x5D3, 0x5BC}}, {0xFB34, {0x5D4, 0x5BC}}, {0xFB35, {0x5D5, 0x5BC}}, {0xFB36, {0x5D6, 0x5BC}}, {0xFB38, {0x5D8, 0x5BC}}, {0xFB39, {0x5D9, 0x5BC}}, -{0xFB3A, {0x5DA, 0x5BC}}, {0xFB3B, {0x5DB, 0x5BC}}, {0xFB3C, {0x5DC, 0x5BC}}, {0xFB3E, {0x5DE, 0x5BC}}, {0xFB40, {0x5E0, 0x5BC}}, {0xFB41, {0x5E1, 0x5BC}}, {0xFB43, {0x5E3, 0x5BC}}, -{0xFB44, {0x5E4, 0x5BC}}, {0xFB46, {0x5E6, 0x5BC}}, {0xFB47, {0x5E7, 0x5BC}}, {0xFB48, {0x5E8, 0x5BC}}, {0xFB49, {0x5E9, 0x5BC}}, {0xFB4A, {0x5EA, 0x5BC}}, {0xFB4B, {0x5D5, 0x5B9}}, -{0xFB4C, {0x5D1, 0x5BF}}, {0xFB4D, {0x5DB, 0x5BF}}, {0xFB4E, {0x5E4, 0x5BF}}, {0x1109A, {0x11099, 0x110BA}}, {0x1109C, {0x1109B, 0x110BA}}, {0x110AB, {0x110A5, 0x110BA}}, -{0x1112E, {0x11131, 0x11127}}, {0x1112F, {0x11132, 0x11127}}, {0x1134B, {0x11347, 0x1133E}}, {0x1134C, {0x11347, 0x11357}}, {0x114BB, {0x114B9, 0x114BA}}, {0x114BC, {0x114B9, 0x114B0}}, -{0x114BE, {0x114B9, 0x114BD}}, {0x115BA, {0x115B8, 0x115AF}}, {0x115BB, {0x115B9, 0x115AF}}, {0x1D15E, {0x1D157, 0x1D165}}, {0x1D15F, {0x1D158, 0x1D165}}, {0x1D160, {0x1D158, 0x1D165, 0x1D16E}}, -{0x1D161, {0x1D158, 0x1D165, 0x1D16F}}, {0x1D162, {0x1D158, 0x1D165, 0x1D170}}, {0x1D163, {0x1D158, 0x1D165, 0x1D171}}, {0x1D164, {0x1D158, 0x1D165, 0x1D172}}, {0x1D1BB, {0x1D1B9, 0x1D165}}, -{0x1D1BC, {0x1D1BA, 0x1D165}}, {0x1D1BD, {0x1D1B9, 0x1D165, 0x1D16E}}, {0x1D1BE, {0x1D1BA, 0x1D165, 0x1D16E}}, {0x1D1BF, {0x1D1B9, 0x1D165, 0x1D16F}}, {0x1D1C0, {0x1D1BA, 0x1D165, 0x1D16F}}, -{0x2F800, {0x4E3D}}, {0x2F801, {0x4E38}}, {0x2F802, {0x4E41}}, {0x2F803, {0x20122}}, {0x2F804, {0x4F60}}, {0x2F805, {0x4FAE}}, {0x2F806, {0x4FBB}}, {0x2F807, {0x5002}}, {0x2F808, {0x507A}}, -{0x2F809, {0x5099}}, {0x2F80A, {0x50E7}}, {0x2F80B, {0x50CF}}, {0x2F80C, {0x349E}}, {0x2F80D, {0x2063A}}, {0x2F80E, {0x514D}}, {0x2F80F, {0x5154}}, {0x2F810, {0x5164}}, {0x2F811, {0x5177}}, -{0x2F812, {0x2051C}}, {0x2F813, {0x34B9}}, {0x2F814, {0x5167}}, {0x2F815, {0x518D}}, {0x2F816, 
{0x2054B}}, {0x2F817, {0x5197}}, {0x2F818, {0x51A4}}, {0x2F819, {0x4ECC}}, {0x2F81A, {0x51AC}}, -{0x2F81B, {0x51B5}}, {0x2F81C, {0x291DF}}, {0x2F81D, {0x51F5}}, {0x2F81E, {0x5203}}, {0x2F81F, {0x34DF}}, {0x2F820, {0x523B}}, {0x2F821, {0x5246}}, {0x2F822, {0x5272}}, {0x2F823, {0x5277}}, -{0x2F824, {0x3515}}, {0x2F825, {0x52C7}}, {0x2F826, {0x52C9}}, {0x2F827, {0x52E4}}, {0x2F828, {0x52FA}}, {0x2F829, {0x5305}}, {0x2F82A, {0x5306}}, {0x2F82B, {0x5317}}, {0x2F82C, {0x5349}}, -{0x2F82D, {0x5351}}, {0x2F82E, {0x535A}}, {0x2F82F, {0x5373}}, {0x2F830, {0x537D}}, {0x2F831, {0x537F}}, {0x2F832, {0x537F}}, {0x2F833, {0x537F}}, {0x2F834, {0x20A2C}}, {0x2F835, {0x7070}}, -{0x2F836, {0x53CA}}, {0x2F837, {0x53DF}}, {0x2F838, {0x20B63}}, {0x2F839, {0x53EB}}, {0x2F83A, {0x53F1}}, {0x2F83B, {0x5406}}, {0x2F83C, {0x549E}}, {0x2F83D, {0x5438}}, {0x2F83E, {0x5448}}, -{0x2F83F, {0x5468}}, {0x2F840, {0x54A2}}, {0x2F841, {0x54F6}}, {0x2F842, {0x5510}}, {0x2F843, {0x5553}}, {0x2F844, {0x5563}}, {0x2F845, {0x5584}}, {0x2F846, {0x5584}}, {0x2F847, {0x5599}}, -{0x2F848, {0x55AB}}, {0x2F849, {0x55B3}}, {0x2F84A, {0x55C2}}, {0x2F84B, {0x5716}}, {0x2F84C, {0x5606}}, {0x2F84D, {0x5717}}, {0x2F84E, {0x5651}}, {0x2F84F, {0x5674}}, {0x2F850, {0x5207}}, -{0x2F851, {0x58EE}}, {0x2F852, {0x57CE}}, {0x2F853, {0x57F4}}, {0x2F854, {0x580D}}, {0x2F855, {0x578B}}, {0x2F856, {0x5832}}, {0x2F857, {0x5831}}, {0x2F858, {0x58AC}}, {0x2F859, {0x214E4}}, -{0x2F85A, {0x58F2}}, {0x2F85B, {0x58F7}}, {0x2F85C, {0x5906}}, {0x2F85D, {0x591A}}, {0x2F85E, {0x5922}}, {0x2F85F, {0x5962}}, {0x2F860, {0x216A8}}, {0x2F861, {0x216EA}}, {0x2F862, {0x59EC}}, -{0x2F863, {0x5A1B}}, {0x2F864, {0x5A27}}, {0x2F865, {0x59D8}}, {0x2F866, {0x5A66}}, {0x2F867, {0x36EE}}, {0x2F868, {0x36FC}}, {0x2F869, {0x5B08}}, {0x2F86A, {0x5B3E}}, {0x2F86B, {0x5B3E}}, -{0x2F86C, {0x219C8}}, {0x2F86D, {0x5BC3}}, {0x2F86E, {0x5BD8}}, {0x2F86F, {0x5BE7}}, {0x2F870, {0x5BF3}}, {0x2F871, {0x21B18}}, {0x2F872, {0x5BFF}}, {0x2F873, {0x5C06}}, {0x2F874, {0x5F53}}, -{0x2F875, {0x5C22}}, {0x2F876, {0x3781}}, {0x2F877, {0x5C60}}, {0x2F878, {0x5C6E}}, {0x2F879, {0x5CC0}}, {0x2F87A, {0x5C8D}}, {0x2F87B, {0x21DE4}}, {0x2F87C, {0x5D43}}, {0x2F87D, {0x21DE6}}, -{0x2F87E, {0x5D6E}}, {0x2F87F, {0x5D6B}}, {0x2F880, {0x5D7C}}, {0x2F881, {0x5DE1}}, {0x2F882, {0x5DE2}}, {0x2F883, {0x382F}}, {0x2F884, {0x5DFD}}, {0x2F885, {0x5E28}}, {0x2F886, {0x5E3D}}, -{0x2F887, {0x5E69}}, {0x2F888, {0x3862}}, {0x2F889, {0x22183}}, {0x2F88A, {0x387C}}, {0x2F88B, {0x5EB0}}, {0x2F88C, {0x5EB3}}, {0x2F88D, {0x5EB6}}, {0x2F88E, {0x5ECA}}, {0x2F88F, {0x2A392}}, -{0x2F890, {0x5EFE}}, {0x2F891, {0x22331}}, {0x2F892, {0x22331}}, {0x2F893, {0x8201}}, {0x2F894, {0x5F22}}, {0x2F895, {0x5F22}}, {0x2F896, {0x38C7}}, {0x2F897, {0x232B8}}, {0x2F898, {0x261DA}}, -{0x2F899, {0x5F62}}, {0x2F89A, {0x5F6B}}, {0x2F89B, {0x38E3}}, {0x2F89C, {0x5F9A}}, {0x2F89D, {0x5FCD}}, {0x2F89E, {0x5FD7}}, {0x2F89F, {0x5FF9}}, {0x2F8A0, {0x6081}}, {0x2F8A1, {0x393A}}, -{0x2F8A2, {0x391C}}, {0x2F8A3, {0x6094}}, {0x2F8A4, {0x226D4}}, {0x2F8A5, {0x60C7}}, {0x2F8A6, {0x6148}}, {0x2F8A7, {0x614C}}, {0x2F8A8, {0x614E}}, {0x2F8A9, {0x614C}}, {0x2F8AA, {0x617A}}, -{0x2F8AB, {0x618E}}, {0x2F8AC, {0x61B2}}, {0x2F8AD, {0x61A4}}, {0x2F8AE, {0x61AF}}, {0x2F8AF, {0x61DE}}, {0x2F8B0, {0x61F2}}, {0x2F8B1, {0x61F6}}, {0x2F8B2, {0x6210}}, {0x2F8B3, {0x621B}}, -{0x2F8B4, {0x625D}}, {0x2F8B5, {0x62B1}}, {0x2F8B6, {0x62D4}}, {0x2F8B7, {0x6350}}, {0x2F8B8, {0x22B0C}}, {0x2F8B9, {0x633D}}, {0x2F8BA, {0x62FC}}, {0x2F8BB, {0x6368}}, {0x2F8BC, {0x6383}}, -{0x2F8BD, 
{0x63E4}}, {0x2F8BE, {0x22BF1}}, {0x2F8BF, {0x6422}}, {0x2F8C0, {0x63C5}}, {0x2F8C1, {0x63A9}}, {0x2F8C2, {0x3A2E}}, {0x2F8C3, {0x6469}}, {0x2F8C4, {0x647E}}, {0x2F8C5, {0x649D}}, -{0x2F8C6, {0x6477}}, {0x2F8C7, {0x3A6C}}, {0x2F8C8, {0x654F}}, {0x2F8C9, {0x656C}}, {0x2F8CA, {0x2300A}}, {0x2F8CB, {0x65E3}}, {0x2F8CC, {0x66F8}}, {0x2F8CD, {0x6649}}, {0x2F8CE, {0x3B19}}, -{0x2F8CF, {0x6691}}, {0x2F8D0, {0x3B08}}, {0x2F8D1, {0x3AE4}}, {0x2F8D2, {0x5192}}, {0x2F8D3, {0x5195}}, {0x2F8D4, {0x6700}}, {0x2F8D5, {0x669C}}, {0x2F8D6, {0x80AD}}, {0x2F8D7, {0x43D9}}, -{0x2F8D8, {0x6717}}, {0x2F8D9, {0x671B}}, {0x2F8DA, {0x6721}}, {0x2F8DB, {0x675E}}, {0x2F8DC, {0x6753}}, {0x2F8DD, {0x233C3}}, {0x2F8DE, {0x3B49}}, {0x2F8DF, {0x67FA}}, {0x2F8E0, {0x6785}}, -{0x2F8E1, {0x6852}}, {0x2F8E2, {0x6885}}, {0x2F8E3, {0x2346D}}, {0x2F8E4, {0x688E}}, {0x2F8E5, {0x681F}}, {0x2F8E6, {0x6914}}, {0x2F8E7, {0x3B9D}}, {0x2F8E8, {0x6942}}, {0x2F8E9, {0x69A3}}, -{0x2F8EA, {0x69EA}}, {0x2F8EB, {0x6AA8}}, {0x2F8EC, {0x236A3}}, {0x2F8ED, {0x6ADB}}, {0x2F8EE, {0x3C18}}, {0x2F8EF, {0x6B21}}, {0x2F8F0, {0x238A7}}, {0x2F8F1, {0x6B54}}, {0x2F8F2, {0x3C4E}}, -{0x2F8F3, {0x6B72}}, {0x2F8F4, {0x6B9F}}, {0x2F8F5, {0x6BBA}}, {0x2F8F6, {0x6BBB}}, {0x2F8F7, {0x23A8D}}, {0x2F8F8, {0x21D0B}}, {0x2F8F9, {0x23AFA}}, {0x2F8FA, {0x6C4E}}, {0x2F8FB, {0x23CBC}}, -{0x2F8FC, {0x6CBF}}, {0x2F8FD, {0x6CCD}}, {0x2F8FE, {0x6C67}}, {0x2F8FF, {0x6D16}}, {0x2F900, {0x6D3E}}, {0x2F901, {0x6D77}}, {0x2F902, {0x6D41}}, {0x2F903, {0x6D69}}, {0x2F904, {0x6D78}}, -{0x2F905, {0x6D85}}, {0x2F906, {0x23D1E}}, {0x2F907, {0x6D34}}, {0x2F908, {0x6E2F}}, {0x2F909, {0x6E6E}}, {0x2F90A, {0x3D33}}, {0x2F90B, {0x6ECB}}, {0x2F90C, {0x6EC7}}, {0x2F90D, {0x23ED1}}, -{0x2F90E, {0x6DF9}}, {0x2F90F, {0x6F6E}}, {0x2F910, {0x23F5E}}, {0x2F911, {0x23F8E}}, {0x2F912, {0x6FC6}}, {0x2F913, {0x7039}}, {0x2F914, {0x701E}}, {0x2F915, {0x701B}}, {0x2F916, {0x3D96}}, -{0x2F917, {0x704A}}, {0x2F918, {0x707D}}, {0x2F919, {0x7077}}, {0x2F91A, {0x70AD}}, {0x2F91B, {0x20525}}, {0x2F91C, {0x7145}}, {0x2F91D, {0x24263}}, {0x2F91E, {0x719C}}, {0x2F91F, {0x243AB}}, -{0x2F920, {0x7228}}, {0x2F921, {0x7235}}, {0x2F922, {0x7250}}, {0x2F923, {0x24608}}, {0x2F924, {0x7280}}, {0x2F925, {0x7295}}, {0x2F926, {0x24735}}, {0x2F927, {0x24814}}, {0x2F928, {0x737A}}, -{0x2F929, {0x738B}}, {0x2F92A, {0x3EAC}}, {0x2F92B, {0x73A5}}, {0x2F92C, {0x3EB8}}, {0x2F92D, {0x3EB8}}, {0x2F92E, {0x7447}}, {0x2F92F, {0x745C}}, {0x2F930, {0x7471}}, {0x2F931, {0x7485}}, -{0x2F932, {0x74CA}}, {0x2F933, {0x3F1B}}, {0x2F934, {0x7524}}, {0x2F935, {0x24C36}}, {0x2F936, {0x753E}}, {0x2F937, {0x24C92}}, {0x2F938, {0x7570}}, {0x2F939, {0x2219F}}, {0x2F93A, {0x7610}}, -{0x2F93B, {0x24FA1}}, {0x2F93C, {0x24FB8}}, {0x2F93D, {0x25044}}, {0x2F93E, {0x3FFC}}, {0x2F93F, {0x4008}}, {0x2F940, {0x76F4}}, {0x2F941, {0x250F3}}, {0x2F942, {0x250F2}}, {0x2F943, {0x25119}}, -{0x2F944, {0x25133}}, {0x2F945, {0x771E}}, {0x2F946, {0x771F}}, {0x2F947, {0x771F}}, {0x2F948, {0x774A}}, {0x2F949, {0x4039}}, {0x2F94A, {0x778B}}, {0x2F94B, {0x4046}}, {0x2F94C, {0x4096}}, -{0x2F94D, {0x2541D}}, {0x2F94E, {0x784E}}, {0x2F94F, {0x788C}}, {0x2F950, {0x78CC}}, {0x2F951, {0x40E3}}, {0x2F952, {0x25626}}, {0x2F953, {0x7956}}, {0x2F954, {0x2569A}}, {0x2F955, {0x256C5}}, -{0x2F956, {0x798F}}, {0x2F957, {0x79EB}}, {0x2F958, {0x412F}}, {0x2F959, {0x7A40}}, {0x2F95A, {0x7A4A}}, {0x2F95B, {0x7A4F}}, {0x2F95C, {0x2597C}}, {0x2F95D, {0x25AA7}}, {0x2F95E, {0x25AA7}}, -{0x2F95F, {0x7AEE}}, {0x2F960, {0x4202}}, {0x2F961, {0x25BAB}}, {0x2F962, {0x7BC6}}, {0x2F963, {0x7BC9}}, 
{0x2F964, {0x4227}}, {0x2F965, {0x25C80}}, {0x2F966, {0x7CD2}}, {0x2F967, {0x42A0}}, -{0x2F968, {0x7CE8}}, {0x2F969, {0x7CE3}}, {0x2F96A, {0x7D00}}, {0x2F96B, {0x25F86}}, {0x2F96C, {0x7D63}}, {0x2F96D, {0x4301}}, {0x2F96E, {0x7DC7}}, {0x2F96F, {0x7E02}}, {0x2F970, {0x7E45}}, -{0x2F971, {0x4334}}, {0x2F972, {0x26228}}, {0x2F973, {0x26247}}, {0x2F974, {0x4359}}, {0x2F975, {0x262D9}}, {0x2F976, {0x7F7A}}, {0x2F977, {0x2633E}}, {0x2F978, {0x7F95}}, {0x2F979, {0x7FFA}}, -{0x2F97A, {0x8005}}, {0x2F97B, {0x264DA}}, {0x2F97C, {0x26523}}, {0x2F97D, {0x8060}}, {0x2F97E, {0x265A8}}, {0x2F97F, {0x8070}}, {0x2F980, {0x2335F}}, {0x2F981, {0x43D5}}, {0x2F982, {0x80B2}}, -{0x2F983, {0x8103}}, {0x2F984, {0x440B}}, {0x2F985, {0x813E}}, {0x2F986, {0x5AB5}}, {0x2F987, {0x267A7}}, {0x2F988, {0x267B5}}, {0x2F989, {0x23393}}, {0x2F98A, {0x2339C}}, {0x2F98B, {0x8201}}, -{0x2F98C, {0x8204}}, {0x2F98D, {0x8F9E}}, {0x2F98E, {0x446B}}, {0x2F98F, {0x8291}}, {0x2F990, {0x828B}}, {0x2F991, {0x829D}}, {0x2F992, {0x52B3}}, {0x2F993, {0x82B1}}, {0x2F994, {0x82B3}}, -{0x2F995, {0x82BD}}, {0x2F996, {0x82E6}}, {0x2F997, {0x26B3C}}, {0x2F998, {0x82E5}}, {0x2F999, {0x831D}}, {0x2F99A, {0x8363}}, {0x2F99B, {0x83AD}}, {0x2F99C, {0x8323}}, {0x2F99D, {0x83BD}}, -{0x2F99E, {0x83E7}}, {0x2F99F, {0x8457}}, {0x2F9A0, {0x8353}}, {0x2F9A1, {0x83CA}}, {0x2F9A2, {0x83CC}}, {0x2F9A3, {0x83DC}}, {0x2F9A4, {0x26C36}}, {0x2F9A5, {0x26D6B}}, {0x2F9A6, {0x26CD5}}, -{0x2F9A7, {0x452B}}, {0x2F9A8, {0x84F1}}, {0x2F9A9, {0x84F3}}, {0x2F9AA, {0x8516}}, {0x2F9AB, {0x273CA}}, {0x2F9AC, {0x8564}}, {0x2F9AD, {0x26F2C}}, {0x2F9AE, {0x455D}}, {0x2F9AF, {0x4561}}, -{0x2F9B0, {0x26FB1}}, {0x2F9B1, {0x270D2}}, {0x2F9B2, {0x456B}}, {0x2F9B3, {0x8650}}, {0x2F9B4, {0x865C}}, {0x2F9B5, {0x8667}}, {0x2F9B6, {0x8669}}, {0x2F9B7, {0x86A9}}, {0x2F9B8, {0x8688}}, -{0x2F9B9, {0x870E}}, {0x2F9BA, {0x86E2}}, {0x2F9BB, {0x8779}}, {0x2F9BC, {0x8728}}, {0x2F9BD, {0x876B}}, {0x2F9BE, {0x8786}}, {0x2F9BF, {0x45D7}}, {0x2F9C0, {0x87E1}}, {0x2F9C1, {0x8801}}, -{0x2F9C2, {0x45F9}}, {0x2F9C3, {0x8860}}, {0x2F9C4, {0x8863}}, {0x2F9C5, {0x27667}}, {0x2F9C6, {0x88D7}}, {0x2F9C7, {0x88DE}}, {0x2F9C8, {0x4635}}, {0x2F9C9, {0x88FA}}, {0x2F9CA, {0x34BB}}, -{0x2F9CB, {0x278AE}}, {0x2F9CC, {0x27966}}, {0x2F9CD, {0x46BE}}, {0x2F9CE, {0x46C7}}, {0x2F9CF, {0x8AA0}}, {0x2F9D0, {0x8AED}}, {0x2F9D1, {0x8B8A}}, {0x2F9D2, {0x8C55}}, {0x2F9D3, {0x27CA8}}, -{0x2F9D4, {0x8CAB}}, {0x2F9D5, {0x8CC1}}, {0x2F9D6, {0x8D1B}}, {0x2F9D7, {0x8D77}}, {0x2F9D8, {0x27F2F}}, {0x2F9D9, {0x20804}}, {0x2F9DA, {0x8DCB}}, {0x2F9DB, {0x8DBC}}, {0x2F9DC, {0x8DF0}}, -{0x2F9DD, {0x208DE}}, {0x2F9DE, {0x8ED4}}, {0x2F9DF, {0x8F38}}, {0x2F9E0, {0x285D2}}, {0x2F9E1, {0x285ED}}, {0x2F9E2, {0x9094}}, {0x2F9E3, {0x90F1}}, {0x2F9E4, {0x9111}}, {0x2F9E5, {0x2872E}}, -{0x2F9E6, {0x911B}}, {0x2F9E7, {0x9238}}, {0x2F9E8, {0x92D7}}, {0x2F9E9, {0x92D8}}, {0x2F9EA, {0x927C}}, {0x2F9EB, {0x93F9}}, {0x2F9EC, {0x9415}}, {0x2F9ED, {0x28BFA}}, {0x2F9EE, {0x958B}}, -{0x2F9EF, {0x4995}}, {0x2F9F0, {0x95B7}}, {0x2F9F1, {0x28D77}}, {0x2F9F2, {0x49E6}}, {0x2F9F3, {0x96C3}}, {0x2F9F4, {0x5DB2}}, {0x2F9F5, {0x9723}}, {0x2F9F6, {0x29145}}, {0x2F9F7, {0x2921A}}, -{0x2F9F8, {0x4A6E}}, {0x2F9F9, {0x4A76}}, {0x2F9FA, {0x97E0}}, {0x2F9FB, {0x2940A}}, {0x2F9FC, {0x4AB2}}, {0x2F9FD, {0x29496}}, {0x2F9FE, {0x980B}}, {0x2F9FF, {0x980B}}, {0x2FA00, {0x9829}}, -{0x2FA01, {0x295B6}}, {0x2FA02, {0x98E2}}, {0x2FA03, {0x4B33}}, {0x2FA04, {0x9929}}, {0x2FA05, {0x99A7}}, {0x2FA06, {0x99C2}}, {0x2FA07, {0x99FE}}, {0x2FA08, {0x4BCE}}, {0x2FA09, {0x29B30}}, -{0x2FA0A, 
{0x9B12}}, {0x2FA0B, {0x9C40}}, {0x2FA0C, {0x9CFD}}, {0x2FA0D, {0x4CCE}}, {0x2FA0E, {0x4CED}}, {0x2FA0F, {0x9D67}}, {0x2FA10, {0x2A0CE}}, {0x2FA11, {0x4CF8}}, {0x2FA12, {0x2A105}},
-{0x2FA13, {0x2A20E}}, {0x2FA14, {0x2A291}}, {0x2FA15, {0x9EBB}}, {0x2FA16, {0x4D56}}, {0x2FA17, {0x9EF9}}, {0x2FA18, {0x9EFE}}, {0x2FA19, {0x9F05}}, {0x2FA1A, {0x9F0F}}, {0x2FA1B, {0x9F16}},
-{0x2FA1D, {0x2A600}},
+static const std::multimap<uint32_t, uint32_t> nfd_map = {
+{0xC0, 0x41}, {0xC0, 0x300}, {0xC1, 0x41}, {0xC1, 0x301}, {0xC2, 0x41}, {0xC2, 0x302}, {0xC3, 0x41}, {0xC3, 0x303}, {0xC4, 0x41}, {0xC4, 0x308}, {0xC5, 0x41}, {0xC5, 0x30A}, {0xC7, 0x43},
+{0xC7, 0x327}, {0xC8, 0x45}, {0xC8, 0x300}, {0xC9, 0x45}, {0xC9, 0x301}, {0xCA, 0x45}, {0xCA, 0x302}, {0xCB, 0x45}, {0xCB, 0x308}, {0xCC, 0x49}, {0xCC, 0x300}, {0xCD, 0x49}, {0xCD, 0x301},
+{0xCE, 0x49}, {0xCE, 0x302}, {0xCF, 0x49}, {0xCF, 0x308}, {0xD1, 0x4E}, {0xD1, 0x303}, {0xD2, 0x4F}, {0xD2, 0x300}, {0xD3, 0x4F}, {0xD3, 0x301}, {0xD4, 0x4F}, {0xD4, 0x302}, {0xD5, 0x4F},
+{0xD5, 0x303}, {0xD6, 0x4F}, {0xD6, 0x308}, {0xD9, 0x55}, {0xD9, 0x300}, {0xDA, 0x55}, {0xDA, 0x301}, {0xDB, 0x55}, {0xDB, 0x302}, {0xDC, 0x55}, {0xDC, 0x308}, {0xDD, 0x59}, {0xDD, 0x301},
+{0xE0, 0x61}, {0xE0, 0x300}, {0xE1, 0x61}, {0xE1, 0x301}, {0xE2, 0x61}, {0xE2, 0x302}, {0xE3, 0x61}, {0xE3, 0x303}, {0xE4, 0x61}, {0xE4, 0x308}, {0xE5, 0x61}, {0xE5, 0x30A}, {0xE7, 0x63},
+{0xE7, 0x327}, {0xE8, 0x65}, {0xE8, 0x300}, {0xE9, 0x65}, {0xE9, 0x301}, {0xEA, 0x65}, {0xEA, 0x302}, {0xEB, 0x65}, {0xEB, 0x308}, {0xEC, 0x69}, {0xEC, 0x300}, {0xED, 0x69}, {0xED, 0x301},
+{0xEE, 0x69}, {0xEE, 0x302}, {0xEF, 0x69}, {0xEF, 0x308}, {0xF1, 0x6E}, {0xF1, 0x303}, {0xF2, 0x6F}, {0xF2, 0x300}, {0xF3, 0x6F}, {0xF3, 0x301}, {0xF4, 0x6F}, {0xF4, 0x302}, {0xF5, 0x6F},
+{0xF5, 0x303}, {0xF6, 0x6F}, {0xF6, 0x308}, {0xF9, 0x75}, {0xF9, 0x300}, {0xFA, 0x75}, {0xFA, 0x301}, {0xFB, 0x75}, {0xFB, 0x302}, {0xFC, 0x75}, {0xFC, 0x308}, {0xFD, 0x79}, {0xFD, 0x301},
+{0xFF, 0x79}, {0xFF, 0x308}, {0x100, 0x41}, {0x100, 0x304}, {0x101, 0x61}, {0x101, 0x304}, {0x102, 0x41}, {0x102, 0x306}, {0x103, 0x61}, {0x103, 0x306}, {0x104, 0x41}, {0x104, 0x328}, {0x105, 0x61},
+{0x105, 0x328}, {0x106, 0x43}, {0x106, 0x301}, {0x107, 0x63}, {0x107, 0x301}, {0x108, 0x43}, {0x108, 0x302}, {0x109, 0x63}, {0x109, 0x302}, {0x10A, 0x43}, {0x10A, 0x307}, {0x10B, 0x63},
+{0x10B, 0x307}, {0x10C, 0x43}, {0x10C, 0x30C}, {0x10D, 0x63}, {0x10D, 0x30C}, {0x10E, 0x44}, {0x10E, 0x30C}, {0x10F, 0x64}, {0x10F, 0x30C}, {0x112, 0x45}, {0x112, 0x304}, {0x113, 0x65},
+{0x113, 0x304}, {0x114, 0x45}, {0x114, 0x306}, {0x115, 0x65}, {0x115, 0x306}, {0x116, 0x45}, {0x116, 0x307}, {0x117, 0x65}, {0x117, 0x307}, {0x118, 0x45}, {0x118, 0x328}, {0x119, 0x65},
+{0x119, 0x328}, {0x11A, 0x45}, {0x11A, 0x30C}, {0x11B, 0x65}, {0x11B, 0x30C}, {0x11C, 0x47}, {0x11C, 0x302}, {0x11D, 0x67}, {0x11D, 0x302}, {0x11E, 0x47}, {0x11E, 0x306}, {0x11F, 0x67},
+{0x11F, 0x306}, {0x120, 0x47}, {0x120, 0x307}, {0x121, 0x67}, {0x121, 0x307}, {0x122, 0x47}, {0x122, 0x327}, {0x123, 0x67}, {0x123, 0x327}, {0x124, 0x48}, {0x124, 0x302}, {0x125, 0x68},
+{0x125, 0x302}, {0x128, 0x49}, {0x128, 0x303}, {0x129, 0x69}, {0x129, 0x303}, {0x12A, 0x49}, {0x12A, 0x304}, {0x12B, 0x69}, {0x12B, 0x304}, {0x12C, 0x49}, {0x12C, 0x306}, {0x12D, 0x69},
+{0x12D, 0x306}, {0x12E, 0x49}, {0x12E, 0x328}, {0x12F, 0x69}, {0x12F, 0x328}, {0x130, 0x49}, {0x130, 0x307}, {0x134, 0x4A}, {0x134, 0x302}, {0x135, 0x6A}, {0x135, 0x302}, {0x136, 0x4B},
+{0x136, 0x327}, {0x137, 0x6B}, {0x137, 0x327}, {0x139, 0x4C}, {0x139, 0x301}, {0x13A, 0x6C}, 
{0x13A, 0x301}, {0x13B, 0x4C}, {0x13B, 0x327}, {0x13C, 0x6C}, {0x13C, 0x327}, {0x13D, 0x4C}, +{0x13D, 0x30C}, {0x13E, 0x6C}, {0x13E, 0x30C}, {0x143, 0x4E}, {0x143, 0x301}, {0x144, 0x6E}, {0x144, 0x301}, {0x145, 0x4E}, {0x145, 0x327}, {0x146, 0x6E}, {0x146, 0x327}, {0x147, 0x4E}, +{0x147, 0x30C}, {0x148, 0x6E}, {0x148, 0x30C}, {0x14C, 0x4F}, {0x14C, 0x304}, {0x14D, 0x6F}, {0x14D, 0x304}, {0x14E, 0x4F}, {0x14E, 0x306}, {0x14F, 0x6F}, {0x14F, 0x306}, {0x150, 0x4F}, +{0x150, 0x30B}, {0x151, 0x6F}, {0x151, 0x30B}, {0x154, 0x52}, {0x154, 0x301}, {0x155, 0x72}, {0x155, 0x301}, {0x156, 0x52}, {0x156, 0x327}, {0x157, 0x72}, {0x157, 0x327}, {0x158, 0x52}, +{0x158, 0x30C}, {0x159, 0x72}, {0x159, 0x30C}, {0x15A, 0x53}, {0x15A, 0x301}, {0x15B, 0x73}, {0x15B, 0x301}, {0x15C, 0x53}, {0x15C, 0x302}, {0x15D, 0x73}, {0x15D, 0x302}, {0x15E, 0x53}, +{0x15E, 0x327}, {0x15F, 0x73}, {0x15F, 0x327}, {0x160, 0x53}, {0x160, 0x30C}, {0x161, 0x73}, {0x161, 0x30C}, {0x162, 0x54}, {0x162, 0x327}, {0x163, 0x74}, {0x163, 0x327}, {0x164, 0x54}, +{0x164, 0x30C}, {0x165, 0x74}, {0x165, 0x30C}, {0x168, 0x55}, {0x168, 0x303}, {0x169, 0x75}, {0x169, 0x303}, {0x16A, 0x55}, {0x16A, 0x304}, {0x16B, 0x75}, {0x16B, 0x304}, {0x16C, 0x55}, +{0x16C, 0x306}, {0x16D, 0x75}, {0x16D, 0x306}, {0x16E, 0x55}, {0x16E, 0x30A}, {0x16F, 0x75}, {0x16F, 0x30A}, {0x170, 0x55}, {0x170, 0x30B}, {0x171, 0x75}, {0x171, 0x30B}, {0x172, 0x55}, +{0x172, 0x328}, {0x173, 0x75}, {0x173, 0x328}, {0x174, 0x57}, {0x174, 0x302}, {0x175, 0x77}, {0x175, 0x302}, {0x176, 0x59}, {0x176, 0x302}, {0x177, 0x79}, {0x177, 0x302}, {0x178, 0x59}, +{0x178, 0x308}, {0x179, 0x5A}, {0x179, 0x301}, {0x17A, 0x7A}, {0x17A, 0x301}, {0x17B, 0x5A}, {0x17B, 0x307}, {0x17C, 0x7A}, {0x17C, 0x307}, {0x17D, 0x5A}, {0x17D, 0x30C}, {0x17E, 0x7A}, +{0x17E, 0x30C}, {0x1A0, 0x4F}, {0x1A0, 0x31B}, {0x1A1, 0x6F}, {0x1A1, 0x31B}, {0x1AF, 0x55}, {0x1AF, 0x31B}, {0x1B0, 0x75}, {0x1B0, 0x31B}, {0x1CD, 0x41}, {0x1CD, 0x30C}, {0x1CE, 0x61}, +{0x1CE, 0x30C}, {0x1CF, 0x49}, {0x1CF, 0x30C}, {0x1D0, 0x69}, {0x1D0, 0x30C}, {0x1D1, 0x4F}, {0x1D1, 0x30C}, {0x1D2, 0x6F}, {0x1D2, 0x30C}, {0x1D3, 0x55}, {0x1D3, 0x30C}, {0x1D4, 0x75}, +{0x1D4, 0x30C}, {0x1D5, 0x55}, {0x1D5, 0x308}, {0x1D5, 0x304}, {0x1D6, 0x75}, {0x1D6, 0x308}, {0x1D6, 0x304}, {0x1D7, 0x55}, {0x1D7, 0x308}, {0x1D7, 0x301}, {0x1D8, 0x75}, {0x1D8, 0x308}, +{0x1D8, 0x301}, {0x1D9, 0x55}, {0x1D9, 0x308}, {0x1D9, 0x30C}, {0x1DA, 0x75}, {0x1DA, 0x308}, {0x1DA, 0x30C}, {0x1DB, 0x55}, {0x1DB, 0x308}, {0x1DB, 0x300}, {0x1DC, 0x75}, {0x1DC, 0x308}, +{0x1DC, 0x300}, {0x1DE, 0x41}, {0x1DE, 0x308}, {0x1DE, 0x304}, {0x1DF, 0x61}, {0x1DF, 0x308}, {0x1DF, 0x304}, {0x1E0, 0x41}, {0x1E0, 0x307}, {0x1E0, 0x304}, {0x1E1, 0x61}, {0x1E1, 0x307}, +{0x1E1, 0x304}, {0x1E2, 0xC6}, {0x1E2, 0x304}, {0x1E3, 0xE6}, {0x1E3, 0x304}, {0x1E6, 0x47}, {0x1E6, 0x30C}, {0x1E7, 0x67}, {0x1E7, 0x30C}, {0x1E8, 0x4B}, {0x1E8, 0x30C}, {0x1E9, 0x6B}, +{0x1E9, 0x30C}, {0x1EA, 0x4F}, {0x1EA, 0x328}, {0x1EB, 0x6F}, {0x1EB, 0x328}, {0x1EC, 0x4F}, {0x1EC, 0x328}, {0x1EC, 0x304}, {0x1ED, 0x6F}, {0x1ED, 0x328}, {0x1ED, 0x304}, {0x1EE, 0x1B7}, +{0x1EE, 0x30C}, {0x1EF, 0x292}, {0x1EF, 0x30C}, {0x1F0, 0x6A}, {0x1F0, 0x30C}, {0x1F4, 0x47}, {0x1F4, 0x301}, {0x1F5, 0x67}, {0x1F5, 0x301}, {0x1F8, 0x4E}, {0x1F8, 0x300}, {0x1F9, 0x6E}, +{0x1F9, 0x300}, {0x1FA, 0x41}, {0x1FA, 0x30A}, {0x1FA, 0x301}, {0x1FB, 0x61}, {0x1FB, 0x30A}, {0x1FB, 0x301}, {0x1FC, 0xC6}, {0x1FC, 0x301}, {0x1FD, 0xE6}, {0x1FD, 0x301}, {0x1FE, 0xD8}, +{0x1FE, 0x301}, {0x1FF, 0xF8}, {0x1FF, 0x301}, {0x200, 0x41}, {0x200, 0x30F}, 
{0x201, 0x61}, {0x201, 0x30F}, {0x202, 0x41}, {0x202, 0x311}, {0x203, 0x61}, {0x203, 0x311}, {0x204, 0x45}, +{0x204, 0x30F}, {0x205, 0x65}, {0x205, 0x30F}, {0x206, 0x45}, {0x206, 0x311}, {0x207, 0x65}, {0x207, 0x311}, {0x208, 0x49}, {0x208, 0x30F}, {0x209, 0x69}, {0x209, 0x30F}, {0x20A, 0x49}, +{0x20A, 0x311}, {0x20B, 0x69}, {0x20B, 0x311}, {0x20C, 0x4F}, {0x20C, 0x30F}, {0x20D, 0x6F}, {0x20D, 0x30F}, {0x20E, 0x4F}, {0x20E, 0x311}, {0x20F, 0x6F}, {0x20F, 0x311}, {0x210, 0x52}, +{0x210, 0x30F}, {0x211, 0x72}, {0x211, 0x30F}, {0x212, 0x52}, {0x212, 0x311}, {0x213, 0x72}, {0x213, 0x311}, {0x214, 0x55}, {0x214, 0x30F}, {0x215, 0x75}, {0x215, 0x30F}, {0x216, 0x55}, +{0x216, 0x311}, {0x217, 0x75}, {0x217, 0x311}, {0x218, 0x53}, {0x218, 0x326}, {0x219, 0x73}, {0x219, 0x326}, {0x21A, 0x54}, {0x21A, 0x326}, {0x21B, 0x74}, {0x21B, 0x326}, {0x21E, 0x48}, +{0x21E, 0x30C}, {0x21F, 0x68}, {0x21F, 0x30C}, {0x226, 0x41}, {0x226, 0x307}, {0x227, 0x61}, {0x227, 0x307}, {0x228, 0x45}, {0x228, 0x327}, {0x229, 0x65}, {0x229, 0x327}, {0x22A, 0x4F}, +{0x22A, 0x308}, {0x22A, 0x304}, {0x22B, 0x6F}, {0x22B, 0x308}, {0x22B, 0x304}, {0x22C, 0x4F}, {0x22C, 0x303}, {0x22C, 0x304}, {0x22D, 0x6F}, {0x22D, 0x303}, {0x22D, 0x304}, {0x22E, 0x4F}, +{0x22E, 0x307}, {0x22F, 0x6F}, {0x22F, 0x307}, {0x230, 0x4F}, {0x230, 0x307}, {0x230, 0x304}, {0x231, 0x6F}, {0x231, 0x307}, {0x231, 0x304}, {0x232, 0x59}, {0x232, 0x304}, {0x233, 0x79}, +{0x233, 0x304}, {0x340, 0x300}, {0x341, 0x301}, {0x343, 0x313}, {0x344, 0x308}, {0x344, 0x301}, {0x374, 0x2B9}, {0x37E, 0x3B}, {0x385, 0xA8}, {0x385, 0x301}, {0x386, 0x391}, {0x386, 0x301}, +{0x387, 0xB7}, {0x388, 0x395}, {0x388, 0x301}, {0x389, 0x397}, {0x389, 0x301}, {0x38A, 0x399}, {0x38A, 0x301}, {0x38C, 0x39F}, {0x38C, 0x301}, {0x38E, 0x3A5}, {0x38E, 0x301}, {0x38F, 0x3A9}, +{0x38F, 0x301}, {0x390, 0x3B9}, {0x390, 0x308}, {0x390, 0x301}, {0x3AA, 0x399}, {0x3AA, 0x308}, {0x3AB, 0x3A5}, {0x3AB, 0x308}, {0x3AC, 0x3B1}, {0x3AC, 0x301}, {0x3AD, 0x3B5}, {0x3AD, 0x301}, +{0x3AE, 0x3B7}, {0x3AE, 0x301}, {0x3AF, 0x3B9}, {0x3AF, 0x301}, {0x3B0, 0x3C5}, {0x3B0, 0x308}, {0x3B0, 0x301}, {0x3CA, 0x3B9}, {0x3CA, 0x308}, {0x3CB, 0x3C5}, {0x3CB, 0x308}, {0x3CC, 0x3BF}, +{0x3CC, 0x301}, {0x3CD, 0x3C5}, {0x3CD, 0x301}, {0x3CE, 0x3C9}, {0x3CE, 0x301}, {0x3D3, 0x3D2}, {0x3D3, 0x301}, {0x3D4, 0x3D2}, {0x3D4, 0x308}, {0x400, 0x415}, {0x400, 0x300}, {0x401, 0x415}, +{0x401, 0x308}, {0x403, 0x413}, {0x403, 0x301}, {0x407, 0x406}, {0x407, 0x308}, {0x40C, 0x41A}, {0x40C, 0x301}, {0x40D, 0x418}, {0x40D, 0x300}, {0x40E, 0x423}, {0x40E, 0x306}, {0x419, 0x418}, +{0x419, 0x306}, {0x439, 0x438}, {0x439, 0x306}, {0x450, 0x435}, {0x450, 0x300}, {0x451, 0x435}, {0x451, 0x308}, {0x453, 0x433}, {0x453, 0x301}, {0x457, 0x456}, {0x457, 0x308}, {0x45C, 0x43A}, +{0x45C, 0x301}, {0x45D, 0x438}, {0x45D, 0x300}, {0x45E, 0x443}, {0x45E, 0x306}, {0x476, 0x474}, {0x476, 0x30F}, {0x477, 0x475}, {0x477, 0x30F}, {0x4C1, 0x416}, {0x4C1, 0x306}, {0x4C2, 0x436}, +{0x4C2, 0x306}, {0x4D0, 0x410}, {0x4D0, 0x306}, {0x4D1, 0x430}, {0x4D1, 0x306}, {0x4D2, 0x410}, {0x4D2, 0x308}, {0x4D3, 0x430}, {0x4D3, 0x308}, {0x4D6, 0x415}, {0x4D6, 0x306}, {0x4D7, 0x435}, +{0x4D7, 0x306}, {0x4DA, 0x4D8}, {0x4DA, 0x308}, {0x4DB, 0x4D9}, {0x4DB, 0x308}, {0x4DC, 0x416}, {0x4DC, 0x308}, {0x4DD, 0x436}, {0x4DD, 0x308}, {0x4DE, 0x417}, {0x4DE, 0x308}, {0x4DF, 0x437}, +{0x4DF, 0x308}, {0x4E2, 0x418}, {0x4E2, 0x304}, {0x4E3, 0x438}, {0x4E3, 0x304}, {0x4E4, 0x418}, {0x4E4, 0x308}, {0x4E5, 0x438}, {0x4E5, 0x308}, {0x4E6, 0x41E}, {0x4E6, 0x308}, {0x4E7, 0x43E}, +{0x4E7, 
0x308}, {0x4EA, 0x4E8}, {0x4EA, 0x308}, {0x4EB, 0x4E9}, {0x4EB, 0x308}, {0x4EC, 0x42D}, {0x4EC, 0x308}, {0x4ED, 0x44D}, {0x4ED, 0x308}, {0x4EE, 0x423}, {0x4EE, 0x304}, {0x4EF, 0x443}, +{0x4EF, 0x304}, {0x4F0, 0x423}, {0x4F0, 0x308}, {0x4F1, 0x443}, {0x4F1, 0x308}, {0x4F2, 0x423}, {0x4F2, 0x30B}, {0x4F3, 0x443}, {0x4F3, 0x30B}, {0x4F4, 0x427}, {0x4F4, 0x308}, {0x4F5, 0x447}, +{0x4F5, 0x308}, {0x4F8, 0x42B}, {0x4F8, 0x308}, {0x4F9, 0x44B}, {0x4F9, 0x308}, {0x622, 0x627}, {0x622, 0x653}, {0x623, 0x627}, {0x623, 0x654}, {0x624, 0x648}, {0x624, 0x654}, {0x625, 0x627}, +{0x625, 0x655}, {0x626, 0x64A}, {0x626, 0x654}, {0x6C0, 0x6D5}, {0x6C0, 0x654}, {0x6C2, 0x6C1}, {0x6C2, 0x654}, {0x6D3, 0x6D2}, {0x6D3, 0x654}, {0x929, 0x928}, {0x929, 0x93C}, {0x931, 0x930}, +{0x931, 0x93C}, {0x934, 0x933}, {0x934, 0x93C}, {0x958, 0x915}, {0x958, 0x93C}, {0x959, 0x916}, {0x959, 0x93C}, {0x95A, 0x917}, {0x95A, 0x93C}, {0x95B, 0x91C}, {0x95B, 0x93C}, {0x95C, 0x921}, +{0x95C, 0x93C}, {0x95D, 0x922}, {0x95D, 0x93C}, {0x95E, 0x92B}, {0x95E, 0x93C}, {0x95F, 0x92F}, {0x95F, 0x93C}, {0x9CB, 0x9C7}, {0x9CB, 0x9BE}, {0x9CC, 0x9C7}, {0x9CC, 0x9D7}, {0x9DC, 0x9A1}, +{0x9DC, 0x9BC}, {0x9DD, 0x9A2}, {0x9DD, 0x9BC}, {0x9DF, 0x9AF}, {0x9DF, 0x9BC}, {0xA33, 0xA32}, {0xA33, 0xA3C}, {0xA36, 0xA38}, {0xA36, 0xA3C}, {0xA59, 0xA16}, {0xA59, 0xA3C}, {0xA5A, 0xA17}, +{0xA5A, 0xA3C}, {0xA5B, 0xA1C}, {0xA5B, 0xA3C}, {0xA5E, 0xA2B}, {0xA5E, 0xA3C}, {0xB48, 0xB47}, {0xB48, 0xB56}, {0xB4B, 0xB47}, {0xB4B, 0xB3E}, {0xB4C, 0xB47}, {0xB4C, 0xB57}, {0xB5C, 0xB21}, +{0xB5C, 0xB3C}, {0xB5D, 0xB22}, {0xB5D, 0xB3C}, {0xB94, 0xB92}, {0xB94, 0xBD7}, {0xBCA, 0xBC6}, {0xBCA, 0xBBE}, {0xBCB, 0xBC7}, {0xBCB, 0xBBE}, {0xBCC, 0xBC6}, {0xBCC, 0xBD7}, {0xC48, 0xC46}, +{0xC48, 0xC56}, {0xCC0, 0xCBF}, {0xCC0, 0xCD5}, {0xCC7, 0xCC6}, {0xCC7, 0xCD5}, {0xCC8, 0xCC6}, {0xCC8, 0xCD6}, {0xCCA, 0xCC6}, {0xCCA, 0xCC2}, {0xCCB, 0xCC6}, {0xCCB, 0xCC2}, {0xCCB, 0xCD5}, +{0xD4A, 0xD46}, {0xD4A, 0xD3E}, {0xD4B, 0xD47}, {0xD4B, 0xD3E}, {0xD4C, 0xD46}, {0xD4C, 0xD57}, {0xDDA, 0xDD9}, {0xDDA, 0xDCA}, {0xDDC, 0xDD9}, {0xDDC, 0xDCF}, {0xDDD, 0xDD9}, {0xDDD, 0xDCF}, +{0xDDD, 0xDCA}, {0xDDE, 0xDD9}, {0xDDE, 0xDDF}, {0xF43, 0xF42}, {0xF43, 0xFB7}, {0xF4D, 0xF4C}, {0xF4D, 0xFB7}, {0xF52, 0xF51}, {0xF52, 0xFB7}, {0xF57, 0xF56}, {0xF57, 0xFB7}, {0xF5C, 0xF5B}, +{0xF5C, 0xFB7}, {0xF69, 0xF40}, {0xF69, 0xFB5}, {0xF73, 0xF71}, {0xF73, 0xF72}, {0xF75, 0xF71}, {0xF75, 0xF74}, {0xF76, 0xFB2}, {0xF76, 0xF80}, {0xF78, 0xFB3}, {0xF78, 0xF80}, {0xF81, 0xF71}, +{0xF81, 0xF80}, {0xF93, 0xF92}, {0xF93, 0xFB7}, {0xF9D, 0xF9C}, {0xF9D, 0xFB7}, {0xFA2, 0xFA1}, {0xFA2, 0xFB7}, {0xFA7, 0xFA6}, {0xFA7, 0xFB7}, {0xFAC, 0xFAB}, {0xFAC, 0xFB7}, {0xFB9, 0xF90}, +{0xFB9, 0xFB5}, {0x1026, 0x1025}, {0x1026, 0x102E}, {0x1B06, 0x1B05}, {0x1B06, 0x1B35}, {0x1B08, 0x1B07}, {0x1B08, 0x1B35}, {0x1B0A, 0x1B09}, {0x1B0A, 0x1B35}, {0x1B0C, 0x1B0B}, {0x1B0C, 0x1B35}, +{0x1B0E, 0x1B0D}, {0x1B0E, 0x1B35}, {0x1B12, 0x1B11}, {0x1B12, 0x1B35}, {0x1B3B, 0x1B3A}, {0x1B3B, 0x1B35}, {0x1B3D, 0x1B3C}, {0x1B3D, 0x1B35}, {0x1B40, 0x1B3E}, {0x1B40, 0x1B35}, {0x1B41, 0x1B3F}, +{0x1B41, 0x1B35}, {0x1B43, 0x1B42}, {0x1B43, 0x1B35}, {0x1E00, 0x41}, {0x1E00, 0x325}, {0x1E01, 0x61}, {0x1E01, 0x325}, {0x1E02, 0x42}, {0x1E02, 0x307}, {0x1E03, 0x62}, {0x1E03, 0x307}, +{0x1E04, 0x42}, {0x1E04, 0x323}, {0x1E05, 0x62}, {0x1E05, 0x323}, {0x1E06, 0x42}, {0x1E06, 0x331}, {0x1E07, 0x62}, {0x1E07, 0x331}, {0x1E08, 0x43}, {0x1E08, 0x327}, {0x1E08, 0x301}, {0x1E09, 0x63}, +{0x1E09, 0x327}, {0x1E09, 0x301}, {0x1E0A, 0x44}, {0x1E0A, 0x307}, {0x1E0B, 
0x64}, {0x1E0B, 0x307}, {0x1E0C, 0x44}, {0x1E0C, 0x323}, {0x1E0D, 0x64}, {0x1E0D, 0x323}, {0x1E0E, 0x44}, {0x1E0E, 0x331}, +{0x1E0F, 0x64}, {0x1E0F, 0x331}, {0x1E10, 0x44}, {0x1E10, 0x327}, {0x1E11, 0x64}, {0x1E11, 0x327}, {0x1E12, 0x44}, {0x1E12, 0x32D}, {0x1E13, 0x64}, {0x1E13, 0x32D}, {0x1E14, 0x45}, {0x1E14, 0x304}, +{0x1E14, 0x300}, {0x1E15, 0x65}, {0x1E15, 0x304}, {0x1E15, 0x300}, {0x1E16, 0x45}, {0x1E16, 0x304}, {0x1E16, 0x301}, {0x1E17, 0x65}, {0x1E17, 0x304}, {0x1E17, 0x301}, {0x1E18, 0x45}, {0x1E18, 0x32D}, +{0x1E19, 0x65}, {0x1E19, 0x32D}, {0x1E1A, 0x45}, {0x1E1A, 0x330}, {0x1E1B, 0x65}, {0x1E1B, 0x330}, {0x1E1C, 0x45}, {0x1E1C, 0x327}, {0x1E1C, 0x306}, {0x1E1D, 0x65}, {0x1E1D, 0x327}, {0x1E1D, 0x306}, +{0x1E1E, 0x46}, {0x1E1E, 0x307}, {0x1E1F, 0x66}, {0x1E1F, 0x307}, {0x1E20, 0x47}, {0x1E20, 0x304}, {0x1E21, 0x67}, {0x1E21, 0x304}, {0x1E22, 0x48}, {0x1E22, 0x307}, {0x1E23, 0x68}, {0x1E23, 0x307}, +{0x1E24, 0x48}, {0x1E24, 0x323}, {0x1E25, 0x68}, {0x1E25, 0x323}, {0x1E26, 0x48}, {0x1E26, 0x308}, {0x1E27, 0x68}, {0x1E27, 0x308}, {0x1E28, 0x48}, {0x1E28, 0x327}, {0x1E29, 0x68}, {0x1E29, 0x327}, +{0x1E2A, 0x48}, {0x1E2A, 0x32E}, {0x1E2B, 0x68}, {0x1E2B, 0x32E}, {0x1E2C, 0x49}, {0x1E2C, 0x330}, {0x1E2D, 0x69}, {0x1E2D, 0x330}, {0x1E2E, 0x49}, {0x1E2E, 0x308}, {0x1E2E, 0x301}, {0x1E2F, 0x69}, +{0x1E2F, 0x308}, {0x1E2F, 0x301}, {0x1E30, 0x4B}, {0x1E30, 0x301}, {0x1E31, 0x6B}, {0x1E31, 0x301}, {0x1E32, 0x4B}, {0x1E32, 0x323}, {0x1E33, 0x6B}, {0x1E33, 0x323}, {0x1E34, 0x4B}, {0x1E34, 0x331}, +{0x1E35, 0x6B}, {0x1E35, 0x331}, {0x1E36, 0x4C}, {0x1E36, 0x323}, {0x1E37, 0x6C}, {0x1E37, 0x323}, {0x1E38, 0x4C}, {0x1E38, 0x323}, {0x1E38, 0x304}, {0x1E39, 0x6C}, {0x1E39, 0x323}, {0x1E39, 0x304}, +{0x1E3A, 0x4C}, {0x1E3A, 0x331}, {0x1E3B, 0x6C}, {0x1E3B, 0x331}, {0x1E3C, 0x4C}, {0x1E3C, 0x32D}, {0x1E3D, 0x6C}, {0x1E3D, 0x32D}, {0x1E3E, 0x4D}, {0x1E3E, 0x301}, {0x1E3F, 0x6D}, {0x1E3F, 0x301}, +{0x1E40, 0x4D}, {0x1E40, 0x307}, {0x1E41, 0x6D}, {0x1E41, 0x307}, {0x1E42, 0x4D}, {0x1E42, 0x323}, {0x1E43, 0x6D}, {0x1E43, 0x323}, {0x1E44, 0x4E}, {0x1E44, 0x307}, {0x1E45, 0x6E}, {0x1E45, 0x307}, +{0x1E46, 0x4E}, {0x1E46, 0x323}, {0x1E47, 0x6E}, {0x1E47, 0x323}, {0x1E48, 0x4E}, {0x1E48, 0x331}, {0x1E49, 0x6E}, {0x1E49, 0x331}, {0x1E4A, 0x4E}, {0x1E4A, 0x32D}, {0x1E4B, 0x6E}, {0x1E4B, 0x32D}, +{0x1E4C, 0x4F}, {0x1E4C, 0x303}, {0x1E4C, 0x301}, {0x1E4D, 0x6F}, {0x1E4D, 0x303}, {0x1E4D, 0x301}, {0x1E4E, 0x4F}, {0x1E4E, 0x303}, {0x1E4E, 0x308}, {0x1E4F, 0x6F}, {0x1E4F, 0x303}, {0x1E4F, 0x308}, +{0x1E50, 0x4F}, {0x1E50, 0x304}, {0x1E50, 0x300}, {0x1E51, 0x6F}, {0x1E51, 0x304}, {0x1E51, 0x300}, {0x1E52, 0x4F}, {0x1E52, 0x304}, {0x1E52, 0x301}, {0x1E53, 0x6F}, {0x1E53, 0x304}, {0x1E53, 0x301}, +{0x1E54, 0x50}, {0x1E54, 0x301}, {0x1E55, 0x70}, {0x1E55, 0x301}, {0x1E56, 0x50}, {0x1E56, 0x307}, {0x1E57, 0x70}, {0x1E57, 0x307}, {0x1E58, 0x52}, {0x1E58, 0x307}, {0x1E59, 0x72}, {0x1E59, 0x307}, +{0x1E5A, 0x52}, {0x1E5A, 0x323}, {0x1E5B, 0x72}, {0x1E5B, 0x323}, {0x1E5C, 0x52}, {0x1E5C, 0x323}, {0x1E5C, 0x304}, {0x1E5D, 0x72}, {0x1E5D, 0x323}, {0x1E5D, 0x304}, {0x1E5E, 0x52}, {0x1E5E, 0x331}, +{0x1E5F, 0x72}, {0x1E5F, 0x331}, {0x1E60, 0x53}, {0x1E60, 0x307}, {0x1E61, 0x73}, {0x1E61, 0x307}, {0x1E62, 0x53}, {0x1E62, 0x323}, {0x1E63, 0x73}, {0x1E63, 0x323}, {0x1E64, 0x53}, {0x1E64, 0x301}, +{0x1E64, 0x307}, {0x1E65, 0x73}, {0x1E65, 0x301}, {0x1E65, 0x307}, {0x1E66, 0x53}, {0x1E66, 0x30C}, {0x1E66, 0x307}, {0x1E67, 0x73}, {0x1E67, 0x30C}, {0x1E67, 0x307}, {0x1E68, 0x53}, {0x1E68, 0x323}, +{0x1E68, 0x307}, {0x1E69, 0x73}, 
{0x1E69, 0x323}, {0x1E69, 0x307}, {0x1E6A, 0x54}, {0x1E6A, 0x307}, {0x1E6B, 0x74}, {0x1E6B, 0x307}, {0x1E6C, 0x54}, {0x1E6C, 0x323}, {0x1E6D, 0x74}, {0x1E6D, 0x323}, +{0x1E6E, 0x54}, {0x1E6E, 0x331}, {0x1E6F, 0x74}, {0x1E6F, 0x331}, {0x1E70, 0x54}, {0x1E70, 0x32D}, {0x1E71, 0x74}, {0x1E71, 0x32D}, {0x1E72, 0x55}, {0x1E72, 0x324}, {0x1E73, 0x75}, {0x1E73, 0x324}, +{0x1E74, 0x55}, {0x1E74, 0x330}, {0x1E75, 0x75}, {0x1E75, 0x330}, {0x1E76, 0x55}, {0x1E76, 0x32D}, {0x1E77, 0x75}, {0x1E77, 0x32D}, {0x1E78, 0x55}, {0x1E78, 0x303}, {0x1E78, 0x301}, {0x1E79, 0x75}, +{0x1E79, 0x303}, {0x1E79, 0x301}, {0x1E7A, 0x55}, {0x1E7A, 0x304}, {0x1E7A, 0x308}, {0x1E7B, 0x75}, {0x1E7B, 0x304}, {0x1E7B, 0x308}, {0x1E7C, 0x56}, {0x1E7C, 0x303}, {0x1E7D, 0x76}, {0x1E7D, 0x303}, +{0x1E7E, 0x56}, {0x1E7E, 0x323}, {0x1E7F, 0x76}, {0x1E7F, 0x323}, {0x1E80, 0x57}, {0x1E80, 0x300}, {0x1E81, 0x77}, {0x1E81, 0x300}, {0x1E82, 0x57}, {0x1E82, 0x301}, {0x1E83, 0x77}, {0x1E83, 0x301}, +{0x1E84, 0x57}, {0x1E84, 0x308}, {0x1E85, 0x77}, {0x1E85, 0x308}, {0x1E86, 0x57}, {0x1E86, 0x307}, {0x1E87, 0x77}, {0x1E87, 0x307}, {0x1E88, 0x57}, {0x1E88, 0x323}, {0x1E89, 0x77}, {0x1E89, 0x323}, +{0x1E8A, 0x58}, {0x1E8A, 0x307}, {0x1E8B, 0x78}, {0x1E8B, 0x307}, {0x1E8C, 0x58}, {0x1E8C, 0x308}, {0x1E8D, 0x78}, {0x1E8D, 0x308}, {0x1E8E, 0x59}, {0x1E8E, 0x307}, {0x1E8F, 0x79}, {0x1E8F, 0x307}, +{0x1E90, 0x5A}, {0x1E90, 0x302}, {0x1E91, 0x7A}, {0x1E91, 0x302}, {0x1E92, 0x5A}, {0x1E92, 0x323}, {0x1E93, 0x7A}, {0x1E93, 0x323}, {0x1E94, 0x5A}, {0x1E94, 0x331}, {0x1E95, 0x7A}, {0x1E95, 0x331}, +{0x1E96, 0x68}, {0x1E96, 0x331}, {0x1E97, 0x74}, {0x1E97, 0x308}, {0x1E98, 0x77}, {0x1E98, 0x30A}, {0x1E99, 0x79}, {0x1E99, 0x30A}, {0x1E9B, 0x17F}, {0x1E9B, 0x307}, {0x1EA0, 0x41}, {0x1EA0, 0x323}, +{0x1EA1, 0x61}, {0x1EA1, 0x323}, {0x1EA2, 0x41}, {0x1EA2, 0x309}, {0x1EA3, 0x61}, {0x1EA3, 0x309}, {0x1EA4, 0x41}, {0x1EA4, 0x302}, {0x1EA4, 0x301}, {0x1EA5, 0x61}, {0x1EA5, 0x302}, {0x1EA5, 0x301}, +{0x1EA6, 0x41}, {0x1EA6, 0x302}, {0x1EA6, 0x300}, {0x1EA7, 0x61}, {0x1EA7, 0x302}, {0x1EA7, 0x300}, {0x1EA8, 0x41}, {0x1EA8, 0x302}, {0x1EA8, 0x309}, {0x1EA9, 0x61}, {0x1EA9, 0x302}, {0x1EA9, 0x309}, +{0x1EAA, 0x41}, {0x1EAA, 0x302}, {0x1EAA, 0x303}, {0x1EAB, 0x61}, {0x1EAB, 0x302}, {0x1EAB, 0x303}, {0x1EAC, 0x41}, {0x1EAC, 0x323}, {0x1EAC, 0x302}, {0x1EAD, 0x61}, {0x1EAD, 0x323}, {0x1EAD, 0x302}, +{0x1EAE, 0x41}, {0x1EAE, 0x306}, {0x1EAE, 0x301}, {0x1EAF, 0x61}, {0x1EAF, 0x306}, {0x1EAF, 0x301}, {0x1EB0, 0x41}, {0x1EB0, 0x306}, {0x1EB0, 0x300}, {0x1EB1, 0x61}, {0x1EB1, 0x306}, {0x1EB1, 0x300}, +{0x1EB2, 0x41}, {0x1EB2, 0x306}, {0x1EB2, 0x309}, {0x1EB3, 0x61}, {0x1EB3, 0x306}, {0x1EB3, 0x309}, {0x1EB4, 0x41}, {0x1EB4, 0x306}, {0x1EB4, 0x303}, {0x1EB5, 0x61}, {0x1EB5, 0x306}, {0x1EB5, 0x303}, +{0x1EB6, 0x41}, {0x1EB6, 0x323}, {0x1EB6, 0x306}, {0x1EB7, 0x61}, {0x1EB7, 0x323}, {0x1EB7, 0x306}, {0x1EB8, 0x45}, {0x1EB8, 0x323}, {0x1EB9, 0x65}, {0x1EB9, 0x323}, {0x1EBA, 0x45}, {0x1EBA, 0x309}, +{0x1EBB, 0x65}, {0x1EBB, 0x309}, {0x1EBC, 0x45}, {0x1EBC, 0x303}, {0x1EBD, 0x65}, {0x1EBD, 0x303}, {0x1EBE, 0x45}, {0x1EBE, 0x302}, {0x1EBE, 0x301}, {0x1EBF, 0x65}, {0x1EBF, 0x302}, {0x1EBF, 0x301}, +{0x1EC0, 0x45}, {0x1EC0, 0x302}, {0x1EC0, 0x300}, {0x1EC1, 0x65}, {0x1EC1, 0x302}, {0x1EC1, 0x300}, {0x1EC2, 0x45}, {0x1EC2, 0x302}, {0x1EC2, 0x309}, {0x1EC3, 0x65}, {0x1EC3, 0x302}, {0x1EC3, 0x309}, +{0x1EC4, 0x45}, {0x1EC4, 0x302}, {0x1EC4, 0x303}, {0x1EC5, 0x65}, {0x1EC5, 0x302}, {0x1EC5, 0x303}, {0x1EC6, 0x45}, {0x1EC6, 0x323}, {0x1EC6, 0x302}, {0x1EC7, 0x65}, {0x1EC7, 0x323}, 
{0x1EC7, 0x302}, +{0x1EC8, 0x49}, {0x1EC8, 0x309}, {0x1EC9, 0x69}, {0x1EC9, 0x309}, {0x1ECA, 0x49}, {0x1ECA, 0x323}, {0x1ECB, 0x69}, {0x1ECB, 0x323}, {0x1ECC, 0x4F}, {0x1ECC, 0x323}, {0x1ECD, 0x6F}, {0x1ECD, 0x323}, +{0x1ECE, 0x4F}, {0x1ECE, 0x309}, {0x1ECF, 0x6F}, {0x1ECF, 0x309}, {0x1ED0, 0x4F}, {0x1ED0, 0x302}, {0x1ED0, 0x301}, {0x1ED1, 0x6F}, {0x1ED1, 0x302}, {0x1ED1, 0x301}, {0x1ED2, 0x4F}, {0x1ED2, 0x302}, +{0x1ED2, 0x300}, {0x1ED3, 0x6F}, {0x1ED3, 0x302}, {0x1ED3, 0x300}, {0x1ED4, 0x4F}, {0x1ED4, 0x302}, {0x1ED4, 0x309}, {0x1ED5, 0x6F}, {0x1ED5, 0x302}, {0x1ED5, 0x309}, {0x1ED6, 0x4F}, {0x1ED6, 0x302}, +{0x1ED6, 0x303}, {0x1ED7, 0x6F}, {0x1ED7, 0x302}, {0x1ED7, 0x303}, {0x1ED8, 0x4F}, {0x1ED8, 0x323}, {0x1ED8, 0x302}, {0x1ED9, 0x6F}, {0x1ED9, 0x323}, {0x1ED9, 0x302}, {0x1EDA, 0x4F}, {0x1EDA, 0x31B}, +{0x1EDA, 0x301}, {0x1EDB, 0x6F}, {0x1EDB, 0x31B}, {0x1EDB, 0x301}, {0x1EDC, 0x4F}, {0x1EDC, 0x31B}, {0x1EDC, 0x300}, {0x1EDD, 0x6F}, {0x1EDD, 0x31B}, {0x1EDD, 0x300}, {0x1EDE, 0x4F}, {0x1EDE, 0x31B}, +{0x1EDE, 0x309}, {0x1EDF, 0x6F}, {0x1EDF, 0x31B}, {0x1EDF, 0x309}, {0x1EE0, 0x4F}, {0x1EE0, 0x31B}, {0x1EE0, 0x303}, {0x1EE1, 0x6F}, {0x1EE1, 0x31B}, {0x1EE1, 0x303}, {0x1EE2, 0x4F}, {0x1EE2, 0x31B}, +{0x1EE2, 0x323}, {0x1EE3, 0x6F}, {0x1EE3, 0x31B}, {0x1EE3, 0x323}, {0x1EE4, 0x55}, {0x1EE4, 0x323}, {0x1EE5, 0x75}, {0x1EE5, 0x323}, {0x1EE6, 0x55}, {0x1EE6, 0x309}, {0x1EE7, 0x75}, {0x1EE7, 0x309}, +{0x1EE8, 0x55}, {0x1EE8, 0x31B}, {0x1EE8, 0x301}, {0x1EE9, 0x75}, {0x1EE9, 0x31B}, {0x1EE9, 0x301}, {0x1EEA, 0x55}, {0x1EEA, 0x31B}, {0x1EEA, 0x300}, {0x1EEB, 0x75}, {0x1EEB, 0x31B}, {0x1EEB, 0x300}, +{0x1EEC, 0x55}, {0x1EEC, 0x31B}, {0x1EEC, 0x309}, {0x1EED, 0x75}, {0x1EED, 0x31B}, {0x1EED, 0x309}, {0x1EEE, 0x55}, {0x1EEE, 0x31B}, {0x1EEE, 0x303}, {0x1EEF, 0x75}, {0x1EEF, 0x31B}, {0x1EEF, 0x303}, +{0x1EF0, 0x55}, {0x1EF0, 0x31B}, {0x1EF0, 0x323}, {0x1EF1, 0x75}, {0x1EF1, 0x31B}, {0x1EF1, 0x323}, {0x1EF2, 0x59}, {0x1EF2, 0x300}, {0x1EF3, 0x79}, {0x1EF3, 0x300}, {0x1EF4, 0x59}, {0x1EF4, 0x323}, +{0x1EF5, 0x79}, {0x1EF5, 0x323}, {0x1EF6, 0x59}, {0x1EF6, 0x309}, {0x1EF7, 0x79}, {0x1EF7, 0x309}, {0x1EF8, 0x59}, {0x1EF8, 0x303}, {0x1EF9, 0x79}, {0x1EF9, 0x303}, {0x1F00, 0x3B1}, {0x1F00, 0x313}, +{0x1F01, 0x3B1}, {0x1F01, 0x314}, {0x1F02, 0x3B1}, {0x1F02, 0x313}, {0x1F02, 0x300}, {0x1F03, 0x3B1}, {0x1F03, 0x314}, {0x1F03, 0x300}, {0x1F04, 0x3B1}, {0x1F04, 0x313}, {0x1F04, 0x301}, +{0x1F05, 0x3B1}, {0x1F05, 0x314}, {0x1F05, 0x301}, {0x1F06, 0x3B1}, {0x1F06, 0x313}, {0x1F06, 0x342}, {0x1F07, 0x3B1}, {0x1F07, 0x314}, {0x1F07, 0x342}, {0x1F08, 0x391}, {0x1F08, 0x313}, +{0x1F09, 0x391}, {0x1F09, 0x314}, {0x1F0A, 0x391}, {0x1F0A, 0x313}, {0x1F0A, 0x300}, {0x1F0B, 0x391}, {0x1F0B, 0x314}, {0x1F0B, 0x300}, {0x1F0C, 0x391}, {0x1F0C, 0x313}, {0x1F0C, 0x301}, +{0x1F0D, 0x391}, {0x1F0D, 0x314}, {0x1F0D, 0x301}, {0x1F0E, 0x391}, {0x1F0E, 0x313}, {0x1F0E, 0x342}, {0x1F0F, 0x391}, {0x1F0F, 0x314}, {0x1F0F, 0x342}, {0x1F10, 0x3B5}, {0x1F10, 0x313}, +{0x1F11, 0x3B5}, {0x1F11, 0x314}, {0x1F12, 0x3B5}, {0x1F12, 0x313}, {0x1F12, 0x300}, {0x1F13, 0x3B5}, {0x1F13, 0x314}, {0x1F13, 0x300}, {0x1F14, 0x3B5}, {0x1F14, 0x313}, {0x1F14, 0x301}, +{0x1F15, 0x3B5}, {0x1F15, 0x314}, {0x1F15, 0x301}, {0x1F18, 0x395}, {0x1F18, 0x313}, {0x1F19, 0x395}, {0x1F19, 0x314}, {0x1F1A, 0x395}, {0x1F1A, 0x313}, {0x1F1A, 0x300}, {0x1F1B, 0x395}, +{0x1F1B, 0x314}, {0x1F1B, 0x300}, {0x1F1C, 0x395}, {0x1F1C, 0x313}, {0x1F1C, 0x301}, {0x1F1D, 0x395}, {0x1F1D, 0x314}, {0x1F1D, 0x301}, {0x1F20, 0x3B7}, {0x1F20, 0x313}, {0x1F21, 0x3B7}, +{0x1F21, 
0x314}, {0x1F22, 0x3B7}, {0x1F22, 0x313}, {0x1F22, 0x300}, {0x1F23, 0x3B7}, {0x1F23, 0x314}, {0x1F23, 0x300}, {0x1F24, 0x3B7}, {0x1F24, 0x313}, {0x1F24, 0x301}, {0x1F25, 0x3B7}, +{0x1F25, 0x314}, {0x1F25, 0x301}, {0x1F26, 0x3B7}, {0x1F26, 0x313}, {0x1F26, 0x342}, {0x1F27, 0x3B7}, {0x1F27, 0x314}, {0x1F27, 0x342}, {0x1F28, 0x397}, {0x1F28, 0x313}, {0x1F29, 0x397}, +{0x1F29, 0x314}, {0x1F2A, 0x397}, {0x1F2A, 0x313}, {0x1F2A, 0x300}, {0x1F2B, 0x397}, {0x1F2B, 0x314}, {0x1F2B, 0x300}, {0x1F2C, 0x397}, {0x1F2C, 0x313}, {0x1F2C, 0x301}, {0x1F2D, 0x397}, +{0x1F2D, 0x314}, {0x1F2D, 0x301}, {0x1F2E, 0x397}, {0x1F2E, 0x313}, {0x1F2E, 0x342}, {0x1F2F, 0x397}, {0x1F2F, 0x314}, {0x1F2F, 0x342}, {0x1F30, 0x3B9}, {0x1F30, 0x313}, {0x1F31, 0x3B9}, +{0x1F31, 0x314}, {0x1F32, 0x3B9}, {0x1F32, 0x313}, {0x1F32, 0x300}, {0x1F33, 0x3B9}, {0x1F33, 0x314}, {0x1F33, 0x300}, {0x1F34, 0x3B9}, {0x1F34, 0x313}, {0x1F34, 0x301}, {0x1F35, 0x3B9}, +{0x1F35, 0x314}, {0x1F35, 0x301}, {0x1F36, 0x3B9}, {0x1F36, 0x313}, {0x1F36, 0x342}, {0x1F37, 0x3B9}, {0x1F37, 0x314}, {0x1F37, 0x342}, {0x1F38, 0x399}, {0x1F38, 0x313}, {0x1F39, 0x399}, +{0x1F39, 0x314}, {0x1F3A, 0x399}, {0x1F3A, 0x313}, {0x1F3A, 0x300}, {0x1F3B, 0x399}, {0x1F3B, 0x314}, {0x1F3B, 0x300}, {0x1F3C, 0x399}, {0x1F3C, 0x313}, {0x1F3C, 0x301}, {0x1F3D, 0x399}, +{0x1F3D, 0x314}, {0x1F3D, 0x301}, {0x1F3E, 0x399}, {0x1F3E, 0x313}, {0x1F3E, 0x342}, {0x1F3F, 0x399}, {0x1F3F, 0x314}, {0x1F3F, 0x342}, {0x1F40, 0x3BF}, {0x1F40, 0x313}, {0x1F41, 0x3BF}, +{0x1F41, 0x314}, {0x1F42, 0x3BF}, {0x1F42, 0x313}, {0x1F42, 0x300}, {0x1F43, 0x3BF}, {0x1F43, 0x314}, {0x1F43, 0x300}, {0x1F44, 0x3BF}, {0x1F44, 0x313}, {0x1F44, 0x301}, {0x1F45, 0x3BF}, +{0x1F45, 0x314}, {0x1F45, 0x301}, {0x1F48, 0x39F}, {0x1F48, 0x313}, {0x1F49, 0x39F}, {0x1F49, 0x314}, {0x1F4A, 0x39F}, {0x1F4A, 0x313}, {0x1F4A, 0x300}, {0x1F4B, 0x39F}, {0x1F4B, 0x314}, +{0x1F4B, 0x300}, {0x1F4C, 0x39F}, {0x1F4C, 0x313}, {0x1F4C, 0x301}, {0x1F4D, 0x39F}, {0x1F4D, 0x314}, {0x1F4D, 0x301}, {0x1F50, 0x3C5}, {0x1F50, 0x313}, {0x1F51, 0x3C5}, {0x1F51, 0x314}, +{0x1F52, 0x3C5}, {0x1F52, 0x313}, {0x1F52, 0x300}, {0x1F53, 0x3C5}, {0x1F53, 0x314}, {0x1F53, 0x300}, {0x1F54, 0x3C5}, {0x1F54, 0x313}, {0x1F54, 0x301}, {0x1F55, 0x3C5}, {0x1F55, 0x314}, +{0x1F55, 0x301}, {0x1F56, 0x3C5}, {0x1F56, 0x313}, {0x1F56, 0x342}, {0x1F57, 0x3C5}, {0x1F57, 0x314}, {0x1F57, 0x342}, {0x1F59, 0x3A5}, {0x1F59, 0x314}, {0x1F5B, 0x3A5}, {0x1F5B, 0x314}, +{0x1F5B, 0x300}, {0x1F5D, 0x3A5}, {0x1F5D, 0x314}, {0x1F5D, 0x301}, {0x1F5F, 0x3A5}, {0x1F5F, 0x314}, {0x1F5F, 0x342}, {0x1F60, 0x3C9}, {0x1F60, 0x313}, {0x1F61, 0x3C9}, {0x1F61, 0x314}, +{0x1F62, 0x3C9}, {0x1F62, 0x313}, {0x1F62, 0x300}, {0x1F63, 0x3C9}, {0x1F63, 0x314}, {0x1F63, 0x300}, {0x1F64, 0x3C9}, {0x1F64, 0x313}, {0x1F64, 0x301}, {0x1F65, 0x3C9}, {0x1F65, 0x314}, +{0x1F65, 0x301}, {0x1F66, 0x3C9}, {0x1F66, 0x313}, {0x1F66, 0x342}, {0x1F67, 0x3C9}, {0x1F67, 0x314}, {0x1F67, 0x342}, {0x1F68, 0x3A9}, {0x1F68, 0x313}, {0x1F69, 0x3A9}, {0x1F69, 0x314}, +{0x1F6A, 0x3A9}, {0x1F6A, 0x313}, {0x1F6A, 0x300}, {0x1F6B, 0x3A9}, {0x1F6B, 0x314}, {0x1F6B, 0x300}, {0x1F6C, 0x3A9}, {0x1F6C, 0x313}, {0x1F6C, 0x301}, {0x1F6D, 0x3A9}, {0x1F6D, 0x314}, +{0x1F6D, 0x301}, {0x1F6E, 0x3A9}, {0x1F6E, 0x313}, {0x1F6E, 0x342}, {0x1F6F, 0x3A9}, {0x1F6F, 0x314}, {0x1F6F, 0x342}, {0x1F70, 0x3B1}, {0x1F70, 0x300}, {0x1F71, 0x3B1}, {0x1F71, 0x301}, +{0x1F72, 0x3B5}, {0x1F72, 0x300}, {0x1F73, 0x3B5}, {0x1F73, 0x301}, {0x1F74, 0x3B7}, {0x1F74, 0x300}, {0x1F75, 0x3B7}, {0x1F75, 0x301}, {0x1F76, 0x3B9}, {0x1F76, 0x300}, {0x1F77, 
0x3B9}, +{0x1F77, 0x301}, {0x1F78, 0x3BF}, {0x1F78, 0x300}, {0x1F79, 0x3BF}, {0x1F79, 0x301}, {0x1F7A, 0x3C5}, {0x1F7A, 0x300}, {0x1F7B, 0x3C5}, {0x1F7B, 0x301}, {0x1F7C, 0x3C9}, {0x1F7C, 0x300}, +{0x1F7D, 0x3C9}, {0x1F7D, 0x301}, {0x1F80, 0x3B1}, {0x1F80, 0x313}, {0x1F80, 0x345}, {0x1F81, 0x3B1}, {0x1F81, 0x314}, {0x1F81, 0x345}, {0x1F82, 0x3B1}, {0x1F82, 0x313}, {0x1F82, 0x300}, +{0x1F82, 0x345}, {0x1F83, 0x3B1}, {0x1F83, 0x314}, {0x1F83, 0x300}, {0x1F83, 0x345}, {0x1F84, 0x3B1}, {0x1F84, 0x313}, {0x1F84, 0x301}, {0x1F84, 0x345}, {0x1F85, 0x3B1}, {0x1F85, 0x314}, +{0x1F85, 0x301}, {0x1F85, 0x345}, {0x1F86, 0x3B1}, {0x1F86, 0x313}, {0x1F86, 0x342}, {0x1F86, 0x345}, {0x1F87, 0x3B1}, {0x1F87, 0x314}, {0x1F87, 0x342}, {0x1F87, 0x345}, {0x1F88, 0x391}, +{0x1F88, 0x313}, {0x1F88, 0x345}, {0x1F89, 0x391}, {0x1F89, 0x314}, {0x1F89, 0x345}, {0x1F8A, 0x391}, {0x1F8A, 0x313}, {0x1F8A, 0x300}, {0x1F8A, 0x345}, {0x1F8B, 0x391}, {0x1F8B, 0x314}, +{0x1F8B, 0x300}, {0x1F8B, 0x345}, {0x1F8C, 0x391}, {0x1F8C, 0x313}, {0x1F8C, 0x301}, {0x1F8C, 0x345}, {0x1F8D, 0x391}, {0x1F8D, 0x314}, {0x1F8D, 0x301}, {0x1F8D, 0x345}, {0x1F8E, 0x391}, +{0x1F8E, 0x313}, {0x1F8E, 0x342}, {0x1F8E, 0x345}, {0x1F8F, 0x391}, {0x1F8F, 0x314}, {0x1F8F, 0x342}, {0x1F8F, 0x345}, {0x1F90, 0x3B7}, {0x1F90, 0x313}, {0x1F90, 0x345}, {0x1F91, 0x3B7}, +{0x1F91, 0x314}, {0x1F91, 0x345}, {0x1F92, 0x3B7}, {0x1F92, 0x313}, {0x1F92, 0x300}, {0x1F92, 0x345}, {0x1F93, 0x3B7}, {0x1F93, 0x314}, {0x1F93, 0x300}, {0x1F93, 0x345}, {0x1F94, 0x3B7}, +{0x1F94, 0x313}, {0x1F94, 0x301}, {0x1F94, 0x345}, {0x1F95, 0x3B7}, {0x1F95, 0x314}, {0x1F95, 0x301}, {0x1F95, 0x345}, {0x1F96, 0x3B7}, {0x1F96, 0x313}, {0x1F96, 0x342}, {0x1F96, 0x345}, +{0x1F97, 0x3B7}, {0x1F97, 0x314}, {0x1F97, 0x342}, {0x1F97, 0x345}, {0x1F98, 0x397}, {0x1F98, 0x313}, {0x1F98, 0x345}, {0x1F99, 0x397}, {0x1F99, 0x314}, {0x1F99, 0x345}, {0x1F9A, 0x397}, +{0x1F9A, 0x313}, {0x1F9A, 0x300}, {0x1F9A, 0x345}, {0x1F9B, 0x397}, {0x1F9B, 0x314}, {0x1F9B, 0x300}, {0x1F9B, 0x345}, {0x1F9C, 0x397}, {0x1F9C, 0x313}, {0x1F9C, 0x301}, {0x1F9C, 0x345}, +{0x1F9D, 0x397}, {0x1F9D, 0x314}, {0x1F9D, 0x301}, {0x1F9D, 0x345}, {0x1F9E, 0x397}, {0x1F9E, 0x313}, {0x1F9E, 0x342}, {0x1F9E, 0x345}, {0x1F9F, 0x397}, {0x1F9F, 0x314}, {0x1F9F, 0x342}, +{0x1F9F, 0x345}, {0x1FA0, 0x3C9}, {0x1FA0, 0x313}, {0x1FA0, 0x345}, {0x1FA1, 0x3C9}, {0x1FA1, 0x314}, {0x1FA1, 0x345}, {0x1FA2, 0x3C9}, {0x1FA2, 0x313}, {0x1FA2, 0x300}, {0x1FA2, 0x345}, +{0x1FA3, 0x3C9}, {0x1FA3, 0x314}, {0x1FA3, 0x300}, {0x1FA3, 0x345}, {0x1FA4, 0x3C9}, {0x1FA4, 0x313}, {0x1FA4, 0x301}, {0x1FA4, 0x345}, {0x1FA5, 0x3C9}, {0x1FA5, 0x314}, {0x1FA5, 0x301}, +{0x1FA5, 0x345}, {0x1FA6, 0x3C9}, {0x1FA6, 0x313}, {0x1FA6, 0x342}, {0x1FA6, 0x345}, {0x1FA7, 0x3C9}, {0x1FA7, 0x314}, {0x1FA7, 0x342}, {0x1FA7, 0x345}, {0x1FA8, 0x3A9}, {0x1FA8, 0x313}, +{0x1FA8, 0x345}, {0x1FA9, 0x3A9}, {0x1FA9, 0x314}, {0x1FA9, 0x345}, {0x1FAA, 0x3A9}, {0x1FAA, 0x313}, {0x1FAA, 0x300}, {0x1FAA, 0x345}, {0x1FAB, 0x3A9}, {0x1FAB, 0x314}, {0x1FAB, 0x300}, +{0x1FAB, 0x345}, {0x1FAC, 0x3A9}, {0x1FAC, 0x313}, {0x1FAC, 0x301}, {0x1FAC, 0x345}, {0x1FAD, 0x3A9}, {0x1FAD, 0x314}, {0x1FAD, 0x301}, {0x1FAD, 0x345}, {0x1FAE, 0x3A9}, {0x1FAE, 0x313}, +{0x1FAE, 0x342}, {0x1FAE, 0x345}, {0x1FAF, 0x3A9}, {0x1FAF, 0x314}, {0x1FAF, 0x342}, {0x1FAF, 0x345}, {0x1FB0, 0x3B1}, {0x1FB0, 0x306}, {0x1FB1, 0x3B1}, {0x1FB1, 0x304}, {0x1FB2, 0x3B1}, +{0x1FB2, 0x300}, {0x1FB2, 0x345}, {0x1FB3, 0x3B1}, {0x1FB3, 0x345}, {0x1FB4, 0x3B1}, {0x1FB4, 0x301}, {0x1FB4, 0x345}, {0x1FB6, 0x3B1}, {0x1FB6, 0x342}, {0x1FB7, 
0x3B1}, {0x1FB7, 0x342}, +{0x1FB7, 0x345}, {0x1FB8, 0x391}, {0x1FB8, 0x306}, {0x1FB9, 0x391}, {0x1FB9, 0x304}, {0x1FBA, 0x391}, {0x1FBA, 0x300}, {0x1FBB, 0x391}, {0x1FBB, 0x301}, {0x1FBC, 0x391}, {0x1FBC, 0x345}, +{0x1FBE, 0x3B9}, {0x1FC1, 0xA8}, {0x1FC1, 0x342}, {0x1FC2, 0x3B7}, {0x1FC2, 0x300}, {0x1FC2, 0x345}, {0x1FC3, 0x3B7}, {0x1FC3, 0x345}, {0x1FC4, 0x3B7}, {0x1FC4, 0x301}, {0x1FC4, 0x345}, +{0x1FC6, 0x3B7}, {0x1FC6, 0x342}, {0x1FC7, 0x3B7}, {0x1FC7, 0x342}, {0x1FC7, 0x345}, {0x1FC8, 0x395}, {0x1FC8, 0x300}, {0x1FC9, 0x395}, {0x1FC9, 0x301}, {0x1FCA, 0x397}, {0x1FCA, 0x300}, +{0x1FCB, 0x397}, {0x1FCB, 0x301}, {0x1FCC, 0x397}, {0x1FCC, 0x345}, {0x1FCD, 0x1FBF}, {0x1FCD, 0x300}, {0x1FCE, 0x1FBF}, {0x1FCE, 0x301}, {0x1FCF, 0x1FBF}, {0x1FCF, 0x342}, {0x1FD0, 0x3B9}, +{0x1FD0, 0x306}, {0x1FD1, 0x3B9}, {0x1FD1, 0x304}, {0x1FD2, 0x3B9}, {0x1FD2, 0x308}, {0x1FD2, 0x300}, {0x1FD3, 0x3B9}, {0x1FD3, 0x308}, {0x1FD3, 0x301}, {0x1FD6, 0x3B9}, {0x1FD6, 0x342}, +{0x1FD7, 0x3B9}, {0x1FD7, 0x308}, {0x1FD7, 0x342}, {0x1FD8, 0x399}, {0x1FD8, 0x306}, {0x1FD9, 0x399}, {0x1FD9, 0x304}, {0x1FDA, 0x399}, {0x1FDA, 0x300}, {0x1FDB, 0x399}, {0x1FDB, 0x301}, +{0x1FDD, 0x1FFE}, {0x1FDD, 0x300}, {0x1FDE, 0x1FFE}, {0x1FDE, 0x301}, {0x1FDF, 0x1FFE}, {0x1FDF, 0x342}, {0x1FE0, 0x3C5}, {0x1FE0, 0x306}, {0x1FE1, 0x3C5}, {0x1FE1, 0x304}, {0x1FE2, 0x3C5}, +{0x1FE2, 0x308}, {0x1FE2, 0x300}, {0x1FE3, 0x3C5}, {0x1FE3, 0x308}, {0x1FE3, 0x301}, {0x1FE4, 0x3C1}, {0x1FE4, 0x313}, {0x1FE5, 0x3C1}, {0x1FE5, 0x314}, {0x1FE6, 0x3C5}, {0x1FE6, 0x342}, +{0x1FE7, 0x3C5}, {0x1FE7, 0x308}, {0x1FE7, 0x342}, {0x1FE8, 0x3A5}, {0x1FE8, 0x306}, {0x1FE9, 0x3A5}, {0x1FE9, 0x304}, {0x1FEA, 0x3A5}, {0x1FEA, 0x300}, {0x1FEB, 0x3A5}, {0x1FEB, 0x301}, +{0x1FEC, 0x3A1}, {0x1FEC, 0x314}, {0x1FED, 0xA8}, {0x1FED, 0x300}, {0x1FEE, 0xA8}, {0x1FEE, 0x301}, {0x1FEF, 0x60}, {0x1FF2, 0x3C9}, {0x1FF2, 0x300}, {0x1FF2, 0x345}, {0x1FF3, 0x3C9}, {0x1FF3, 0x345}, +{0x1FF4, 0x3C9}, {0x1FF4, 0x301}, {0x1FF4, 0x345}, {0x1FF6, 0x3C9}, {0x1FF6, 0x342}, {0x1FF7, 0x3C9}, {0x1FF7, 0x342}, {0x1FF7, 0x345}, {0x1FF8, 0x39F}, {0x1FF8, 0x300}, {0x1FF9, 0x39F}, +{0x1FF9, 0x301}, {0x1FFA, 0x3A9}, {0x1FFA, 0x300}, {0x1FFB, 0x3A9}, {0x1FFB, 0x301}, {0x1FFC, 0x3A9}, {0x1FFC, 0x345}, {0x1FFD, 0xB4}, {0x2000, 0x2002}, {0x2001, 0x2003}, {0x2126, 0x3A9}, +{0x212A, 0x4B}, {0x212B, 0x41}, {0x212B, 0x30A}, {0x219A, 0x2190}, {0x219A, 0x338}, {0x219B, 0x2192}, {0x219B, 0x338}, {0x21AE, 0x2194}, {0x21AE, 0x338}, {0x21CD, 0x21D0}, {0x21CD, 0x338}, +{0x21CE, 0x21D4}, {0x21CE, 0x338}, {0x21CF, 0x21D2}, {0x21CF, 0x338}, {0x2204, 0x2203}, {0x2204, 0x338}, {0x2209, 0x2208}, {0x2209, 0x338}, {0x220C, 0x220B}, {0x220C, 0x338}, {0x2224, 0x2223}, +{0x2224, 0x338}, {0x2226, 0x2225}, {0x2226, 0x338}, {0x2241, 0x223C}, {0x2241, 0x338}, {0x2244, 0x2243}, {0x2244, 0x338}, {0x2247, 0x2245}, {0x2247, 0x338}, {0x2249, 0x2248}, {0x2249, 0x338}, +{0x2260, 0x3D}, {0x2260, 0x338}, {0x2262, 0x2261}, {0x2262, 0x338}, {0x226D, 0x224D}, {0x226D, 0x338}, {0x226E, 0x3C}, {0x226E, 0x338}, {0x226F, 0x3E}, {0x226F, 0x338}, {0x2270, 0x2264}, +{0x2270, 0x338}, {0x2271, 0x2265}, {0x2271, 0x338}, {0x2274, 0x2272}, {0x2274, 0x338}, {0x2275, 0x2273}, {0x2275, 0x338}, {0x2278, 0x2276}, {0x2278, 0x338}, {0x2279, 0x2277}, {0x2279, 0x338}, +{0x2280, 0x227A}, {0x2280, 0x338}, {0x2281, 0x227B}, {0x2281, 0x338}, {0x2284, 0x2282}, {0x2284, 0x338}, {0x2285, 0x2283}, {0x2285, 0x338}, {0x2288, 0x2286}, {0x2288, 0x338}, {0x2289, 0x2287}, +{0x2289, 0x338}, {0x22AC, 0x22A2}, {0x22AC, 0x338}, {0x22AD, 0x22A8}, {0x22AD, 0x338}, {0x22AE, 
0x22A9}, {0x22AE, 0x338}, {0x22AF, 0x22AB}, {0x22AF, 0x338}, {0x22E0, 0x227C}, {0x22E0, 0x338}, +{0x22E1, 0x227D}, {0x22E1, 0x338}, {0x22E2, 0x2291}, {0x22E2, 0x338}, {0x22E3, 0x2292}, {0x22E3, 0x338}, {0x22EA, 0x22B2}, {0x22EA, 0x338}, {0x22EB, 0x22B3}, {0x22EB, 0x338}, {0x22EC, 0x22B4}, +{0x22EC, 0x338}, {0x22ED, 0x22B5}, {0x22ED, 0x338}, {0x2329, 0x3008}, {0x232A, 0x3009}, {0x2ADC, 0x2ADD}, {0x2ADC, 0x338}, {0x304C, 0x304B}, {0x304C, 0x3099}, {0x304E, 0x304D}, {0x304E, 0x3099}, +{0x3050, 0x304F}, {0x3050, 0x3099}, {0x3052, 0x3051}, {0x3052, 0x3099}, {0x3054, 0x3053}, {0x3054, 0x3099}, {0x3056, 0x3055}, {0x3056, 0x3099}, {0x3058, 0x3057}, {0x3058, 0x3099}, {0x305A, 0x3059}, +{0x305A, 0x3099}, {0x305C, 0x305B}, {0x305C, 0x3099}, {0x305E, 0x305D}, {0x305E, 0x3099}, {0x3060, 0x305F}, {0x3060, 0x3099}, {0x3062, 0x3061}, {0x3062, 0x3099}, {0x3065, 0x3064}, {0x3065, 0x3099}, +{0x3067, 0x3066}, {0x3067, 0x3099}, {0x3069, 0x3068}, {0x3069, 0x3099}, {0x3070, 0x306F}, {0x3070, 0x3099}, {0x3071, 0x306F}, {0x3071, 0x309A}, {0x3073, 0x3072}, {0x3073, 0x3099}, {0x3074, 0x3072}, +{0x3074, 0x309A}, {0x3076, 0x3075}, {0x3076, 0x3099}, {0x3077, 0x3075}, {0x3077, 0x309A}, {0x3079, 0x3078}, {0x3079, 0x3099}, {0x307A, 0x3078}, {0x307A, 0x309A}, {0x307C, 0x307B}, {0x307C, 0x3099}, +{0x307D, 0x307B}, {0x307D, 0x309A}, {0x3094, 0x3046}, {0x3094, 0x3099}, {0x309E, 0x309D}, {0x309E, 0x3099}, {0x30AC, 0x30AB}, {0x30AC, 0x3099}, {0x30AE, 0x30AD}, {0x30AE, 0x3099}, {0x30B0, 0x30AF}, +{0x30B0, 0x3099}, {0x30B2, 0x30B1}, {0x30B2, 0x3099}, {0x30B4, 0x30B3}, {0x30B4, 0x3099}, {0x30B6, 0x30B5}, {0x30B6, 0x3099}, {0x30B8, 0x30B7}, {0x30B8, 0x3099}, {0x30BA, 0x30B9}, {0x30BA, 0x3099}, +{0x30BC, 0x30BB}, {0x30BC, 0x3099}, {0x30BE, 0x30BD}, {0x30BE, 0x3099}, {0x30C0, 0x30BF}, {0x30C0, 0x3099}, {0x30C2, 0x30C1}, {0x30C2, 0x3099}, {0x30C5, 0x30C4}, {0x30C5, 0x3099}, {0x30C7, 0x30C6}, +{0x30C7, 0x3099}, {0x30C9, 0x30C8}, {0x30C9, 0x3099}, {0x30D0, 0x30CF}, {0x30D0, 0x3099}, {0x30D1, 0x30CF}, {0x30D1, 0x309A}, {0x30D3, 0x30D2}, {0x30D3, 0x3099}, {0x30D4, 0x30D2}, {0x30D4, 0x309A}, +{0x30D6, 0x30D5}, {0x30D6, 0x3099}, {0x30D7, 0x30D5}, {0x30D7, 0x309A}, {0x30D9, 0x30D8}, {0x30D9, 0x3099}, {0x30DA, 0x30D8}, {0x30DA, 0x309A}, {0x30DC, 0x30DB}, {0x30DC, 0x3099}, {0x30DD, 0x30DB}, +{0x30DD, 0x309A}, {0x30F4, 0x30A6}, {0x30F4, 0x3099}, {0x30F7, 0x30EF}, {0x30F7, 0x3099}, {0x30F8, 0x30F0}, {0x30F8, 0x3099}, {0x30F9, 0x30F1}, {0x30F9, 0x3099}, {0x30FA, 0x30F2}, {0x30FA, 0x3099}, +{0x30FE, 0x30FD}, {0x30FE, 0x3099}, {0xF900, 0x8C48}, {0xF901, 0x66F4}, {0xF902, 0x8ECA}, {0xF903, 0x8CC8}, {0xF904, 0x6ED1}, {0xF905, 0x4E32}, {0xF906, 0x53E5}, {0xF907, 0x9F9C}, {0xF908, 0x9F9C}, +{0xF909, 0x5951}, {0xF90A, 0x91D1}, {0xF90B, 0x5587}, {0xF90C, 0x5948}, {0xF90D, 0x61F6}, {0xF90E, 0x7669}, {0xF90F, 0x7F85}, {0xF910, 0x863F}, {0xF911, 0x87BA}, {0xF912, 0x88F8}, {0xF913, 0x908F}, +{0xF914, 0x6A02}, {0xF915, 0x6D1B}, {0xF916, 0x70D9}, {0xF917, 0x73DE}, {0xF918, 0x843D}, {0xF919, 0x916A}, {0xF91A, 0x99F1}, {0xF91B, 0x4E82}, {0xF91C, 0x5375}, {0xF91D, 0x6B04}, {0xF91E, 0x721B}, +{0xF91F, 0x862D}, {0xF920, 0x9E1E}, {0xF921, 0x5D50}, {0xF922, 0x6FEB}, {0xF923, 0x85CD}, {0xF924, 0x8964}, {0xF925, 0x62C9}, {0xF926, 0x81D8}, {0xF927, 0x881F}, {0xF928, 0x5ECA}, {0xF929, 0x6717}, +{0xF92A, 0x6D6A}, {0xF92B, 0x72FC}, {0xF92C, 0x90CE}, {0xF92D, 0x4F86}, {0xF92E, 0x51B7}, {0xF92F, 0x52DE}, {0xF930, 0x64C4}, {0xF931, 0x6AD3}, {0xF932, 0x7210}, {0xF933, 0x76E7}, {0xF934, 0x8001}, +{0xF935, 0x8606}, {0xF936, 0x865C}, {0xF937, 0x8DEF}, {0xF938, 0x9732}, {0xF939, 
0x9B6F}, {0xF93A, 0x9DFA}, {0xF93B, 0x788C}, {0xF93C, 0x797F}, {0xF93D, 0x7DA0}, {0xF93E, 0x83C9}, {0xF93F, 0x9304}, +{0xF940, 0x9E7F}, {0xF941, 0x8AD6}, {0xF942, 0x58DF}, {0xF943, 0x5F04}, {0xF944, 0x7C60}, {0xF945, 0x807E}, {0xF946, 0x7262}, {0xF947, 0x78CA}, {0xF948, 0x8CC2}, {0xF949, 0x96F7}, {0xF94A, 0x58D8}, +{0xF94B, 0x5C62}, {0xF94C, 0x6A13}, {0xF94D, 0x6DDA}, {0xF94E, 0x6F0F}, {0xF94F, 0x7D2F}, {0xF950, 0x7E37}, {0xF951, 0x964B}, {0xF952, 0x52D2}, {0xF953, 0x808B}, {0xF954, 0x51DC}, {0xF955, 0x51CC}, +{0xF956, 0x7A1C}, {0xF957, 0x7DBE}, {0xF958, 0x83F1}, {0xF959, 0x9675}, {0xF95A, 0x8B80}, {0xF95B, 0x62CF}, {0xF95C, 0x6A02}, {0xF95D, 0x8AFE}, {0xF95E, 0x4E39}, {0xF95F, 0x5BE7}, {0xF960, 0x6012}, +{0xF961, 0x7387}, {0xF962, 0x7570}, {0xF963, 0x5317}, {0xF964, 0x78FB}, {0xF965, 0x4FBF}, {0xF966, 0x5FA9}, {0xF967, 0x4E0D}, {0xF968, 0x6CCC}, {0xF969, 0x6578}, {0xF96A, 0x7D22}, {0xF96B, 0x53C3}, +{0xF96C, 0x585E}, {0xF96D, 0x7701}, {0xF96E, 0x8449}, {0xF96F, 0x8AAA}, {0xF970, 0x6BBA}, {0xF971, 0x8FB0}, {0xF972, 0x6C88}, {0xF973, 0x62FE}, {0xF974, 0x82E5}, {0xF975, 0x63A0}, {0xF976, 0x7565}, +{0xF977, 0x4EAE}, {0xF978, 0x5169}, {0xF979, 0x51C9}, {0xF97A, 0x6881}, {0xF97B, 0x7CE7}, {0xF97C, 0x826F}, {0xF97D, 0x8AD2}, {0xF97E, 0x91CF}, {0xF97F, 0x52F5}, {0xF980, 0x5442}, {0xF981, 0x5973}, +{0xF982, 0x5EEC}, {0xF983, 0x65C5}, {0xF984, 0x6FFE}, {0xF985, 0x792A}, {0xF986, 0x95AD}, {0xF987, 0x9A6A}, {0xF988, 0x9E97}, {0xF989, 0x9ECE}, {0xF98A, 0x529B}, {0xF98B, 0x66C6}, {0xF98C, 0x6B77}, +{0xF98D, 0x8F62}, {0xF98E, 0x5E74}, {0xF98F, 0x6190}, {0xF990, 0x6200}, {0xF991, 0x649A}, {0xF992, 0x6F23}, {0xF993, 0x7149}, {0xF994, 0x7489}, {0xF995, 0x79CA}, {0xF996, 0x7DF4}, {0xF997, 0x806F}, +{0xF998, 0x8F26}, {0xF999, 0x84EE}, {0xF99A, 0x9023}, {0xF99B, 0x934A}, {0xF99C, 0x5217}, {0xF99D, 0x52A3}, {0xF99E, 0x54BD}, {0xF99F, 0x70C8}, {0xF9A0, 0x88C2}, {0xF9A1, 0x8AAA}, {0xF9A2, 0x5EC9}, +{0xF9A3, 0x5FF5}, {0xF9A4, 0x637B}, {0xF9A5, 0x6BAE}, {0xF9A6, 0x7C3E}, {0xF9A7, 0x7375}, {0xF9A8, 0x4EE4}, {0xF9A9, 0x56F9}, {0xF9AA, 0x5BE7}, {0xF9AB, 0x5DBA}, {0xF9AC, 0x601C}, {0xF9AD, 0x73B2}, +{0xF9AE, 0x7469}, {0xF9AF, 0x7F9A}, {0xF9B0, 0x8046}, {0xF9B1, 0x9234}, {0xF9B2, 0x96F6}, {0xF9B3, 0x9748}, {0xF9B4, 0x9818}, {0xF9B5, 0x4F8B}, {0xF9B6, 0x79AE}, {0xF9B7, 0x91B4}, {0xF9B8, 0x96B8}, +{0xF9B9, 0x60E1}, {0xF9BA, 0x4E86}, {0xF9BB, 0x50DA}, {0xF9BC, 0x5BEE}, {0xF9BD, 0x5C3F}, {0xF9BE, 0x6599}, {0xF9BF, 0x6A02}, {0xF9C0, 0x71CE}, {0xF9C1, 0x7642}, {0xF9C2, 0x84FC}, {0xF9C3, 0x907C}, +{0xF9C4, 0x9F8D}, {0xF9C5, 0x6688}, {0xF9C6, 0x962E}, {0xF9C7, 0x5289}, {0xF9C8, 0x677B}, {0xF9C9, 0x67F3}, {0xF9CA, 0x6D41}, {0xF9CB, 0x6E9C}, {0xF9CC, 0x7409}, {0xF9CD, 0x7559}, {0xF9CE, 0x786B}, +{0xF9CF, 0x7D10}, {0xF9D0, 0x985E}, {0xF9D1, 0x516D}, {0xF9D2, 0x622E}, {0xF9D3, 0x9678}, {0xF9D4, 0x502B}, {0xF9D5, 0x5D19}, {0xF9D6, 0x6DEA}, {0xF9D7, 0x8F2A}, {0xF9D8, 0x5F8B}, {0xF9D9, 0x6144}, +{0xF9DA, 0x6817}, {0xF9DB, 0x7387}, {0xF9DC, 0x9686}, {0xF9DD, 0x5229}, {0xF9DE, 0x540F}, {0xF9DF, 0x5C65}, {0xF9E0, 0x6613}, {0xF9E1, 0x674E}, {0xF9E2, 0x68A8}, {0xF9E3, 0x6CE5}, {0xF9E4, 0x7406}, +{0xF9E5, 0x75E2}, {0xF9E6, 0x7F79}, {0xF9E7, 0x88CF}, {0xF9E8, 0x88E1}, {0xF9E9, 0x91CC}, {0xF9EA, 0x96E2}, {0xF9EB, 0x533F}, {0xF9EC, 0x6EBA}, {0xF9ED, 0x541D}, {0xF9EE, 0x71D0}, {0xF9EF, 0x7498}, +{0xF9F0, 0x85FA}, {0xF9F1, 0x96A3}, {0xF9F2, 0x9C57}, {0xF9F3, 0x9E9F}, {0xF9F4, 0x6797}, {0xF9F5, 0x6DCB}, {0xF9F6, 0x81E8}, {0xF9F7, 0x7ACB}, {0xF9F8, 0x7B20}, {0xF9F9, 0x7C92}, {0xF9FA, 0x72C0}, +{0xF9FB, 0x7099}, {0xF9FC, 0x8B58}, {0xF9FD, 0x4EC0}, 
{0xF9FE, 0x8336}, {0xF9FF, 0x523A}, {0xFA00, 0x5207}, {0xFA01, 0x5EA6}, {0xFA02, 0x62D3}, {0xFA03, 0x7CD6}, {0xFA04, 0x5B85}, {0xFA05, 0x6D1E}, +{0xFA06, 0x66B4}, {0xFA07, 0x8F3B}, {0xFA08, 0x884C}, {0xFA09, 0x964D}, {0xFA0A, 0x898B}, {0xFA0B, 0x5ED3}, {0xFA0C, 0x5140}, {0xFA0D, 0x55C0}, {0xFA10, 0x585A}, {0xFA12, 0x6674}, {0xFA15, 0x51DE}, +{0xFA16, 0x732A}, {0xFA17, 0x76CA}, {0xFA18, 0x793C}, {0xFA19, 0x795E}, {0xFA1A, 0x7965}, {0xFA1B, 0x798F}, {0xFA1C, 0x9756}, {0xFA1D, 0x7CBE}, {0xFA1E, 0x7FBD}, {0xFA20, 0x8612}, {0xFA22, 0x8AF8}, +{0xFA25, 0x9038}, {0xFA26, 0x90FD}, {0xFA2A, 0x98EF}, {0xFA2B, 0x98FC}, {0xFA2C, 0x9928}, {0xFA2D, 0x9DB4}, {0xFA2E, 0x90DE}, {0xFA2F, 0x96B7}, {0xFA30, 0x4FAE}, {0xFA31, 0x50E7}, {0xFA32, 0x514D}, +{0xFA33, 0x52C9}, {0xFA34, 0x52E4}, {0xFA35, 0x5351}, {0xFA36, 0x559D}, {0xFA37, 0x5606}, {0xFA38, 0x5668}, {0xFA39, 0x5840}, {0xFA3A, 0x58A8}, {0xFA3B, 0x5C64}, {0xFA3C, 0x5C6E}, {0xFA3D, 0x6094}, +{0xFA3E, 0x6168}, {0xFA3F, 0x618E}, {0xFA40, 0x61F2}, {0xFA41, 0x654F}, {0xFA42, 0x65E2}, {0xFA43, 0x6691}, {0xFA44, 0x6885}, {0xFA45, 0x6D77}, {0xFA46, 0x6E1A}, {0xFA47, 0x6F22}, {0xFA48, 0x716E}, +{0xFA49, 0x722B}, {0xFA4A, 0x7422}, {0xFA4B, 0x7891}, {0xFA4C, 0x793E}, {0xFA4D, 0x7949}, {0xFA4E, 0x7948}, {0xFA4F, 0x7950}, {0xFA50, 0x7956}, {0xFA51, 0x795D}, {0xFA52, 0x798D}, {0xFA53, 0x798E}, +{0xFA54, 0x7A40}, {0xFA55, 0x7A81}, {0xFA56, 0x7BC0}, {0xFA57, 0x7DF4}, {0xFA58, 0x7E09}, {0xFA59, 0x7E41}, {0xFA5A, 0x7F72}, {0xFA5B, 0x8005}, {0xFA5C, 0x81ED}, {0xFA5D, 0x8279}, {0xFA5E, 0x8279}, +{0xFA5F, 0x8457}, {0xFA60, 0x8910}, {0xFA61, 0x8996}, {0xFA62, 0x8B01}, {0xFA63, 0x8B39}, {0xFA64, 0x8CD3}, {0xFA65, 0x8D08}, {0xFA66, 0x8FB6}, {0xFA67, 0x9038}, {0xFA68, 0x96E3}, {0xFA69, 0x97FF}, +{0xFA6A, 0x983B}, {0xFA6B, 0x6075}, {0xFA6C, 0x242EE}, {0xFA6D, 0x8218}, {0xFA70, 0x4E26}, {0xFA71, 0x51B5}, {0xFA72, 0x5168}, {0xFA73, 0x4F80}, {0xFA74, 0x5145}, {0xFA75, 0x5180}, {0xFA76, 0x52C7}, +{0xFA77, 0x52FA}, {0xFA78, 0x559D}, {0xFA79, 0x5555}, {0xFA7A, 0x5599}, {0xFA7B, 0x55E2}, {0xFA7C, 0x585A}, {0xFA7D, 0x58B3}, {0xFA7E, 0x5944}, {0xFA7F, 0x5954}, {0xFA80, 0x5A62}, {0xFA81, 0x5B28}, +{0xFA82, 0x5ED2}, {0xFA83, 0x5ED9}, {0xFA84, 0x5F69}, {0xFA85, 0x5FAD}, {0xFA86, 0x60D8}, {0xFA87, 0x614E}, {0xFA88, 0x6108}, {0xFA89, 0x618E}, {0xFA8A, 0x6160}, {0xFA8B, 0x61F2}, {0xFA8C, 0x6234}, +{0xFA8D, 0x63C4}, {0xFA8E, 0x641C}, {0xFA8F, 0x6452}, {0xFA90, 0x6556}, {0xFA91, 0x6674}, {0xFA92, 0x6717}, {0xFA93, 0x671B}, {0xFA94, 0x6756}, {0xFA95, 0x6B79}, {0xFA96, 0x6BBA}, {0xFA97, 0x6D41}, +{0xFA98, 0x6EDB}, {0xFA99, 0x6ECB}, {0xFA9A, 0x6F22}, {0xFA9B, 0x701E}, {0xFA9C, 0x716E}, {0xFA9D, 0x77A7}, {0xFA9E, 0x7235}, {0xFA9F, 0x72AF}, {0xFAA0, 0x732A}, {0xFAA1, 0x7471}, {0xFAA2, 0x7506}, +{0xFAA3, 0x753B}, {0xFAA4, 0x761D}, {0xFAA5, 0x761F}, {0xFAA6, 0x76CA}, {0xFAA7, 0x76DB}, {0xFAA8, 0x76F4}, {0xFAA9, 0x774A}, {0xFAAA, 0x7740}, {0xFAAB, 0x78CC}, {0xFAAC, 0x7AB1}, {0xFAAD, 0x7BC0}, +{0xFAAE, 0x7C7B}, {0xFAAF, 0x7D5B}, {0xFAB0, 0x7DF4}, {0xFAB1, 0x7F3E}, {0xFAB2, 0x8005}, {0xFAB3, 0x8352}, {0xFAB4, 0x83EF}, {0xFAB5, 0x8779}, {0xFAB6, 0x8941}, {0xFAB7, 0x8986}, {0xFAB8, 0x8996}, +{0xFAB9, 0x8ABF}, {0xFABA, 0x8AF8}, {0xFABB, 0x8ACB}, {0xFABC, 0x8B01}, {0xFABD, 0x8AFE}, {0xFABE, 0x8AED}, {0xFABF, 0x8B39}, {0xFAC0, 0x8B8A}, {0xFAC1, 0x8D08}, {0xFAC2, 0x8F38}, {0xFAC3, 0x9072}, +{0xFAC4, 0x9199}, {0xFAC5, 0x9276}, {0xFAC6, 0x967C}, {0xFAC7, 0x96E3}, {0xFAC8, 0x9756}, {0xFAC9, 0x97DB}, {0xFACA, 0x97FF}, {0xFACB, 0x980B}, {0xFACC, 0x983B}, {0xFACD, 0x9B12}, {0xFACE, 0x9F9C}, +{0xFACF, 0x2284A}, 
{0xFAD0, 0x22844}, {0xFAD1, 0x233D5}, {0xFAD2, 0x3B9D}, {0xFAD3, 0x4018}, {0xFAD4, 0x4039}, {0xFAD5, 0x25249}, {0xFAD6, 0x25CD0}, {0xFAD7, 0x27ED3}, {0xFAD8, 0x9F43}, +{0xFAD9, 0x9F8E}, {0xFB1D, 0x5D9}, {0xFB1D, 0x5B4}, {0xFB1F, 0x5F2}, {0xFB1F, 0x5B7}, {0xFB2A, 0x5E9}, {0xFB2A, 0x5C1}, {0xFB2B, 0x5E9}, {0xFB2B, 0x5C2}, {0xFB2C, 0x5E9}, {0xFB2C, 0x5BC}, +{0xFB2C, 0x5C1}, {0xFB2D, 0x5E9}, {0xFB2D, 0x5BC}, {0xFB2D, 0x5C2}, {0xFB2E, 0x5D0}, {0xFB2E, 0x5B7}, {0xFB2F, 0x5D0}, {0xFB2F, 0x5B8}, {0xFB30, 0x5D0}, {0xFB30, 0x5BC}, {0xFB31, 0x5D1}, +{0xFB31, 0x5BC}, {0xFB32, 0x5D2}, {0xFB32, 0x5BC}, {0xFB33, 0x5D3}, {0xFB33, 0x5BC}, {0xFB34, 0x5D4}, {0xFB34, 0x5BC}, {0xFB35, 0x5D5}, {0xFB35, 0x5BC}, {0xFB36, 0x5D6}, {0xFB36, 0x5BC}, +{0xFB38, 0x5D8}, {0xFB38, 0x5BC}, {0xFB39, 0x5D9}, {0xFB39, 0x5BC}, {0xFB3A, 0x5DA}, {0xFB3A, 0x5BC}, {0xFB3B, 0x5DB}, {0xFB3B, 0x5BC}, {0xFB3C, 0x5DC}, {0xFB3C, 0x5BC}, {0xFB3E, 0x5DE}, +{0xFB3E, 0x5BC}, {0xFB40, 0x5E0}, {0xFB40, 0x5BC}, {0xFB41, 0x5E1}, {0xFB41, 0x5BC}, {0xFB43, 0x5E3}, {0xFB43, 0x5BC}, {0xFB44, 0x5E4}, {0xFB44, 0x5BC}, {0xFB46, 0x5E6}, {0xFB46, 0x5BC}, +{0xFB47, 0x5E7}, {0xFB47, 0x5BC}, {0xFB48, 0x5E8}, {0xFB48, 0x5BC}, {0xFB49, 0x5E9}, {0xFB49, 0x5BC}, {0xFB4A, 0x5EA}, {0xFB4A, 0x5BC}, {0xFB4B, 0x5D5}, {0xFB4B, 0x5B9}, {0xFB4C, 0x5D1}, +{0xFB4C, 0x5BF}, {0xFB4D, 0x5DB}, {0xFB4D, 0x5BF}, {0xFB4E, 0x5E4}, {0xFB4E, 0x5BF}, {0x1109A, 0x11099}, {0x1109A, 0x110BA}, {0x1109C, 0x1109B}, {0x1109C, 0x110BA}, {0x110AB, 0x110A5}, +{0x110AB, 0x110BA}, {0x1112E, 0x11131}, {0x1112E, 0x11127}, {0x1112F, 0x11132}, {0x1112F, 0x11127}, {0x1134B, 0x11347}, {0x1134B, 0x1133E}, {0x1134C, 0x11347}, {0x1134C, 0x11357}, {0x114BB, 0x114B9}, +{0x114BB, 0x114BA}, {0x114BC, 0x114B9}, {0x114BC, 0x114B0}, {0x114BE, 0x114B9}, {0x114BE, 0x114BD}, {0x115BA, 0x115B8}, {0x115BA, 0x115AF}, {0x115BB, 0x115B9}, {0x115BB, 0x115AF}, {0x1D15E, 0x1D157}, +{0x1D15E, 0x1D165}, {0x1D15F, 0x1D158}, {0x1D15F, 0x1D165}, {0x1D160, 0x1D158}, {0x1D160, 0x1D165}, {0x1D160, 0x1D16E}, {0x1D161, 0x1D158}, {0x1D161, 0x1D165}, {0x1D161, 0x1D16F}, {0x1D162, 0x1D158}, +{0x1D162, 0x1D165}, {0x1D162, 0x1D170}, {0x1D163, 0x1D158}, {0x1D163, 0x1D165}, {0x1D163, 0x1D171}, {0x1D164, 0x1D158}, {0x1D164, 0x1D165}, {0x1D164, 0x1D172}, {0x1D1BB, 0x1D1B9}, {0x1D1BB, 0x1D165}, +{0x1D1BC, 0x1D1BA}, {0x1D1BC, 0x1D165}, {0x1D1BD, 0x1D1B9}, {0x1D1BD, 0x1D165}, {0x1D1BD, 0x1D16E}, {0x1D1BE, 0x1D1BA}, {0x1D1BE, 0x1D165}, {0x1D1BE, 0x1D16E}, {0x1D1BF, 0x1D1B9}, {0x1D1BF, 0x1D165}, +{0x1D1BF, 0x1D16F}, {0x1D1C0, 0x1D1BA}, {0x1D1C0, 0x1D165}, {0x1D1C0, 0x1D16F}, {0x2F800, 0x4E3D}, {0x2F801, 0x4E38}, {0x2F802, 0x4E41}, {0x2F803, 0x20122}, {0x2F804, 0x4F60}, {0x2F805, 0x4FAE}, +{0x2F806, 0x4FBB}, {0x2F807, 0x5002}, {0x2F808, 0x507A}, {0x2F809, 0x5099}, {0x2F80A, 0x50E7}, {0x2F80B, 0x50CF}, {0x2F80C, 0x349E}, {0x2F80D, 0x2063A}, {0x2F80E, 0x514D}, {0x2F80F, 0x5154}, +{0x2F810, 0x5164}, {0x2F811, 0x5177}, {0x2F812, 0x2051C}, {0x2F813, 0x34B9}, {0x2F814, 0x5167}, {0x2F815, 0x518D}, {0x2F816, 0x2054B}, {0x2F817, 0x5197}, {0x2F818, 0x51A4}, {0x2F819, 0x4ECC}, +{0x2F81A, 0x51AC}, {0x2F81B, 0x51B5}, {0x2F81C, 0x291DF}, {0x2F81D, 0x51F5}, {0x2F81E, 0x5203}, {0x2F81F, 0x34DF}, {0x2F820, 0x523B}, {0x2F821, 0x5246}, {0x2F822, 0x5272}, {0x2F823, 0x5277}, +{0x2F824, 0x3515}, {0x2F825, 0x52C7}, {0x2F826, 0x52C9}, {0x2F827, 0x52E4}, {0x2F828, 0x52FA}, {0x2F829, 0x5305}, {0x2F82A, 0x5306}, {0x2F82B, 0x5317}, {0x2F82C, 0x5349}, {0x2F82D, 0x5351}, +{0x2F82E, 0x535A}, {0x2F82F, 0x5373}, {0x2F830, 0x537D}, {0x2F831, 0x537F}, {0x2F832, 0x537F}, 
{0x2F833, 0x537F}, {0x2F834, 0x20A2C}, {0x2F835, 0x7070}, {0x2F836, 0x53CA}, {0x2F837, 0x53DF}, +{0x2F838, 0x20B63}, {0x2F839, 0x53EB}, {0x2F83A, 0x53F1}, {0x2F83B, 0x5406}, {0x2F83C, 0x549E}, {0x2F83D, 0x5438}, {0x2F83E, 0x5448}, {0x2F83F, 0x5468}, {0x2F840, 0x54A2}, {0x2F841, 0x54F6}, +{0x2F842, 0x5510}, {0x2F843, 0x5553}, {0x2F844, 0x5563}, {0x2F845, 0x5584}, {0x2F846, 0x5584}, {0x2F847, 0x5599}, {0x2F848, 0x55AB}, {0x2F849, 0x55B3}, {0x2F84A, 0x55C2}, {0x2F84B, 0x5716}, +{0x2F84C, 0x5606}, {0x2F84D, 0x5717}, {0x2F84E, 0x5651}, {0x2F84F, 0x5674}, {0x2F850, 0x5207}, {0x2F851, 0x58EE}, {0x2F852, 0x57CE}, {0x2F853, 0x57F4}, {0x2F854, 0x580D}, {0x2F855, 0x578B}, +{0x2F856, 0x5832}, {0x2F857, 0x5831}, {0x2F858, 0x58AC}, {0x2F859, 0x214E4}, {0x2F85A, 0x58F2}, {0x2F85B, 0x58F7}, {0x2F85C, 0x5906}, {0x2F85D, 0x591A}, {0x2F85E, 0x5922}, {0x2F85F, 0x5962}, +{0x2F860, 0x216A8}, {0x2F861, 0x216EA}, {0x2F862, 0x59EC}, {0x2F863, 0x5A1B}, {0x2F864, 0x5A27}, {0x2F865, 0x59D8}, {0x2F866, 0x5A66}, {0x2F867, 0x36EE}, {0x2F868, 0x36FC}, {0x2F869, 0x5B08}, +{0x2F86A, 0x5B3E}, {0x2F86B, 0x5B3E}, {0x2F86C, 0x219C8}, {0x2F86D, 0x5BC3}, {0x2F86E, 0x5BD8}, {0x2F86F, 0x5BE7}, {0x2F870, 0x5BF3}, {0x2F871, 0x21B18}, {0x2F872, 0x5BFF}, {0x2F873, 0x5C06}, +{0x2F874, 0x5F53}, {0x2F875, 0x5C22}, {0x2F876, 0x3781}, {0x2F877, 0x5C60}, {0x2F878, 0x5C6E}, {0x2F879, 0x5CC0}, {0x2F87A, 0x5C8D}, {0x2F87B, 0x21DE4}, {0x2F87C, 0x5D43}, {0x2F87D, 0x21DE6}, +{0x2F87E, 0x5D6E}, {0x2F87F, 0x5D6B}, {0x2F880, 0x5D7C}, {0x2F881, 0x5DE1}, {0x2F882, 0x5DE2}, {0x2F883, 0x382F}, {0x2F884, 0x5DFD}, {0x2F885, 0x5E28}, {0x2F886, 0x5E3D}, {0x2F887, 0x5E69}, +{0x2F888, 0x3862}, {0x2F889, 0x22183}, {0x2F88A, 0x387C}, {0x2F88B, 0x5EB0}, {0x2F88C, 0x5EB3}, {0x2F88D, 0x5EB6}, {0x2F88E, 0x5ECA}, {0x2F88F, 0x2A392}, {0x2F890, 0x5EFE}, {0x2F891, 0x22331}, +{0x2F892, 0x22331}, {0x2F893, 0x8201}, {0x2F894, 0x5F22}, {0x2F895, 0x5F22}, {0x2F896, 0x38C7}, {0x2F897, 0x232B8}, {0x2F898, 0x261DA}, {0x2F899, 0x5F62}, {0x2F89A, 0x5F6B}, {0x2F89B, 0x38E3}, +{0x2F89C, 0x5F9A}, {0x2F89D, 0x5FCD}, {0x2F89E, 0x5FD7}, {0x2F89F, 0x5FF9}, {0x2F8A0, 0x6081}, {0x2F8A1, 0x393A}, {0x2F8A2, 0x391C}, {0x2F8A3, 0x6094}, {0x2F8A4, 0x226D4}, {0x2F8A5, 0x60C7}, +{0x2F8A6, 0x6148}, {0x2F8A7, 0x614C}, {0x2F8A8, 0x614E}, {0x2F8A9, 0x614C}, {0x2F8AA, 0x617A}, {0x2F8AB, 0x618E}, {0x2F8AC, 0x61B2}, {0x2F8AD, 0x61A4}, {0x2F8AE, 0x61AF}, {0x2F8AF, 0x61DE}, +{0x2F8B0, 0x61F2}, {0x2F8B1, 0x61F6}, {0x2F8B2, 0x6210}, {0x2F8B3, 0x621B}, {0x2F8B4, 0x625D}, {0x2F8B5, 0x62B1}, {0x2F8B6, 0x62D4}, {0x2F8B7, 0x6350}, {0x2F8B8, 0x22B0C}, {0x2F8B9, 0x633D}, +{0x2F8BA, 0x62FC}, {0x2F8BB, 0x6368}, {0x2F8BC, 0x6383}, {0x2F8BD, 0x63E4}, {0x2F8BE, 0x22BF1}, {0x2F8BF, 0x6422}, {0x2F8C0, 0x63C5}, {0x2F8C1, 0x63A9}, {0x2F8C2, 0x3A2E}, {0x2F8C3, 0x6469}, +{0x2F8C4, 0x647E}, {0x2F8C5, 0x649D}, {0x2F8C6, 0x6477}, {0x2F8C7, 0x3A6C}, {0x2F8C8, 0x654F}, {0x2F8C9, 0x656C}, {0x2F8CA, 0x2300A}, {0x2F8CB, 0x65E3}, {0x2F8CC, 0x66F8}, {0x2F8CD, 0x6649}, +{0x2F8CE, 0x3B19}, {0x2F8CF, 0x6691}, {0x2F8D0, 0x3B08}, {0x2F8D1, 0x3AE4}, {0x2F8D2, 0x5192}, {0x2F8D3, 0x5195}, {0x2F8D4, 0x6700}, {0x2F8D5, 0x669C}, {0x2F8D6, 0x80AD}, {0x2F8D7, 0x43D9}, +{0x2F8D8, 0x6717}, {0x2F8D9, 0x671B}, {0x2F8DA, 0x6721}, {0x2F8DB, 0x675E}, {0x2F8DC, 0x6753}, {0x2F8DD, 0x233C3}, {0x2F8DE, 0x3B49}, {0x2F8DF, 0x67FA}, {0x2F8E0, 0x6785}, {0x2F8E1, 0x6852}, +{0x2F8E2, 0x6885}, {0x2F8E3, 0x2346D}, {0x2F8E4, 0x688E}, {0x2F8E5, 0x681F}, {0x2F8E6, 0x6914}, {0x2F8E7, 0x3B9D}, {0x2F8E8, 0x6942}, {0x2F8E9, 0x69A3}, {0x2F8EA, 0x69EA}, {0x2F8EB, 0x6AA8}, 
+{0x2F8EC, 0x236A3}, {0x2F8ED, 0x6ADB}, {0x2F8EE, 0x3C18}, {0x2F8EF, 0x6B21}, {0x2F8F0, 0x238A7}, {0x2F8F1, 0x6B54}, {0x2F8F2, 0x3C4E}, {0x2F8F3, 0x6B72}, {0x2F8F4, 0x6B9F}, {0x2F8F5, 0x6BBA}, +{0x2F8F6, 0x6BBB}, {0x2F8F7, 0x23A8D}, {0x2F8F8, 0x21D0B}, {0x2F8F9, 0x23AFA}, {0x2F8FA, 0x6C4E}, {0x2F8FB, 0x23CBC}, {0x2F8FC, 0x6CBF}, {0x2F8FD, 0x6CCD}, {0x2F8FE, 0x6C67}, {0x2F8FF, 0x6D16}, +{0x2F900, 0x6D3E}, {0x2F901, 0x6D77}, {0x2F902, 0x6D41}, {0x2F903, 0x6D69}, {0x2F904, 0x6D78}, {0x2F905, 0x6D85}, {0x2F906, 0x23D1E}, {0x2F907, 0x6D34}, {0x2F908, 0x6E2F}, {0x2F909, 0x6E6E}, +{0x2F90A, 0x3D33}, {0x2F90B, 0x6ECB}, {0x2F90C, 0x6EC7}, {0x2F90D, 0x23ED1}, {0x2F90E, 0x6DF9}, {0x2F90F, 0x6F6E}, {0x2F910, 0x23F5E}, {0x2F911, 0x23F8E}, {0x2F912, 0x6FC6}, {0x2F913, 0x7039}, +{0x2F914, 0x701E}, {0x2F915, 0x701B}, {0x2F916, 0x3D96}, {0x2F917, 0x704A}, {0x2F918, 0x707D}, {0x2F919, 0x7077}, {0x2F91A, 0x70AD}, {0x2F91B, 0x20525}, {0x2F91C, 0x7145}, {0x2F91D, 0x24263}, +{0x2F91E, 0x719C}, {0x2F91F, 0x243AB}, {0x2F920, 0x7228}, {0x2F921, 0x7235}, {0x2F922, 0x7250}, {0x2F923, 0x24608}, {0x2F924, 0x7280}, {0x2F925, 0x7295}, {0x2F926, 0x24735}, {0x2F927, 0x24814}, +{0x2F928, 0x737A}, {0x2F929, 0x738B}, {0x2F92A, 0x3EAC}, {0x2F92B, 0x73A5}, {0x2F92C, 0x3EB8}, {0x2F92D, 0x3EB8}, {0x2F92E, 0x7447}, {0x2F92F, 0x745C}, {0x2F930, 0x7471}, {0x2F931, 0x7485}, +{0x2F932, 0x74CA}, {0x2F933, 0x3F1B}, {0x2F934, 0x7524}, {0x2F935, 0x24C36}, {0x2F936, 0x753E}, {0x2F937, 0x24C92}, {0x2F938, 0x7570}, {0x2F939, 0x2219F}, {0x2F93A, 0x7610}, {0x2F93B, 0x24FA1}, +{0x2F93C, 0x24FB8}, {0x2F93D, 0x25044}, {0x2F93E, 0x3FFC}, {0x2F93F, 0x4008}, {0x2F940, 0x76F4}, {0x2F941, 0x250F3}, {0x2F942, 0x250F2}, {0x2F943, 0x25119}, {0x2F944, 0x25133}, {0x2F945, 0x771E}, +{0x2F946, 0x771F}, {0x2F947, 0x771F}, {0x2F948, 0x774A}, {0x2F949, 0x4039}, {0x2F94A, 0x778B}, {0x2F94B, 0x4046}, {0x2F94C, 0x4096}, {0x2F94D, 0x2541D}, {0x2F94E, 0x784E}, {0x2F94F, 0x788C}, +{0x2F950, 0x78CC}, {0x2F951, 0x40E3}, {0x2F952, 0x25626}, {0x2F953, 0x7956}, {0x2F954, 0x2569A}, {0x2F955, 0x256C5}, {0x2F956, 0x798F}, {0x2F957, 0x79EB}, {0x2F958, 0x412F}, {0x2F959, 0x7A40}, +{0x2F95A, 0x7A4A}, {0x2F95B, 0x7A4F}, {0x2F95C, 0x2597C}, {0x2F95D, 0x25AA7}, {0x2F95E, 0x25AA7}, {0x2F95F, 0x7AEE}, {0x2F960, 0x4202}, {0x2F961, 0x25BAB}, {0x2F962, 0x7BC6}, {0x2F963, 0x7BC9}, +{0x2F964, 0x4227}, {0x2F965, 0x25C80}, {0x2F966, 0x7CD2}, {0x2F967, 0x42A0}, {0x2F968, 0x7CE8}, {0x2F969, 0x7CE3}, {0x2F96A, 0x7D00}, {0x2F96B, 0x25F86}, {0x2F96C, 0x7D63}, {0x2F96D, 0x4301}, +{0x2F96E, 0x7DC7}, {0x2F96F, 0x7E02}, {0x2F970, 0x7E45}, {0x2F971, 0x4334}, {0x2F972, 0x26228}, {0x2F973, 0x26247}, {0x2F974, 0x4359}, {0x2F975, 0x262D9}, {0x2F976, 0x7F7A}, {0x2F977, 0x2633E}, +{0x2F978, 0x7F95}, {0x2F979, 0x7FFA}, {0x2F97A, 0x8005}, {0x2F97B, 0x264DA}, {0x2F97C, 0x26523}, {0x2F97D, 0x8060}, {0x2F97E, 0x265A8}, {0x2F97F, 0x8070}, {0x2F980, 0x2335F}, {0x2F981, 0x43D5}, +{0x2F982, 0x80B2}, {0x2F983, 0x8103}, {0x2F984, 0x440B}, {0x2F985, 0x813E}, {0x2F986, 0x5AB5}, {0x2F987, 0x267A7}, {0x2F988, 0x267B5}, {0x2F989, 0x23393}, {0x2F98A, 0x2339C}, {0x2F98B, 0x8201}, +{0x2F98C, 0x8204}, {0x2F98D, 0x8F9E}, {0x2F98E, 0x446B}, {0x2F98F, 0x8291}, {0x2F990, 0x828B}, {0x2F991, 0x829D}, {0x2F992, 0x52B3}, {0x2F993, 0x82B1}, {0x2F994, 0x82B3}, {0x2F995, 0x82BD}, +{0x2F996, 0x82E6}, {0x2F997, 0x26B3C}, {0x2F998, 0x82E5}, {0x2F999, 0x831D}, {0x2F99A, 0x8363}, {0x2F99B, 0x83AD}, {0x2F99C, 0x8323}, {0x2F99D, 0x83BD}, {0x2F99E, 0x83E7}, {0x2F99F, 0x8457}, +{0x2F9A0, 0x8353}, {0x2F9A1, 0x83CA}, {0x2F9A2, 0x83CC}, {0x2F9A3, 
0x83DC}, {0x2F9A4, 0x26C36}, {0x2F9A5, 0x26D6B}, {0x2F9A6, 0x26CD5}, {0x2F9A7, 0x452B}, {0x2F9A8, 0x84F1}, {0x2F9A9, 0x84F3}, +{0x2F9AA, 0x8516}, {0x2F9AB, 0x273CA}, {0x2F9AC, 0x8564}, {0x2F9AD, 0x26F2C}, {0x2F9AE, 0x455D}, {0x2F9AF, 0x4561}, {0x2F9B0, 0x26FB1}, {0x2F9B1, 0x270D2}, {0x2F9B2, 0x456B}, {0x2F9B3, 0x8650}, +{0x2F9B4, 0x865C}, {0x2F9B5, 0x8667}, {0x2F9B6, 0x8669}, {0x2F9B7, 0x86A9}, {0x2F9B8, 0x8688}, {0x2F9B9, 0x870E}, {0x2F9BA, 0x86E2}, {0x2F9BB, 0x8779}, {0x2F9BC, 0x8728}, {0x2F9BD, 0x876B}, +{0x2F9BE, 0x8786}, {0x2F9BF, 0x45D7}, {0x2F9C0, 0x87E1}, {0x2F9C1, 0x8801}, {0x2F9C2, 0x45F9}, {0x2F9C3, 0x8860}, {0x2F9C4, 0x8863}, {0x2F9C5, 0x27667}, {0x2F9C6, 0x88D7}, {0x2F9C7, 0x88DE}, +{0x2F9C8, 0x4635}, {0x2F9C9, 0x88FA}, {0x2F9CA, 0x34BB}, {0x2F9CB, 0x278AE}, {0x2F9CC, 0x27966}, {0x2F9CD, 0x46BE}, {0x2F9CE, 0x46C7}, {0x2F9CF, 0x8AA0}, {0x2F9D0, 0x8AED}, {0x2F9D1, 0x8B8A}, +{0x2F9D2, 0x8C55}, {0x2F9D3, 0x27CA8}, {0x2F9D4, 0x8CAB}, {0x2F9D5, 0x8CC1}, {0x2F9D6, 0x8D1B}, {0x2F9D7, 0x8D77}, {0x2F9D8, 0x27F2F}, {0x2F9D9, 0x20804}, {0x2F9DA, 0x8DCB}, {0x2F9DB, 0x8DBC}, +{0x2F9DC, 0x8DF0}, {0x2F9DD, 0x208DE}, {0x2F9DE, 0x8ED4}, {0x2F9DF, 0x8F38}, {0x2F9E0, 0x285D2}, {0x2F9E1, 0x285ED}, {0x2F9E2, 0x9094}, {0x2F9E3, 0x90F1}, {0x2F9E4, 0x9111}, {0x2F9E5, 0x2872E}, +{0x2F9E6, 0x911B}, {0x2F9E7, 0x9238}, {0x2F9E8, 0x92D7}, {0x2F9E9, 0x92D8}, {0x2F9EA, 0x927C}, {0x2F9EB, 0x93F9}, {0x2F9EC, 0x9415}, {0x2F9ED, 0x28BFA}, {0x2F9EE, 0x958B}, {0x2F9EF, 0x4995}, +{0x2F9F0, 0x95B7}, {0x2F9F1, 0x28D77}, {0x2F9F2, 0x49E6}, {0x2F9F3, 0x96C3}, {0x2F9F4, 0x5DB2}, {0x2F9F5, 0x9723}, {0x2F9F6, 0x29145}, {0x2F9F7, 0x2921A}, {0x2F9F8, 0x4A6E}, {0x2F9F9, 0x4A76}, +{0x2F9FA, 0x97E0}, {0x2F9FB, 0x2940A}, {0x2F9FC, 0x4AB2}, {0x2F9FD, 0x29496}, {0x2F9FE, 0x980B}, {0x2F9FF, 0x980B}, {0x2FA00, 0x9829}, {0x2FA01, 0x295B6}, {0x2FA02, 0x98E2}, {0x2FA03, 0x4B33}, +{0x2FA04, 0x9929}, {0x2FA05, 0x99A7}, {0x2FA06, 0x99C2}, {0x2FA07, 0x99FE}, {0x2FA08, 0x4BCE}, {0x2FA09, 0x29B30}, {0x2FA0A, 0x9B12}, {0x2FA0B, 0x9C40}, {0x2FA0C, 0x9CFD}, {0x2FA0D, 0x4CCE}, +{0x2FA0E, 0x4CED}, {0x2FA0F, 0x9D67}, {0x2FA10, 0x2A0CE}, {0x2FA11, 0x4CF8}, {0x2FA12, 0x2A105}, {0x2FA13, 0x2A20E}, {0x2FA14, 0x2A291}, {0x2FA15, 0x9EBB}, {0x2FA16, 0x4D56}, {0x2FA17, 0x9EF9}, +{0x2FA18, 0x9EFE}, {0x2FA19, 0x9F05}, {0x2FA1A, 0x9F0F}, {0x2FA1B, 0x9F16}, {0x2FA1D, 0x2A600}, }; static std::string codepoint_to_utf8(uint32_t cp) { From 3ab8b3a92ede46df88bc5a2dfca3777de4a2b2b6 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 1 Mar 2024 12:39:06 +0100 Subject: [PATCH 748/811] llama : cleanup unused mmq flags (#5772) * cleanup unused --no-mul-mat-q,-nommq, -mmq, --mul-mat-q, mul_mat_q * remove: mul_mat_q in compare llama bench and usage * update llama-bench --------- Co-authored-by: slaren --- common/common.cpp | 2 -- common/common.h | 1 - examples/batched-bench/batched-bench.cpp | 18 +++++--------- examples/llama-bench/README.md | 1 - examples/llama-bench/llama-bench.cpp | 30 +++--------------------- examples/server/server.cpp | 8 ------- llama.cpp | 3 --- llama.h | 1 - scripts/compare-llama-bench.py | 2 +- 9 files changed, 10 insertions(+), 56 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 18289755c..bf1ed8a66 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1281,7 +1281,6 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.n_batch = params.n_batch; cparams.n_threads = params.n_threads; cparams.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; - cparams.mul_mat_q = params.mul_mat_q; cparams.seed = params.seed; cparams.logits_all = params.logits_all; cparams.embedding = params.embedding; @@ -1725,7 +1724,6 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "n_predict: %d # default: -1 (unlimited)\n", params.n_predict); fprintf(stream, "n_probs: %d # only used by server binary, default: 0\n", sparams.n_probs); fprintf(stream, "no_mmap: %s # default: false\n", !params.use_mmap ? "true" : "false"); - fprintf(stream, "no_mul_mat_q: %s # default: false\n", !params.mul_mat_q ? "true" : "false"); fprintf(stream, "no_penalize_nl: %s # default: false\n", !sparams.penalize_nl ? "true" : "false"); fprintf(stream, "ppl_output_type: %d # default: 0\n", params.ppl_output_type); fprintf(stream, "ppl_stride: %d # default: 0\n", params.ppl_stride); diff --git a/common/common.h b/common/common.h index 25003df26..ab62bdb82 100644 --- a/common/common.h +++ b/common/common.h @@ -115,7 +115,6 @@ struct gpt_params { bool kl_divergence = false; // compute KL-divergence - bool mul_mat_q = true; // if true, use mul_mat_q kernels instead of cuBLAS bool random_prompt = false; // do not randomize prompt if none provided bool use_color = false; // use color to distinguish generations and inputs bool interactive = false; // interactive mode diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index b4b8a38e1..19aff18ae 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -32,16 +32,15 @@ int main(int argc, char ** argv) { gpt_params params; if (argc == 1 || argv[1][0] == '-') { - printf("usage: %s MODEL_PATH [N_KV_MAX] [IS_PP_SHARED] [NGL] [MMQ] \n" , argv[0]); + printf("usage: %s MODEL_PATH [N_KV_MAX] [IS_PP_SHARED] [NGL] \n" , argv[0]); printf(" , and PL are comma-separated lists of numbers without spaces\n\n"); - printf(" example: %s ggml-model-f16.gguf 2048 0 999 0 128,256,512 128,256 1,2,4,8,16,32\n\n", argv[0]); + printf(" example: %s ggml-model-f16.gguf 2048 0 999 128,256,512 128,256 1,2,4,8,16,32\n\n", argv[0]); return 1 ; } int n_kv_max = 2048; int is_pp_shared = 0; int n_gpu_layers = 0; - int mmq = 0; std::vector n_pp = { 128, 256, 512, 1024, 2048, 3584, 7680, }; std::vector n_tg = { 128, 256, }; @@ -65,19 +64,15 @@ int main(int argc, char ** argv) { } if (argc >= 6) { - mmq = std::atoi(argv[5]); + n_pp = parse_list(argv[5]); } if (argc >= 7) { - n_pp = parse_list(argv[6]); + n_tg = parse_list(argv[6]); } if (argc >= 8) { - n_tg = parse_list(argv[7]); - } - - if (argc >= 9) { - n_pl = parse_list(argv[8]); + n_pl = parse_list(argv[7]); } // init LLM @@ -106,7 +101,6 @@ int main(int argc, char ** argv) { ctx_params.seed = 1234; ctx_params.n_ctx = n_kv_max; ctx_params.n_batch = 512; - ctx_params.mul_mat_q = mmq; ctx_params.n_threads = params.n_threads; ctx_params.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; @@ -159,7 +153,7 @@ int main(int argc, char ** argv) { } LOG_TEE("\n"); - LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); + LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, ctx_params.n_threads, ctx_params.n_threads_batch); LOG_TEE("\n"); LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); diff --git a/examples/llama-bench/README.md b/examples/llama-bench/README.md index 374e40a7d..10f37b441 100644 --- a/examples/llama-bench/README.md +++ b/examples/llama-bench/README.md @@ -35,7 +35,6 @@ options: -mg, --main-gpu (default: 0) -nkvo, --no-kv-offload <0|1> (default: 0) -mmp, --mmap <0|1> (default: 1) - -mmq, --mul-mat-q <0|1> (default: 1) -ts, --tensor_split (default: 0) -r, --repetitions (default: 5) -o, --output (default: md) diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 8fec3d43d..c2155b2ac 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -176,7 +176,6 @@ struct cmd_params { std::vector split_mode; std::vector main_gpu; std::vector no_kv_offload; - std::vector mul_mat_q; std::vector> tensor_split; std::vector use_mmap; int reps; @@ -196,7 +195,6 @@ static const cmd_params cmd_params_defaults = { /* split_mode */ {LLAMA_SPLIT_MODE_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, - /* mul_mat_q */ {true}, /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, /* use_mmap */ {true}, /* reps */ 5, @@ -221,7 +219,6 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); printf(" -mmp, --mmap <0|1> (default: %s)\n", join(cmd_params_defaults.use_mmap, ",").c_str()); - printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); printf(" -ts, --tensor_split (default: 0)\n"); printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); printf(" -o, --output (default: %s)\n", output_format_str(cmd_params_defaults.output_format)); @@ -383,13 +380,6 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.no_kv_offload.insert(params.no_kv_offload.end(), p.begin(), p.end()); - } else if (arg == "-mmq" || arg == "--mul-mat-q") { - if (++i >= argc) { - invalid_param = true; - break; - } - auto p = split(argv[i], split_delim); - params.mul_mat_q.insert(params.mul_mat_q.end(), p.begin(), p.end()); } else if (arg == "-mmp" || arg == "--mmap") { if (++i >= argc) { invalid_param = true; @@ -466,7 +456,6 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.split_mode.empty()) { params.split_mode = cmd_params_defaults.split_mode; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } - if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } if (params.tensor_split.empty()) { params.tensor_split = 
cmd_params_defaults.tensor_split; } if (params.use_mmap.empty()) { params.use_mmap = cmd_params_defaults.use_mmap; } if (params.n_threads.empty()) { params.n_threads = cmd_params_defaults.n_threads; } @@ -486,7 +475,6 @@ struct cmd_params_instance { llama_split_mode split_mode; int main_gpu; bool no_kv_offload; - bool mul_mat_q; std::vector tensor_split; bool use_mmap; @@ -518,7 +506,6 @@ struct cmd_params_instance { cparams.n_batch = n_batch; cparams.type_k = type_k; cparams.type_v = type_v; - cparams.mul_mat_q = mul_mat_q; cparams.offload_kqv = !no_kv_offload; return cparams; @@ -538,7 +525,6 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & nb : params.n_batch) for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) - for (const auto & mmq : params.mul_mat_q) for (const auto & nkvo : params.no_kv_offload) for (const auto & nt : params.n_threads) { for (const auto & n_prompt : params.n_prompt) { @@ -557,7 +543,6 @@ static std::vector get_cmd_params_instances(const cmd_param /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, - /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, }; @@ -580,7 +565,6 @@ static std::vector get_cmd_params_instances(const cmd_param /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, - /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, }; @@ -616,7 +600,6 @@ struct test { llama_split_mode split_mode; int main_gpu; bool no_kv_offload; - bool mul_mat_q; std::vector tensor_split; bool use_mmap; int n_prompt; @@ -639,7 +622,6 @@ struct test { split_mode = inst.split_mode; main_gpu = inst.main_gpu; no_kv_offload = inst.no_kv_offload; - mul_mat_q = inst.mul_mat_q; tensor_split = inst.tensor_split; use_mmap = inst.use_mmap; n_prompt = inst.n_prompt; @@ -713,7 +695,7 @@ struct test { "n_batch", "n_threads", "type_k", "type_v", "n_gpu_layers", "split_mode", "main_gpu", "no_kv_offload", - "mul_mat_q", "tensor_split", "use_mmap", + "tensor_split", "use_mmap", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -733,7 +715,7 @@ struct test { } if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || field == "gpu_blas" || field == "blas" || field == "sycl" ||field == "f16_kv" || field == "no_kv_offload" || - field == "mul_mat_q" || field == "use_mmap") { + field == "use_mmap") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -767,7 +749,7 @@ struct test { std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), split_mode_str(split_mode), std::to_string(main_gpu), std::to_string(no_kv_offload), - std::to_string(mul_mat_q), tensor_split_str, std::to_string(use_mmap), + tensor_split_str, std::to_string(use_mmap), std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -931,9 +913,6 @@ struct markdown_printer : public printer { if (field == "n_threads") { return "threads"; } - if (field == "mul_mat_q") { - return "mmq"; - } if (field == "no_kv_offload") { return "nkvo"; } @@ -974,9 +953,6 @@ struct markdown_printer : public printer { if (params.split_mode.size() > 1 || params.split_mode != cmd_params_defaults.split_mode) { fields.emplace_back("split_mode"); } - if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { - 
fields.emplace_back("mul_mat_q"); - } if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { fields.emplace_back("no_kv_offload"); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index eea987966..2b2f4a0f4 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2390,14 +2390,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } #else LOG_WARNING("llama.cpp was compiled without cuBLAS. It is not possible to set a tensor split.\n", {}); -#endif // GGML_USE_CUBLAS - } - else if (arg == "--no-mul-mat-q" || arg == "-nommq") - { -#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_SYCL) - params.mul_mat_q = false; -#else - LOG_WARNING("warning: llama.cpp was compiled without cuBLAS. Disabling mul_mat_q kernels has no effect.\n", {}); #endif // GGML_USE_CUBLAS } else if (arg == "--main-gpu" || arg == "-mg") diff --git a/llama.cpp b/llama.cpp index a35f07aa4..073fd3b70 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1645,7 +1645,6 @@ struct llama_cparams { float yarn_beta_slow; float defrag_thold; - bool mul_mat_q; bool offload_kqv; bool do_pooling; @@ -11633,7 +11632,6 @@ struct llama_context_params llama_context_default_params() { /*.cb_eval_user_data =*/ nullptr, /*.type_k =*/ GGML_TYPE_F16, /*.type_v =*/ GGML_TYPE_F16, - /*.mul_mat_q =*/ true, /*.logits_all =*/ false, /*.embedding =*/ false, /*.offload_kqv =*/ true, @@ -11785,7 +11783,6 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.defrag_thold = params.defrag_thold; - cparams.mul_mat_q = params.mul_mat_q; cparams.offload_kqv = params.offload_kqv; cparams.do_pooling = params.do_pooling; diff --git a/llama.h b/llama.h index 4d0ebe37d..ed51f478a 100644 --- a/llama.h +++ b/llama.h @@ -255,7 +255,6 @@ extern "C" { enum ggml_type type_v; // data type for V cache // Keep the booleans together to avoid misalignment during copy-by-value. - bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true) bool logits_all; // the llama_eval() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py index 70737f976..39c3e52e5 100755 --- a/scripts/compare-llama-bench.py +++ b/scripts/compare-llama-bench.py @@ -31,7 +31,7 @@ PRETTY_NAMES = { "model_size": "Model Size [GiB]", "model_n_params": "Num. of Parameters", "n_batch": "Batch size", "n_threads": "Threads", "type_k": "K type", "type_v": "V type", "n_gpu_layers": "GPU layers", "main_gpu": "Main GPU", "no_kv_offload": "NKVO", - "mul_mat_q": "MMQ", "tensor_split": "Tensor split" + "tensor_split": "Tensor split" } DEFAULT_SHOW = ["model_type"] # Always show these properties by default. 
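After the cleanup above there is no `mul_mat_q` field left on `llama_context_params`; callers simply set the remaining fields and the mul_mat_q kernels are used unconditionally on backends that have them. Below is a minimal sketch of the resulting caller-side setup, assuming an already-loaded `llama_model *` and reusing the field values from the `batched-bench` example above (`make_ctx` is a hypothetical helper, not part of the patch; the exact field set is whatever `llama.h` defines at this revision):

```cpp
#include "llama.h"

// Sketch: building a context after the mul_mat_q cleanup. There is no
// cparams.mul_mat_q to set anymore; the remaining fields are the ones
// the diffs above keep touching (seed, n_ctx, n_batch, threads, offload_kqv).
static llama_context * make_ctx(llama_model * model, int n_threads) {
    llama_context_params cparams = llama_context_default_params();

    cparams.seed            = 1234;
    cparams.n_ctx           = 2048;
    cparams.n_batch         = 512;
    cparams.n_threads       = n_threads;
    cparams.n_threads_batch = n_threads;
    cparams.offload_kqv     = true;

    return llama_new_context_with_model(model, cparams);
}
```

Note that the flag is removed outright rather than deprecated, which is why the patch also drops the server's `--no-mul-mat-q` handler, the `-mmq` column in `llama-bench`, and the corresponding entry in `scripts/compare-llama-bench.py`.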
From f49a5356865ced0eca1df9f9d84631dfef71b9dc Mon Sep 17 00:00:00 2001 From: Miwa / Ensan <63481257+ensan-hcl@users.noreply.github.com> Date: Fri, 1 Mar 2024 22:48:56 +0900 Subject: [PATCH 749/811] common : fix flag `--logits-all` to `--all-logits` (#5805) --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index bf1ed8a66..938c428cf 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1015,7 +1015,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); printf(" --no-penalize-nl do not penalize newline token\n"); printf(" --temp N temperature (default: %.1f)\n", (double)sparams.temp); - printf(" --logits-all return logits for all tokens in the batch (default: disabled)\n"); + printf(" --all-logits return logits for all tokens in the batch (default: disabled)\n"); printf(" --hellaswag compute HellaSwag score over random tasks from datafile supplied with -f\n"); printf(" --hellaswag-tasks N number of tasks to use when computing the HellaSwag score (default: %zu)\n", params.hellaswag_tasks); printf(" --winogrande compute Winogrande score over random tasks from datafile supplied with -f\n"); From e7433867288d2f142cffe596f3751bda5d7ee2c7 Mon Sep 17 00:00:00 2001 From: kunal-vaishnavi <115581922+kunal-vaishnavi@users.noreply.github.com> Date: Fri, 1 Mar 2024 06:08:08 -0800 Subject: [PATCH 750/811] gemma : fix bfloat16 -> float16 conversion issue (#5810) --- convert-hf-to-gguf.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index ae30b2a76..d3e8ec1f6 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1811,16 +1811,15 @@ class GemmaModel(Model): tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) for name, data_torch in self.get_tensors(): - # ref: https://github.com/huggingface/transformers/blob/fc37f38915372c15992b540dfcbbe00a916d4fc6/src/transformers/models/gemma/modeling_gemma.py#L89 - if name.endswith("norm.weight"): - data_torch = data_torch + 1 - old_dtype = data_torch.dtype # convert any unsupported data types to float32 if data_torch.dtype not in (torch.float16, torch.float32): data_torch = data_torch.to(torch.float32) + # ref: https://github.com/huggingface/transformers/blob/fc37f38915372c15992b540dfcbbe00a916d4fc6/src/transformers/models/gemma/modeling_gemma.py#L89 + if name.endswith("norm.weight"): + data_torch = data_torch + 1 data = data_torch.squeeze().numpy() # map tensor names From c2224f003bf9cf558b1a3c57033563e11a4de9a5 Mon Sep 17 00:00:00 2001 From: ddpasa <112642920+ddpasa@users.noreply.github.com> Date: Fri, 1 Mar 2024 18:00:00 +0100 Subject: [PATCH 751/811] ggml-vulkan: fix VULKAN_CHECK_RESULTS flag, which was previously broken (#5813) --- ggml-vulkan.cpp | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 896c290b2..ae9cb3c1c 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -5428,7 +5428,8 @@ static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tenso ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + ggml_vk_buffer_read(ctx, buffer_gpu, extra->offset, tensor_data, tensor_size); } 
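// [Editorial sketch, not part of this patch] The recurring pattern in the hunks
// around here: extra->buffer_gpu is assumed to now be a std::weak_ptr to the
// device buffer, so each reader promotes it to a strong vk_buffer via lock()
// instead of dereferencing it directly, e.g.
//
//     vk_buffer buffer_gpu = extra->buffer_gpu.lock();  // empty if already freed
//     if (buffer_gpu) {
//         ggml_vk_buffer_read(ctx, buffer_gpu, extra->offset, tensor_data, tensor_size);
//     }
//
// The size-clamping hunks below apply the same promotion before comparing
// against buffer_gpu->size.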
std::cerr << "TENSOR CHECK " << name << " (" << tensor->name << "): " << ggml_op_name(tensor->op) << std::endl; @@ -5540,7 +5541,8 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ for (int i3 = 0; i3 < src0->ne[3]; i3++) { for (int i2 = 0; i2 < src0->ne[2]; i2++) { const int idx = i3*src0->ne[2] + i2; - ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + ggml_vk_buffer_read(ctx, buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); } } @@ -5550,10 +5552,11 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src0_clone->nb[i] = src0_clone->nb[i - 1]*src0_clone->ne[i - 1]; } } else { - if (offset + src0_size >= extra->buffer_gpu->size) { - src0_size = extra->buffer_gpu->size - offset; + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + if (offset + src0_size >= buffer_gpu->size) { + src0_size = buffer_gpu->size - offset; } - ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src0_clone->data, src0_size); + ggml_vk_buffer_read(ctx, buffer_gpu, offset, src0_clone->data, src0_size); memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5583,7 +5586,8 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ for (int i3 = 0; i3 < src1->ne[3]; i3++) { for (int i2 = 0; i2 < src1->ne[2]; i2++) { const int idx = i3*src1->ne[2] + i2; - ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + ggml_vk_buffer_read(ctx, buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); } } @@ -5593,10 +5597,11 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src1_clone->nb[i] = src1_clone->nb[i - 1]*src1_clone->ne[i - 1]; } } else { - if (offset + src1_size >= extra->buffer_gpu->size) { - src1_size = extra->buffer_gpu->size - offset; + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + if (offset + src1_size >= buffer_gpu->size) { + src1_size = buffer_gpu->size - offset; } - ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src1_clone->data, src1_size); + ggml_vk_buffer_read(ctx, buffer_gpu, offset, src1_clone->data, src1_size); memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5643,11 +5648,7 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ } else if (tensor->op == GGML_OP_RMS_NORM) { tensor_clone = ggml_rms_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_SOFT_MAX) { - if (src1 != nullptr) { - tensor_clone = ggml_soft_max_ext(ggml_ctx, src0_clone, src1_clone, *(float *)tensor->op_params); - } else { tensor_clone = ggml_soft_max(ggml_ctx, src0_clone); - } } else if (tensor->op == GGML_OP_DIAG_MASK_INF) { tensor_clone = ggml_diag_mask_inf(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_ROPE) { @@ -5753,11 +5754,12 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (extra->offset + tensor_size >= extra->buffer_gpu->size) { - tensor_size = extra->buffer_gpu->size - 
(extra->offset); + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); + if (extra->offset + tensor_size >= buffer_gpu->size) { + tensor_size = buffer_gpu->size - (extra->offset); } - ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + ggml_vk_buffer_read(ctx, buffer_gpu, extra->offset, tensor_data, tensor_size); } float first_error_result = -1.0f; From 38d16b142624bdd7c41d9955752b7f7b59c5e048 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 1 Mar 2024 20:00:58 +0200 Subject: [PATCH 752/811] server : remove api_like_OAI.py proxy script (#5808) --- README.md | 1 + examples/server/README.md | 17 +-- examples/server/api_like_OAI.py | 228 -------------------------------- 3 files changed, 3 insertions(+), 243 deletions(-) delete mode 100755 examples/server/api_like_OAI.py diff --git a/README.md b/README.md index 5401e197f..67717c1e3 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Hot topics +- The `api_like_OAI.py` script has been removed - use `server` instead ([#5766](https://github.com/ggerganov/llama.cpp/issues/5766#issuecomment-1969037761)) - Support for chat templates: [Wiki (contributions welcome)](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) - Support for Gemma models: https://github.com/ggerganov/llama.cpp/pull/5631 - Non-linear quantization IQ4_NL: https://github.com/ggerganov/llama.cpp/pull/5590 diff --git a/examples/server/README.md b/examples/server/README.md index ad35306c6..397ee8252 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -326,7 +326,7 @@ Notice that each `probs` is an array of length `n_probs`. - `default_generation_settings` - the default generation settings for the `/completion` endpoint, has the same fields as the `generation_settings` response object from the `/completion` endpoint. - `total_slots` - the total number of slots for process requests (defined by `--parallel` option) -- **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. +- **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. *Options:* @@ -528,20 +528,7 @@ bash chat.sh ### API like OAI -API example using Python Flask: [api_like_OAI.py](api_like_OAI.py) -This example must be used with server.cpp - -```sh -python api_like_OAI.py -``` - -After running the API server, you can use it in Python by setting the API base URL. 
- -```python -openai.api_base = "http://:port" -``` - -Then you can utilize llama.cpp as an OpenAI's **chat.completion** or **text_completion** API +The HTTP server supports OAI-like API ### Extending or building alternative Web Front End diff --git a/examples/server/api_like_OAI.py b/examples/server/api_like_OAI.py deleted file mode 100755 index 607fe49d3..000000000 --- a/examples/server/api_like_OAI.py +++ /dev/null @@ -1,228 +0,0 @@ -#!/usr/bin/env python3 -import argparse -from flask import Flask, jsonify, request, Response -import urllib.parse -import requests -import time -import json - - -app = Flask(__name__) -slot_id = -1 - -parser = argparse.ArgumentParser(description="An example of using server.cpp with a similar API to OAI. It must be used together with server.cpp.") -parser.add_argument("--chat-prompt", type=str, help="the top prompt in chat completions(default: 'A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.')", default='A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.') -parser.add_argument("--user-name", type=str, help="USER name in chat completions(default: 'USER: ')", default="USER: ") -parser.add_argument("--ai-name", type=str, help="ASSISTANT name in chat completions(default: 'ASSISTANT: ')", default="ASSISTANT: ") -parser.add_argument("--system-name", type=str, help="SYSTEM name in chat completions(default: 'ASSISTANT's RULE: ')", default="ASSISTANT's RULE: ") -parser.add_argument("--stop", type=str, help="the end of response in chat completions(default: '')", default="") -parser.add_argument("--llama-api", type=str, help="Set the address of server.cpp in llama.cpp(default: http://127.0.0.1:8080)", default='http://127.0.0.1:8080') -parser.add_argument("--api-key", type=str, help="Set the api key to allow only few user(default: NULL)", default="") -parser.add_argument("--host", type=str, help="Set the ip address to listen.(default: 127.0.0.1)", default='127.0.0.1') -parser.add_argument("--port", type=int, help="Set the port to listen.(default: 8081)", default=8081) - -args = parser.parse_args() - -def is_present(json, key): - try: - buf = json[key] - except KeyError: - return False - if json[key] == None: - return False - return True - -#convert chat to prompt -def convert_chat(messages): - - system_n = args.system_name - user_n = args.user_name - ai_n = args.ai_name - stop = args.stop - - prompt = "" + args.chat_prompt + stop - - for line in messages: - if (line["role"] == "system"): - prompt += f"{system_n}{line['content']}{stop}" - if (line["role"] == "user"): - prompt += f"{user_n}{line['content']}{stop}" - if (line["role"] == "assistant"): - prompt += f"{ai_n}{line['content']}{stop}" - prompt += ai_n.rstrip() - - return prompt - -def make_postData(body, chat=False, stream=False): - postData = {} - if (chat): - postData["prompt"] = convert_chat(body["messages"]) - else: - postData["prompt"] = body["prompt"] - if(is_present(body, "temperature")): postData["temperature"] = body["temperature"] - if(is_present(body, "top_k")): postData["top_k"] = body["top_k"] - if(is_present(body, "top_p")): postData["top_p"] = body["top_p"] - if(is_present(body, "max_tokens")): postData["n_predict"] = body["max_tokens"] - if(is_present(body, "presence_penalty")): postData["presence_penalty"] = body["presence_penalty"] - if(is_present(body, "frequency_penalty")): postData["frequency_penalty"] = body["frequency_penalty"] - 
if(is_present(body, "repeat_penalty")): postData["repeat_penalty"] = body["repeat_penalty"] - if(is_present(body, "mirostat")): postData["mirostat"] = body["mirostat"] - if(is_present(body, "mirostat_tau")): postData["mirostat_tau"] = body["mirostat_tau"] - if(is_present(body, "mirostat_eta")): postData["mirostat_eta"] = body["mirostat_eta"] - if(is_present(body, "seed")): postData["seed"] = body["seed"] - if(is_present(body, "grammar")): postData["grammar"] = body["grammar"] - if(is_present(body, "logit_bias")): postData["logit_bias"] = [[int(token), body["logit_bias"][token]] for token in body["logit_bias"].keys()] - if (args.stop != ""): - postData["stop"] = [args.stop] - else: - postData["stop"] = [] - if(is_present(body, "stop")): postData["stop"] += body["stop"] - postData["n_keep"] = -1 - postData["stream"] = stream - postData["cache_prompt"] = True - postData["slot_id"] = slot_id - return postData - -def make_resData(data, chat=False, promptToken=[]): - resData = { - "id": "chatcmpl" if (chat) else "cmpl", - "object": "chat.completion" if (chat) else "text_completion", - "created": int(time.time()), - "truncated": data["truncated"], - "model": "LLaMA_CPP", - "usage": { - "prompt_tokens": data["tokens_evaluated"], - "completion_tokens": data["tokens_predicted"], - "total_tokens": data["tokens_evaluated"] + data["tokens_predicted"] - } - } - if (len(promptToken) != 0): - resData["promptToken"] = promptToken - if (chat): - #only one choice is supported - resData["choices"] = [{ - "index": 0, - "message": { - "role": "assistant", - "content": data["content"], - }, - "finish_reason": "stop" if (data["stopped_eos"] or data["stopped_word"]) else "length" - }] - else: - #only one choice is supported - resData["choices"] = [{ - "text": data["content"], - "index": 0, - "logprobs": None, - "finish_reason": "stop" if (data["stopped_eos"] or data["stopped_word"]) else "length" - }] - return resData - -def make_resData_stream(data, chat=False, time_now = 0, start=False): - resData = { - "id": "chatcmpl" if (chat) else "cmpl", - "object": "chat.completion.chunk" if (chat) else "text_completion.chunk", - "created": time_now, - "model": "LLaMA_CPP", - "choices": [ - { - "finish_reason": None, - "index": 0 - } - ] - } - slot_id = data.get("slot_id") - if (chat): - if (start): - resData["choices"][0]["delta"] = { - "role": "assistant" - } - else: - resData["choices"][0]["delta"] = { - "content": data["content"] - } - if (data["stop"]): - resData["choices"][0]["finish_reason"] = "stop" if (data["stopped_eos"] or data["stopped_word"]) else "length" - else: - resData["choices"][0]["text"] = data["content"] - if (data["stop"]): - resData["choices"][0]["finish_reason"] = "stop" if (data["stopped_eos"] or data["stopped_word"]) else "length" - - return resData - - -@app.route('/chat/completions', methods=['POST', 'OPTIONS']) -@app.route('/v1/chat/completions', methods=['POST', 'OPTIONS']) -def chat_completions(): - if (args.api_key != "" and request.headers["Authorization"].split()[1] != args.api_key): - return Response(status=403) - if request.method == 'OPTIONS': - return Response(headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) - body = request.get_json() - stream = False - tokenize = False - if(is_present(body, "stream")): stream = body["stream"] - if(is_present(body, "tokenize")): tokenize = body["tokenize"] - postData = make_postData(body, chat=True, stream=stream) - - promptToken = [] - if (tokenize): - tokenData = requests.request("POST", 
urllib.parse.urljoin(args.llama_api, "/tokenize"), data=json.dumps({"content": postData["prompt"]})).json() - promptToken = tokenData["tokens"] - - if (not stream): - data = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/completion"), data=json.dumps(postData)) - print(data.json()) - resData = make_resData(data.json(), chat=True, promptToken=promptToken) - return jsonify(resData) - else: - def generate(): - data = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/completion"), data=json.dumps(postData), stream=True) - time_now = int(time.time()) - resData = make_resData_stream({}, chat=True, time_now=time_now, start=True) - yield 'data: {}\n\n'.format(json.dumps(resData)) - for line in data.iter_lines(): - if line: - decoded_line = line.decode('utf-8') - resData = make_resData_stream(json.loads(decoded_line[6:]), chat=True, time_now=time_now) - yield 'data: {}\n\n'.format(json.dumps(resData)) - return Response(generate(), mimetype='text/event-stream', headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) - - -@app.route('/completions', methods=['POST', 'OPTIONS']) -@app.route('/v1/completions', methods=['POST', 'OPTIONS']) -def completion(): - if (args.api_key != "" and request.headers["Authorization"].split()[1] != args.api_key): - return Response(status=403) - if request.method == 'OPTIONS': - return Response(headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) - body = request.get_json() - stream = False - tokenize = False - if(is_present(body, "stream")): stream = body["stream"] - if(is_present(body, "tokenize")): tokenize = body["tokenize"] - postData = make_postData(body, chat=False, stream=stream) - - promptToken = [] - if (tokenize): - tokenData = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/tokenize"), data=json.dumps({"content": postData["prompt"]})).json() - promptToken = tokenData["tokens"] - - if (not stream): - data = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/completion"), data=json.dumps(postData)) - print(data.json()) - resData = make_resData(data.json(), chat=False, promptToken=promptToken) - return jsonify(resData) - else: - def generate(): - data = requests.request("POST", urllib.parse.urljoin(args.llama_api, "/completion"), data=json.dumps(postData), stream=True) - time_now = int(time.time()) - for line in data.iter_lines(): - if line: - decoded_line = line.decode('utf-8') - resData = make_resData_stream(json.loads(decoded_line[6:]), chat=False, time_now=time_now) - yield 'data: {}\n\n'.format(json.dumps(resData)) - return Response(generate(), mimetype='text/event-stream', headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Headers": "*"}) - -if __name__ == '__main__': - app.run(args.host, port=args.port) From c29af7e2252d288f2ea58a7d437c1cb7c0abf160 Mon Sep 17 00:00:00 2001 From: Sourab Mangrulkar <13534540+pacman100@users.noreply.github.com> Date: Sat, 2 Mar 2024 01:00:46 +0530 Subject: [PATCH 753/811] llama : add StarCoder2 support (#5795) * Add support for starcoder2 * handle rope type * skip rope freq and rotary embeddings from being serialized * resolve comments * Update llama.cpp * remove redundant changes * handle `rope-theta` * llama : change starcoder2 rope type * address comment --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 8 +- gguf-py/gguf/constants.py | 21 ++++ gguf-py/gguf/tensor_mapping.py | 2 + llama.cpp | 199 +++++++++++++++++++++++++++++++++ 4 files changed, 229 insertions(+), 1 
deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index d3e8ec1f6..28b92ac38 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -96,9 +96,11 @@ class Model: if (n_head_kv := self.hparams.get("num_key_value_heads")) is not None: self.gguf_writer.add_head_count_kv(n_head_kv) + if (rope_theta := self.hparams.get("rope_theta")) is not None: + self.gguf_writer.add_rope_freq_base(rope_theta) if (f_rms_eps := self.hparams.get("rms_norm_eps")) is not None: self.gguf_writer.add_layer_norm_rms_eps(f_rms_eps) - if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon"], optional=True)) is not None: + if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon", "norm_epsilon"], optional=True)) is not None: self.gguf_writer.add_layer_norm_eps(f_norm_eps) if (n_experts := self.hparams.get("num_local_experts")) is not None: self.gguf_writer.add_expert_count(n_experts) @@ -220,6 +222,8 @@ class Model: return NomicBertModel if model_architecture == "GemmaForCausalLM": return GemmaModel + if model_architecture == "Starcoder2ForCausalLM": + return Model return Model def _is_model_safetensors(self) -> bool: @@ -281,6 +285,8 @@ class Model: return gguf.MODEL_ARCH.NOMIC_BERT if arch == "GemmaForCausalLM": return gguf.MODEL_ARCH.GEMMA + if arch == "Starcoder2ForCausalLM": + return gguf.MODEL_ARCH.STARCODER2 raise NotImplementedError(f'Architecture "{arch}" not supported!') diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 8f9139d1b..5db760cb1 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -112,6 +112,7 @@ class MODEL_ARCH(IntEnum): INTERNLM2 = auto() MINICPM = auto() GEMMA = auto() + STARCODER2 = auto() class MODEL_TENSOR(IntEnum): @@ -169,6 +170,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.INTERNLM2: "internlm2", MODEL_ARCH.MINICPM: "minicpm", MODEL_ARCH.GEMMA: "gemma", + MODEL_ARCH.STARCODER2: "starcoder2", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -526,6 +528,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_UP, MODEL_TENSOR.FFN_NORM, ], + MODEL_ARCH.STARCODER2: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], # TODO } @@ -554,6 +571,10 @@ MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.ROPE_FREQS, MODEL_TENSOR.ATTN_ROT_EMBD, ], + MODEL_ARCH.STARCODER2: [ + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_ROT_EMBD, + ], } # diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 861003776..db2ec9704 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -210,6 +210,7 @@ class TensorNameMap: "model.layers.layers.{bid}.mlp.up_proj", # plamo "model.layers.{bid}.feed_forward.w3", # internlm2 "encoder.layers.{bid}.mlp.fc11", # nomic-bert + "model.layers.{bid}.mlp.c_fc", # starcoder2 ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -256,6 +257,7 @@ class TensorNameMap: "model.layers.layers.{bid}.mlp.down_proj", # plamo "model.layers.{bid}.feed_forward.w2", # internlm2 "encoder.layers.{bid}.mlp.fc2", # nomic-bert + "model.layers.{bid}.mlp.c_proj", # starcoder2 ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index 073fd3b70..b1db5b179 100644 --- a/llama.cpp +++ b/llama.cpp @@ -211,6 +211,7 @@ 
enum llm_arch { LLM_ARCH_INTERNLM2, LLM_ARCH_MINICPM, LLM_ARCH_GEMMA, + LLM_ARCH_STARCODER2, LLM_ARCH_UNKNOWN, }; @@ -238,6 +239,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_INTERNLM2, "internlm2" }, { LLM_ARCH_MINICPM, "minicpm" }, { LLM_ARCH_GEMMA, "gemma" }, + { LLM_ARCH_STARCODER2, "starcoder2" }, }; enum llm_kv { @@ -779,6 +781,24 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_STARCODER2, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -3320,6 +3340,16 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_STARCODER2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + switch (hparams.n_layer) { + case 30: model.type = e_model::MODEL_3B; break; + case 32: model.type = e_model::MODEL_7B; break; + case 40: model.type = e_model::MODEL_15B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -4490,6 +4520,56 @@ static bool llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); } } break; + case LLM_ARCH_STARCODER2: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output + { + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, false); + // if output is NULL, init from the input tok embed + if (model.output == NULL) { + model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); + } + + } + + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + + // optional bias tensors + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + layer.bo = 
ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + + // optional bias tensors + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP , "bias", i), { n_ff}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -7559,6 +7639,120 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_starcoder2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, 
model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph_defrag(llama_context & lctx, const std::vector & ids) { @@ -7705,6 +7899,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_gemma(); } break; + case LLM_ARCH_STARCODER2: + { + result = llm.build_starcoder2(); + } break; default: GGML_ASSERT(false); } @@ -12084,6 +12282,7 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { case LLM_ARCH_QWEN2: case LLM_ARCH_PHI2: case LLM_ARCH_GEMMA: + case LLM_ARCH_STARCODER2: return LLAMA_ROPE_TYPE_NEOX; // all model arches should be listed explicitly here From da3b9ba2b710c0f8b44398a0eb9e5a7ae2ad967a Mon Sep 17 00:00:00 2001 From: nold Date: Fri, 1 Mar 2024 22:51:12 +0100 Subject: [PATCH 754/811] convert-hf-to-gguf : require einops for InternLM2ForCausalLM (#5792) --- requirements/requirements-convert-hf-to-gguf.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements/requirements-convert-hf-to-gguf.txt b/requirements/requirements-convert-hf-to-gguf.txt index 6ac402610..6ce840d73 100644 --- a/requirements/requirements-convert-hf-to-gguf.txt +++ b/requirements/requirements-convert-hf-to-gguf.txt @@ -1,2 +1,3 @@ -r ./requirements-convert.txt torch~=2.1.1 +einops~=0.7.0 From cb5e8f7fc4ee57d4bcccafbe04a82cededd35486 Mon Sep 17 00:00:00 2001 From: Tushar Date: Sat, 2 Mar 2024 04:48:26 +0530 Subject: [PATCH 755/811] build(nix): Introduce flake.formatter for `nix fmt` (#5687) * build(nix): Introduce flake.formatter for `nix fmt` * chore: Switch to pkgs.nixfmt-rfc-style --- .devops/nix/sif.nix | 2 +- flake.nix | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.devops/nix/sif.nix b/.devops/nix/sif.nix index 7535ca0f3..7a5e1dd0f 100644 --- a/.devops/nix/sif.nix +++ b/.devops/nix/sif.nix @@ -7,7 +7,7 @@ }: let - optionalInt = cond: x: if cond then x else 0; + optionalInt = cond: x: if cond then x else 0; in singularity-tools.buildImage rec { inherit (llama-cpp) name; diff --git a/flake.nix b/flake.nix index dc4e503c3..45f9deda0 100644 --- a/flake.nix +++ b/flake.nix @@ -107,11 +107,12 @@ # ``` # # Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format - flake.overlays.default = - (final: prev: { + flake.overlays.default = ( + final: prev: { llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; inherit (final.llamaPackages) llama-cpp; - }); + } + ); systems = [ "aarch64-darwin" @@ -131,6 +132,9 @@ ... }: { + # For standardised reproducible formatting with `nix fmt` + formatter = pkgs.nixfmt-rfc-style; + # Unlike `.#packages`, legacyPackages may contain values of # arbitrary types (including nested attrsets) and may even throw # exceptions. 
This attribute isn't recursed into by `nix flake From 9bf297a02bfbd474e51912409a470dd797e2fe13 Mon Sep 17 00:00:00 2001 From: crasm Date: Sat, 2 Mar 2024 00:11:06 -0500 Subject: [PATCH 756/811] workflows : remove nocleanup arg for check-requirements.sh (#5826) Reduces peak tmpfs usage and should prevent the check from failing from running out of space. Fixes the 'No space left on device' issue mentioned in #5703. --- .github/workflows/python-check-requirements.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-check-requirements.yml b/.github/workflows/python-check-requirements.yml index 92e1108b3..b82205992 100644 --- a/.github/workflows/python-check-requirements.yml +++ b/.github/workflows/python-check-requirements.yml @@ -3,12 +3,14 @@ name: Python check requirements.txt on: push: paths: + - '.github/workflows/python-check-requirements.yml' - 'scripts/check-requirements.sh' - 'convert*.py' - 'requirements.txt' - 'requirements/*.txt' pull_request: paths: + - '.github/workflows/python-check-requirements.yml' - 'scripts/check-requirements.sh' - 'convert*.py' - 'requirements.txt' @@ -26,4 +28,4 @@ jobs: with: python-version: "3.11" - name: Run check-requirements.sh script - run: bash scripts/check-requirements.sh nocleanup + run: bash scripts/check-requirements.sh From 715641391dda1ff9762dc5d99d9a30acce99f2c6 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Sat, 2 Mar 2024 19:49:30 +0800 Subject: [PATCH 757/811] Support multiple GPUs (split mode) on SYCL backend (#5806) * suport multiple cards: split-mode - layer|row * rm warning * rebase with master, support tow new OPs, close feature for -sm=row, fix for unit test * update news * fix merge error * update according to review comments --- README-sycl.md | 21 + common/common.cpp | 4 + examples/llama-bench/llama-bench.cpp | 17 +- examples/sycl/ls-sycl-device.cpp | 2 +- examples/sycl/run-llama2.sh | 17 +- ggml-sycl.cpp | 2205 +++++++++++++++++--------- ggml-sycl.h | 5 + llama.cpp | 49 +- 8 files changed, 1506 insertions(+), 814 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index dd5bf9dea..85eb16f2b 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -1,6 +1,7 @@ # llama.cpp for SYCL - [Background](#background) +- [News](#news) - [OS](#os) - [Intel GPU](#intel-gpu) - [Docker](#docker) @@ -25,6 +26,21 @@ The llama.cpp for SYCL is used to support Intel GPUs. For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). +## News + +- 2024.3 + - Support multiple cards: **--split-mode**: [none|layer]; not support [row], it's on developing. + - Support to assign main GPU by **--main-gpu**, replace $GGML_SYCL_DEVICE. + - Support detecting all GPUs with level-zero and same top **Max compute units**. + - Support OPs + - hardsigmoid + - hardswish + - pool2d + +- 2024.1 + - Create SYCL backend for Intel GPU. + - Support Windows build + ## OS |OS|Status|Verified| @@ -449,6 +465,7 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device |-|-|-| |GGML_SYCL_DEVICE|0 (default) or 1|Set the device id used. Check the device ids by default running output| |GGML_SYCL_DEBUG|0 (default) or 1|Enable log function by macro: GGML_SYCL_DEBUG| +|ZES_ENABLE_SYSMAN| 0 (default) or 1|Support to get free memory of GPU by sycl::aspect::ext_intel_free_memory.
    Recommended to use when --split-mode = layer| ## Known Issue @@ -458,6 +475,10 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device Solution: add **--no-mmap** or **--mmap 0**. +- Split-mode: [row] is not supported + + It's on developing. + ## Q&A - Error: `error while loading shared libraries: libsycl.so.7: cannot open shared object file: No such file or directory`. diff --git a/common/common.cpp b/common/common.cpp index 938c428cf..1c0b7c403 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -640,6 +640,10 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } else if (arg_next == "layer") { params.split_mode = LLAMA_SPLIT_MODE_LAYER; } else if (arg_next == "row") { +#ifdef GGML_USE_SYCL + fprintf(stderr, "warning: The split mode value:[row] is not supported by llama.cpp with SYCL. It's developing.\nExit!\n"); + exit(1); +#endif // GGML_USE_SYCL params.split_mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index c2155b2ac..aa79d002a 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -123,20 +123,15 @@ static std::string get_gpu_info() { } #endif #ifdef GGML_USE_SYCL - int device_list[GGML_SYCL_MAX_DEVICES]; - ggml_sycl_get_gpu_list(device_list, GGML_SYCL_MAX_DEVICES); - - for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) { - if (device_list[i] >0 ){ - char buf[128]; - ggml_sycl_get_device_description(i, buf, sizeof(buf)); - id += buf; + int count = ggml_backend_sycl_get_device_count(); + for (int i = 0; i < count; i++) { + char buf[128]; + ggml_sycl_get_device_description(i, buf, sizeof(buf)); + id += buf; + if (i < count - 1) { id += "/"; } } - if (id.length() >2 ) { - id.pop_back(); - } #endif // TODO: other backends return id; diff --git a/examples/sycl/ls-sycl-device.cpp b/examples/sycl/ls-sycl-device.cpp index 52442e4ca..74a8b7fd8 100644 --- a/examples/sycl/ls-sycl-device.cpp +++ b/examples/sycl/ls-sycl-device.cpp @@ -7,7 +7,7 @@ #include "ggml-sycl.h" -int main(int argc, char ** argv) { +int main() { ggml_backend_sycl_print_sycl_devices(); return 0; } diff --git a/examples/sycl/run-llama2.sh b/examples/sycl/run-llama2.sh index f5f4c1e98..52f7c01a4 100755 --- a/examples/sycl/run-llama2.sh +++ b/examples/sycl/run-llama2.sh @@ -8,12 +8,19 @@ INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" source /opt/intel/oneapi/setvars.sh if [ $# -gt 0 ]; then - export GGML_SYCL_DEVICE=$1 + GGML_SYCL_DEVICE=$1 else - export GGML_SYCL_DEVICE=0 + GGML_SYCL_DEVICE=0 fi -echo GGML_SYCL_DEVICE=$GGML_SYCL_DEVICE +echo "use $GGML_SYCL_DEVICE as main GPU" #export GGML_SYCL_DEBUG=1 -./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -#./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 5 -e -ngl 33 -t 1 -s 0 + + +#ZES_ENABLE_SYSMAN=1, Support to get free memory of GPU by sycl::aspect::ext_intel_free_memory. Recommended to use when --split-mode = layer. 
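#Note: ZES_ENABLE_SYSMAN=1 lets the SYCL backend query per-GPU free memory through
#sycl::aspect::ext_intel_free_memory; without it, total device memory is reported as free
#(see get_memory_info in the ggml-sycl.cpp hunk further below).
#Illustrative only (assumes the same model path as above): explicitly request layer splitting
#across the detected GPUs:
#  ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 --split-mode layer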
+ +#use all GPUs with same max compute units +ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 + +#use main GPU only +#ZES_ENABLE_SYSMAN=1 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "${INPUT2}" -n 400 -e -ngl 33 -s 0 -mg $GGML_SYCL_DEVICE -sm none diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 6f391b0c6..cad08d610 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -661,26 +661,29 @@ namespace dpct /// \param [out] total_memory The number of bytes of total memory on the SYCL device. void get_memory_info(size_t &free_memory, size_t &total_memory) { + total_memory = get_device_info().get_global_mem_size(); + const char *warning_info = "get_memory_info: [warning] ext_intel_free_memory is not " + "supported (export/set ZES_ENABLE_SYSMAN=1 to support), " + "use total memory as free memory"; #if (defined(__SYCL_COMPILER_VERSION) && __SYCL_COMPILER_VERSION >= 20221105) if (!has(sycl::aspect::ext_intel_free_memory)) { - std::cerr << "get_memory_info: ext_intel_free_memory is not supported." << std::endl; - free_memory = 0; + std::cerr << warning_info << std::endl; + free_memory = total_memory; } else { free_memory = get_info(); } #else - std::cerr << "get_memory_info: ext_intel_free_memory is not supported." << std::endl; - free_memory = 0; + std::cerr << warning_info << std::endl; + free_memory = total_memory; #if defined(_MSC_VER) && !defined(__clang__) #pragma message("Querying the number of bytes of free memory is not supported") #else #warning "Querying the number of bytes of free memory is not supported" #endif #endif - total_memory = get_device_info().get_global_mem_size(); } void get_device_info(device_info &out) const @@ -738,15 +741,25 @@ namespace dpct #endif // DPCT_USM_LEVEL_NONE } - sycl::queue *create_in_order_queue(bool enable_exception_handler = false) - { - std::lock_guard lock(m_mutex); - return create_queue_impl(enable_exception_handler, - sycl::property::queue::in_order()); + sycl::queue *create_queue(sycl::context context, sycl::device device, + bool enable_exception_handler = false) { + return create_in_order_queue(context, device, enable_exception_handler); } - sycl::queue *create_out_of_order_queue(bool enable_exception_handler = false) - { + sycl::queue *create_in_order_queue(bool enable_exception_handler = false) { + std::lock_guard lock(m_mutex); + return create_queue_impl(enable_exception_handler, + sycl::property::queue::in_order()); + } + + sycl::queue *create_in_order_queue(sycl::context context, sycl::device device, + bool enable_exception_handler = false) { + std::lock_guard lock(m_mutex); + return create_queue_impl(context, device, enable_exception_handler, + sycl::property::queue::in_order()); + } + + sycl::queue *create_out_of_order_queue(bool enable_exception_handler = false) { std::lock_guard lock(m_mutex); return create_queue_impl(enable_exception_handler); } @@ -809,6 +822,25 @@ namespace dpct return _queues.back().get(); } + template + sycl::queue *create_queue_impl(sycl::context context, sycl::device device, + bool enable_exception_handler, + Properties... 
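        // total memory is always taken from the device info; when the
        // sycl::aspect::ext_intel_free_memory query is not available
        // (ZES_ENABLE_SYSMAN=1 not set), the warning below is printed and
        // free_memory falls back to total_memory instead of 0.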
properties) { + sycl::async_handler eh = {}; + if (enable_exception_handler) { + eh = exception_handler; + } + _queues.push_back(std::make_shared( + context, device, eh, + sycl::property_list( + #ifdef DPCT_PROFILING_ENABLED + sycl::property::queue::enable_profiling(), + #endif + properties...))); + + return _queues.back().get(); + } + void get_version(int &major, int &minor) const { detail::get_version(*this, major, minor); @@ -2943,14 +2975,11 @@ bool ggml_sycl_loaded(void); void * ggml_sycl_host_malloc(size_t size); void ggml_sycl_host_free(void * ptr); bool ggml_sycl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -void ggml_sycl_set_tensor_split(const float * tensor_split); -void ggml_sycl_transform_tensor(void * data, struct ggml_tensor * tensor); void ggml_sycl_free_data(struct ggml_tensor * tensor); void ggml_sycl_assign_buffers(struct ggml_tensor * tensor); void ggml_sycl_assign_buffers_no_scratch(struct ggml_tensor * tensor); void ggml_sycl_assign_buffers_force_inplace(struct ggml_tensor * tensor); void ggml_sycl_assign_buffers_no_alloc(struct ggml_tensor * tensor); -void ggml_sycl_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset); void ggml_sycl_copy_to_device(struct ggml_tensor * tensor); void ggml_sycl_set_main_device(int main_device); void ggml_sycl_set_mul_mat_q(bool mul_mat_q); @@ -2963,6 +2992,14 @@ int get_main_device(); void print_ggml_tensor(const char*name, struct ggml_tensor *src); void log_tensor_with_cnt(const char* name, struct ggml_tensor * src, int stop_cnt); +void dev2dev_memcpy(sycl::queue &q_dst, sycl::queue &q_src, void *ptr_dst, + const void *ptr_src, size_t size) { + char *host_buf = (char *)malloc(size); + q_src.memcpy(host_buf, (const char *)ptr_src, size).wait(); + q_dst.memcpy((char *)ptr_dst, host_buf, size).wait(); + free(host_buf); +} + static __dpct_inline__ int get_int_from_int8(const int8_t *x8, const int &i32) { const uint16_t * x16 = (const uint16_t *) (x8 + sizeof(int) * i32); // assume at least 2 byte alignment @@ -3180,6 +3217,8 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define SYCL_SILU_BLOCK_SIZE 256 #define SYCL_TANH_BLOCK_SIZE 256 #define SYCL_RELU_BLOCK_SIZE 256 +#define SYCL_HARDSIGMOID_BLOCK_SIZE 256 +#define SYCL_HARDSWISH_BLOCK_SIZE 256 #define SYCL_SQR_BLOCK_SIZE 256 #define SYCL_CPY_BLOCK_SIZE 32 #define SYCL_SCALE_BLOCK_SIZE 256 @@ -3196,6 +3235,7 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define SYCL_PAD_BLOCK_SIZE 256 #define SYCL_ACC_BLOCK_SIZE 256 #define SYCL_IM2COL_BLOCK_SIZE 256 +#define SYCL_POOL2D_BLOCK_SIZE 256 // dmmv = dequantize_mul_mat_vec #ifndef GGML_SYCL_DMMV_X @@ -3218,8 +3258,7 @@ static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUA #define MUL_MAT_SRC1_COL_STRIDE 128 #define MAX_STREAMS 8 -static dpct::queue_ptr g_syclStreams[GGML_SYCL_MAX_DEVICES][MAX_STREAMS] = { - {0}}; +static dpct::queue_ptr g_syclStreams[GGML_SYCL_MAX_DEVICES][MAX_STREAMS] = {{0}}; struct ggml_tensor_extra_gpu { void * data_device[GGML_SYCL_MAX_DEVICES]; // 1 pointer for each device for split tensors @@ -3228,30 +3267,108 @@ struct ggml_tensor_extra_gpu { [MAX_STREAMS]; // events for synchronizing multiple GPUs }; -inline dpct::err0 ggml_sycl_set_device(const int device) try { - int current_device; +class sycl_gpu_mgr { + public: + std::vector gpus; + std::vector devices; + sycl::queue *first_queue; + sycl::context co_ctx; + int max_compute_units = 0; + int 
work_group_size = 0; + std::string gpus_list = ""; - SYCL_CHECK(CHECK_TRY_ERROR( - current_device = dpct::dev_mgr::instance().current_device_id())); + sycl_gpu_mgr() { + detect_sycl_gpu_list_with_max_cu(); + get_allow_gpus(); + create_context_with_gpus(); + } - // GGML_SYCL_DEBUG("ggml_sycl_set_device device=%d, current_device=%d\n", device, current_device); - if (device == current_device) { - return 0; - } + void create_context_with_gpus() { + sycl::context ctx = sycl::context(devices); + assert(gpus.size() > 0); + first_queue = dpct::get_current_device().create_queue(ctx, devices[0]); + co_ctx = first_queue->get_context(); + } - return CHECK_TRY_ERROR(dpct::select_device(device)); -} -catch (sycl::exception const &exc) { - std::cerr << exc.what() << "Exception caught at file:" << __FILE__ - << ", line:" << __LINE__ << std::endl; - crash(); - std::exit(1); -} + sycl::context &get_co_ctx() { return co_ctx; } + void get_allow_gpus() { + gpus_list = ""; + for (size_t i = 0; i < gpus.size(); ++i) { + gpus_list += std::to_string(gpus[i]); + gpus_list += ","; + } + if (gpus_list.length() > 2) { + gpus_list.pop_back(); + } + } + + bool is_allowed_gpu(int device_id) { + return std::find(gpus.begin(), gpus.end(), device_id) != gpus.end(); + } + + void detect_sycl_gpu_list_with_max_cu() try { + int device_count = dpct::dev_mgr::instance().device_count(); + + for (int id = 0; id < device_count; id++) { + sycl::device device = dpct::dev_mgr::instance().get_device(id); + if (!device.is_gpu()) + continue; + dpct::device_info prop; + dpct::get_device_info(prop, device); + if (max_compute_units < prop.get_max_compute_units()) + max_compute_units = prop.get_max_compute_units(); + } + + for (int id = 0; id < device_count; id++) { + sycl::device device = dpct::dev_mgr::instance().get_device(id); + if (!device.is_gpu()) + continue; + dpct::device_info prop; + dpct::get_device_info(prop, device); + if (max_compute_units == prop.get_max_compute_units() && + prop.get_major_version() == 1) { + gpus.push_back(id); + devices.push_back(device); + work_group_size = prop.get_max_work_group_size(); + } + } + return; + } catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); + } + + int get_gpu_count() { return (int)gpus.size(); } + + int get_index(int id) { + for (int i = 0; i < (int)gpus.size(); i++) { + if (gpus[i] == id) + return i; + } + assert(false); + return -1; + } + + int get_next_index(int id) { + int cur_index = get_index(id); + for (int i = cur_index + 1; i < (int)gpus.size(); i++) { + if (gpus[i] == id) + return i; + } + assert(false); + return -1; + } +}; + +static sycl_gpu_mgr *g_sycl_gpu_mgr = NULL; static int g_device_count = -1; static int g_all_sycl_device_count = -1; static int g_main_device = -1; -static int g_main_device_index = -1; +static int g_main_device_id = -1; + +static std::array g_default_tensor_split = {}; static float g_tensor_split[GGML_SYCL_MAX_DEVICES] = {0}; @@ -3268,8 +3385,6 @@ struct sycl_device_id2index { int index; }; -static sycl_device_id2index g_sycl_device_id2index[GGML_SYCL_MAX_DEVICES] = { {-1} }; - static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default static size_t g_scratch_offset = 0; @@ -3290,6 +3405,63 @@ static void bad_arch(const sycl::stream &stream_ct1) { (void) bad_arch; // suppress unused function warning } +/* +device_index: device index from 0 to n (continue numbers). 
+ It is used for device select/set in SYCL backend internal data structure. +*/ +void check_allow_gpu_index(const int device_index) { + if (device_index >= g_device_count) { + char error_buf[256]; + snprintf(error_buf, sizeof(error_buf), + "%s error: device_index:%d is out of range: [0-%d]", __func__, + device_index, g_device_count - 1); + fprintf(stderr, "%s\n", error_buf); + assert(false); + } +} + +/* +device_id: device ID is shown by ggml_backend_sycl_print_sycl_devices(). + It is only used to set current working device. +*/ +void check_allow_gpu_id(const int device_id) { + if (!g_sycl_gpu_mgr->is_allowed_gpu(device_id)) { + char error_buf[256]; + snprintf(error_buf, sizeof(error_buf), + "error: cannot set device=%d, which is not allowed. Please " + "set GPU ID in: [%s]", + device_id, g_sycl_gpu_mgr->gpus_list.c_str()); + fprintf(stderr, "%s\n", error_buf); + throw std::invalid_argument(error_buf); + } +} + +int get_current_device_id() { + return dpct::dev_mgr::instance().current_device_id(); +} + +inline dpct::err0 ggml_sycl_set_device(const int device) try { + + int device_id = g_sycl_gpu_mgr->gpus[device]; + check_allow_gpu_id(device_id); + + int current_device_id; + SYCL_CHECK(CHECK_TRY_ERROR(current_device_id = get_current_device_id())); + + // GGML_SYCL_DEBUG("ggml_sycl_set_device device_id=%d, + // current_device_id=%d\n", device, current_device); + if (device_id == current_device_id) { + return 0; + } + + return CHECK_TRY_ERROR(dpct::select_device(device_id)); +} catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + crash(); + std::exit(1); +} + void log_ggml_var_device(const char*name, float *src, size_t total_elements, bool src_on_device){ if(!g_ggml_sycl_debug) return; if(!src){ @@ -3302,22 +3474,18 @@ void log_ggml_var_device(const char*name, float *src, size_t total_elements, boo size_t total_size = total_elements*sizeof(float); float *local_buf = NULL; - // printf("total_size %d2, src_on_device %d\n", total_size, src_on_device); if(src_on_device) { local_buf = (float *) ggml_sycl_host_malloc(total_size); - // printf("local buf %p size %d bytes\n", local_buf, total_size); ggml_sycl_set_device(g_main_device); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; main_stream->memcpy(local_buf, src, total_size); } else { local_buf = (float *)src; - // printf("local buf from src-> data %p\n", local_buf); } std::ofstream logfile; logfile.open(filename); - // printf("local buf element %d\n", total_elements); for(size_t i=0; iextra; - src_data = (float*)src_extra->data_device[g_main_device_index]; + src_data = (float*)src_extra->data_device[g_main_device]; } else { src_data = (float *)src->data; @@ -3359,10 +3527,6 @@ void log_tensor_with_cnt(const char* name, struct ggml_tensor * src, int stop_cn sprintf(filename, "%s_%07d", name, log_file_name_idx); log_file_name_idx++; print_ggml_tensor(filename, src); - // print_ggml_tensor("ggml_sycl_rms_norm_src0", (ggml_tensor *)src0); - // print_ggml_tensor("ggml_sycl_rms_norm_src1", (ggml_tensor *)src1); - // int *ptr = NULL; - // *ptr = 0; } static __dpct_inline__ float warp_reduce_sum(float x, @@ -3583,6 +3747,28 @@ static void relu_f32(const float * x, float * dst, const int k, dst[i] = sycl::fmax((float)(x[i]), (float)0); } +static void hardsigmoid_f32(const float * x, float * dst, const int k, + const sycl::nd_item<3> &item_ct1) { + const int i = 
item_ct1.get_local_range(2) * item_ct1.get_group(2) + + item_ct1.get_local_id(2); + + if (i >= k) { + return; + } + dst[i] = sycl::fmin(1.0f, sycl::fmax(0.0f, (x[i] + 3.0f) / 6.0f)); +} + +static void hardswish_f32(const float * x, float * dst, const int k, + const sycl::nd_item<3> &item_ct1) { + const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + + item_ct1.get_local_id(2); + + if (i >= k) { + return; + } + dst[i] = x[i] * sycl::fmin(1.0f, sycl::fmax(0.0f, (x[i] + 3.0f) / 6.0f)); +} + static void leaky_relu_f32(const float *x, float *dst, const int k, const float negative_slope, const sycl::nd_item<3> &item_ct1) { const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + @@ -4964,8 +5150,8 @@ static void k_get_rows_float( template static void dequantize_block(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, const sycl::nd_item<3> &item_ct1) { - const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + - 2 * item_ct1.get_local_id(2); + const int i = 2 * (item_ct1.get_local_range(2) * item_ct1.get_group(2) + + item_ct1.get_local_id(2)); if (i >= k) { return; @@ -7695,7 +7881,7 @@ static void cpy_1_f16_f16(const char * cxi, char * cdsti) { static void cpy_1_f16_f32(const char * cxi, char * cdsti) { const sycl::half *xi = (const sycl::half *)cxi; - float *dsti = (float *)cdsti; + float * dsti = (float *) cdsti; *dsti = *xi; } @@ -8297,6 +8483,62 @@ static void im2col_kernel(const float *x, T *dst, int offset_delta, } } +template +static void pool2d_nchw_kernel( + const int ih, const int iw, const int oh, const int ow, + const int kh, const int kw, const int sh, const int sw, + const int ph, const int pw, const int parallel_elements, + const Ti* src, To* dst, const enum ggml_op_pool op, + const sycl::nd_item<3> &item_ct1) { + int idx = item_ct1.get_local_id(2) + + item_ct1.get_group(2) * item_ct1.get_local_range(2); + if (idx >= parallel_elements) { + return; + } + + const int I_HW = ih * iw; + const int O_HW = oh * ow; + const int nc = idx / O_HW; + const int cur_oh = idx % O_HW / ow; + const int cur_ow = idx % O_HW % ow; + const Ti* i_ptr = src + nc * I_HW; + To* o_ptr = dst + nc * O_HW; + const int start_h = cur_oh * sh - ph; + const int bh = sycl::max(0, start_h); + const int eh = sycl::min(ih, start_h + kh); + const int start_w = cur_ow * sw - pw; + const int bw = sycl::max(0, start_w); + const int ew = sycl::min(iw, start_w + kw); + + To res = 0; + + switch (op) { + case GGML_OP_POOL_AVG: res = 0; break; + case GGML_OP_POOL_MAX: res = -FLT_MAX; break; + } + + for (int i = bh; i < eh; i += 1) { + for (int j = bw; j < ew; j += 1) { +#if DPCT_COMPATIBILITY_TEMP >= 350 + /* + DPCT1098:106: The '*' expression is used instead of the __ldg + call. These two expressions do not provide the exact same + functionality. Check the generated code for potential precision + and/or performance issues. 
+ */ + Ti cur = *(i_ptr + i * iw + j); +#else + Ti cur = i_ptr[i * iw + j]; +#endif + switch (op) { + case GGML_OP_POOL_AVG: res += (cur / (kh * kw)); break; + case GGML_OP_POOL_MAX: res = sycl::max(res, (To)cur); break; + } + } + } + o_ptr[cur_oh * ow + cur_ow] = res; +} + template static void get_rows_sycl(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst, const void *src0_dd, @@ -8585,6 +8827,30 @@ static void relu_f32_sycl(const float *x, float *dst, const int k, }); } +static void hardsigmoid_f32_sycl(const float *x, float *dst, const int k, + dpct::queue_ptr stream) { + const int num_blocks = (k + SYCL_HARDSIGMOID_BLOCK_SIZE - 1) / SYCL_HARDSIGMOID_BLOCK_SIZE; + stream->parallel_for( + sycl::nd_range<3>(sycl::range<3>(1, 1, num_blocks) * + sycl::range<3>(1, 1, SYCL_HARDSIGMOID_BLOCK_SIZE), + sycl::range<3>(1, 1, SYCL_HARDSIGMOID_BLOCK_SIZE)), + [=](sycl::nd_item<3> item_ct1) { + hardsigmoid_f32(x, dst, k, item_ct1); + }); +} + +static void hardswish_f32_sycl(const float *x, float *dst, const int k, + dpct::queue_ptr stream) { + const int num_blocks = (k + SYCL_HARDSWISH_BLOCK_SIZE - 1) / SYCL_HARDSWISH_BLOCK_SIZE; + stream->parallel_for( + sycl::nd_range<3>(sycl::range<3>(1, 1, num_blocks) * + sycl::range<3>(1, 1, SYCL_HARDSWISH_BLOCK_SIZE), + sycl::range<3>(1, 1, SYCL_HARDSWISH_BLOCK_SIZE)), + [=](sycl::nd_item<3> item_ct1) { + hardswish_f32(x, dst, k, item_ct1); + }); +} + static void leaky_relu_f32_sycl(const float *x, float *dst, const int k, const float negative_slope, dpct::queue_ptr stream) { @@ -8811,11 +9077,10 @@ template static void dequantize_block_sycl(const void *__restrict__ vx, dst_t *__restrict__ y, const int k, dpct::queue_ptr stream) { - const int num_blocks = (k + SYCL_DEQUANTIZE_BLOCK_SIZE - 1) / SYCL_DEQUANTIZE_BLOCK_SIZE; + const int num_blocks = (k + 2*SYCL_DEQUANTIZE_BLOCK_SIZE - 1) / (2*SYCL_DEQUANTIZE_BLOCK_SIZE); { dpct::has_capability_or_fail(stream->get_device(), {sycl::aspect::fp16}); - stream->parallel_for( sycl::nd_range<3>( sycl::range<3>(1, 1, num_blocks) * @@ -9208,24 +9473,6 @@ static void mul_mat_vec_q_sycl_submitter(const void *vx, const void *vy, }); } -int get_device_index_by_id(int id){ - int res = g_sycl_device_id2index[id].index; - // GGML_SYCL_DEBUG("get_device_index_by_id id=%d device_index=%d\n", id, res); - GGML_ASSERT(res>=0); - return res; -} - -int get_device_id_by_index(int index){ - int res = g_device_caps[index].device_id; - GGML_ASSERT(res>=0); - return res; -} - - -int get_current_device_index(){ - return get_device_index_by_id(dpct::dev_mgr::instance().current_device_id()); -} - static void ggml_mul_mat_q4_0_q8_1_sycl(const void *vx, const void *vy, float *dst, const int ncols_x, const int nrows_x, const int ncols_y, @@ -9234,7 +9481,7 @@ static void ggml_mul_mat_q4_0_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9349,7 +9596,7 @@ static void ggml_mul_mat_q4_1_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9464,7 +9711,7 @@ static void ggml_mul_mat_q5_0_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); 
const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9579,7 +9826,7 @@ static void ggml_mul_mat_q5_1_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9694,7 +9941,7 @@ static void ggml_mul_mat_q8_0_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9809,7 +10056,7 @@ static void ggml_mul_mat_q2_K_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -9932,7 +10179,7 @@ static void ggml_mul_mat_q3_K_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -10060,7 +10307,7 @@ static void ggml_mul_mat_q4_K_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -10181,7 +10428,7 @@ static void ggml_mul_mat_q5_K_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -10302,7 +10549,7 @@ static void ggml_mul_mat_q6_K_q8_1_sycl(const void *vx, const void *vy, int id; SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; @@ -10458,6 +10705,31 @@ static void ggml_mul_mat_vec_nc_f16_f32_sycl( } } +static void +ggml_cpy_f16_f32_sycl(const char *cx, char *cdst, const int ne, const int ne00, + const int ne01, const int ne02, const int nb00, + const int nb01, const int nb02, const int nb03, + const int ne10, const int ne11, const int ne12, + const int nb10, const int nb11, const int nb12, + const int nb13, dpct::queue_ptr stream) { + + const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for( + sycl::nd_range<3>(sycl::range<3>(1, 1, num_blocks) * + sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), + sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), + [=](sycl::nd_item<3> item_ct1) { + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, + nb01, nb02, nb03, ne10, ne11, ne12, + nb10, nb11, nb12, nb13, item_ct1); + }); + } +} + static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, const int ne02, const int nb00, @@ -11014,12 +11286,9 @@ struct sycl_buffer { static sycl_buffer g_sycl_buffer_pool[GGML_SYCL_MAX_DEVICES][MAX_SYCL_BUFFERS]; static size_t g_sycl_pool_size[GGML_SYCL_MAX_DEVICES] = {0}; -static void *ggml_sycl_pool_malloc_leg(size_t size, size_t *actual_size) try { +static void *ggml_sycl_pool_malloc_leg(int device_index, 
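        // same change as in the other ggml_mul_mat_q*_q8_1_sycl variants in this patch:
        // the dense-index helpers (get_current_device_index, g_sycl_device_id2index) were
        // removed, so the capability lookup below is keyed by the value returned from
        // get_current_device_id().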
size_t size, size_t *actual_size) try { scoped_spin_lock lock(g_sycl_pool_lock); - int id; - SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); - // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg index %d\n", id); + // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg device_index %d size=%lu\n", device_index, size); #ifdef DEBUG_SYCL_MALLOC int nnz = 0; size_t max_size = 0; @@ -11027,7 +11296,7 @@ static void *ggml_sycl_pool_malloc_leg(size_t size, size_t *actual_size) try { size_t best_diff = 1ull << 36; int ibest = -1; for (int i = 0; i < MAX_SYCL_BUFFERS; ++i) { - sycl_buffer& b = g_sycl_buffer_pool[id][i]; + sycl_buffer& b = g_sycl_buffer_pool[device_index][i]; if (b.ptr != nullptr) { #ifdef DEBUG_SYCL_MALLOC ++nnz; @@ -11043,7 +11312,7 @@ static void *ggml_sycl_pool_malloc_leg(size_t size, size_t *actual_size) try { *actual_size = b.size; b.ptr = nullptr; b.size = 0; - // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg return 1 %p\n", ptr); + // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg return 1 %p and rm in pool\n", ptr); return ptr; } } @@ -11051,30 +11320,30 @@ static void *ggml_sycl_pool_malloc_leg(size_t size, size_t *actual_size) try { } } if (ibest >= 0) { - sycl_buffer& b = g_sycl_buffer_pool[id][ibest]; + sycl_buffer& b = g_sycl_buffer_pool[device_index][ibest]; void * ptr = b.ptr; *actual_size = b.size; b.ptr = nullptr; b.size = 0; - // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg return 2 %p\n", ptr); + // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg return 2 %p and rm in pool\n", ptr); return ptr; } void * ptr; size_t look_ahead_size = (size_t) (1.05 * size); look_ahead_size = 256 * ((look_ahead_size + 255)/256); - const dpct::queue_ptr stream = g_syclStreams[id][0]; + const dpct::queue_ptr stream = g_syclStreams[device_index][0]; SYCL_CHECK( CHECK_TRY_ERROR(ptr = (void *)sycl::malloc_device( look_ahead_size, *stream))); *actual_size = look_ahead_size; - g_sycl_pool_size[id] += look_ahead_size; + g_sycl_pool_size[device_index] += look_ahead_size; #ifdef DEBUG_SYCL_MALLOC fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, (uint32_t)(max_size/1024/1024), (uint32_t)(g_sycl_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); #endif - // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg return %p\n", ptr); + // GGML_SYCL_DEBUG("ggml_sycl_pool_malloc_leg look_ahead_size=%lu, return %p\n", look_ahead_size, ptr); return ptr; } catch (sycl::exception const &exc) { @@ -11083,15 +11352,11 @@ catch (sycl::exception const &exc) { std::exit(1); } -static void ggml_sycl_pool_free_leg(void *ptr, size_t size) try { +static void ggml_sycl_pool_free_leg(int device_index, void *ptr, size_t size) try { scoped_spin_lock lock(g_sycl_pool_lock); - int id; - SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); - - const dpct::queue_ptr stream = g_syclStreams[id][0]; + const dpct::queue_ptr stream = g_syclStreams[device_index][0]; for (int i = 0; i < MAX_SYCL_BUFFERS; ++i) { - sycl_buffer& b = g_sycl_buffer_pool[id][i]; + sycl_buffer& b = g_sycl_buffer_pool[device_index][i]; if (b.ptr == nullptr) { b.ptr = ptr; b.size = size; @@ -11100,7 +11365,7 @@ static void ggml_sycl_pool_free_leg(void *ptr, size_t size) try { } fprintf(stderr, "WARNING: sycl buffer pool full, increase MAX_SYCL_BUFFERS\n"); SYCL_CHECK(CHECK_TRY_ERROR(sycl::free(ptr, *stream))); - g_sycl_pool_size[id] -= size; + g_sycl_pool_size[device_index] -= size; } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ 
-11117,7 +11382,8 @@ DPCT1082:64: Migration of CUmemGenericAllocationHandle type is not supported. static dpct::device_ptr g_sycl_pool_addr[GGML_SYCL_MAX_DEVICES] = {0}; static size_t g_sycl_pool_used[GGML_SYCL_MAX_DEVICES] = {0}; -static void *ggml_sycl_pool_malloc_vmm(size_t size, size_t *actual_size) try { +static void *ggml_sycl_pool_malloc_vmm(int device_index, size_t size, size_t *actual_size) try { + GGML_UNUSED(device_index); GGML_UNUSED(size); GGML_UNUSED(actual_size); return NULL; @@ -11128,20 +11394,16 @@ catch (sycl::exception const &exc) { std::exit(1); } -static void ggml_sycl_pool_free_vmm(void *ptr, size_t size) try { +static void ggml_sycl_pool_free_vmm(int device_index, void *ptr, size_t size) try { scoped_spin_lock lock(g_sycl_pool_lock); - int id; - SYCL_CHECK( - CHECK_TRY_ERROR(id = dpct::dev_mgr::instance().current_device_id())); - #ifdef DEBUG_SYCL_MALLOC - printf("sycl pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); + printf("sycl pool[%d]: freed %llu bytes at %llx\n", device_index, (unsigned long long) size, ptr); #endif - g_sycl_pool_used[id] -= size; + g_sycl_pool_used[device_index] -= size; // all deallocations must be in reverse order of the allocations - GGML_ASSERT(ptr == (void *) (g_sycl_pool_addr[id] + g_sycl_pool_used[id])); + GGML_ASSERT(ptr == (void *) (g_sycl_pool_addr[device_index] + g_sycl_pool_used[device_index])); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -11149,14 +11411,11 @@ catch (sycl::exception const &exc) { std::exit(1); } -static void *ggml_sycl_pool_malloc(size_t size, size_t *actual_size) try { - int id; - SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); - if (g_device_caps[id].vmm) { - return ggml_sycl_pool_malloc_vmm(size, actual_size); +static void *ggml_sycl_pool_malloc(int device_index, size_t size, size_t *actual_size) try { + if (g_device_caps[device_index].vmm) { + return ggml_sycl_pool_malloc_vmm(device_index, size, actual_size); } else { - return ggml_sycl_pool_malloc_leg(size, actual_size); + return ggml_sycl_pool_malloc_leg(device_index, size, actual_size); } } catch (sycl::exception const &exc) { @@ -11165,14 +11424,11 @@ catch (sycl::exception const &exc) { std::exit(1); } -static void ggml_sycl_pool_free(void *ptr, size_t size) try { - int id; - SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); - if (g_device_caps[id].vmm) { - ggml_sycl_pool_free_vmm(ptr, size); +static void ggml_sycl_pool_free(int device_index, void *ptr, size_t size) try { + if (g_device_caps[device_index].vmm) { + ggml_sycl_pool_free_vmm(device_index, ptr, size); } else { - ggml_sycl_pool_free_leg(ptr, size); + ggml_sycl_pool_free_leg(device_index, ptr, size); } } catch (sycl::exception const &exc) { @@ -11184,13 +11440,17 @@ catch (sycl::exception const &exc) { template struct sycl_pool_alloc { + int device_index = -1; + int device_id = -1; T * ptr = nullptr; size_t actual_size = 0; // size is in number of elements T * alloc(size_t size) { GGML_ASSERT(ptr == nullptr); - ptr = (T *) ggml_sycl_pool_malloc(size * sizeof(T), &this->actual_size); + device_id = get_current_device_id(); + device_index = g_sycl_gpu_mgr->get_index(device_id); + ptr = (T *) ggml_sycl_pool_malloc(device_index, size * sizeof(T), &this->actual_size); // GGML_SYCL_DEBUG("alloc %lu return %p actual size=%lu\n", size * sizeof(T), ptr, this->actual_size); return ptr; } @@ -11201,7 +11461,7 @@ struct sycl_pool_alloc { ~sycl_pool_alloc() { if (ptr != nullptr) { - 
ggml_sycl_pool_free(ptr, actual_size); + ggml_sycl_pool_free(device_index, ptr, actual_size); } } @@ -11222,44 +11482,57 @@ bool ggml_sycl_loaded(void) { return g_sycl_loaded; } -void ggml_backend_sycl_print_sycl_devices(){ - int device_count = dpct::dev_mgr::instance().device_count(); - fprintf(stderr, "found %d SYCL devices:\n", device_count); - for (int id = 0; id < device_count; ++id) { - dpct::device_info prop; - SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(id)))); - sycl::device cur_device = dpct::dev_mgr::instance().get_device(id); - fprintf(stderr, " Device %d: %s,\tcompute capability %d.%d,\n\tmax compute_units %d,\tmax work group size %d,\tmax sub group size %d,\tglobal mem size %lu\n", id, - prop.get_name(), prop.get_major_version(), - prop.get_minor_version(), - prop.get_max_compute_units(), - prop.get_max_work_group_size(), - prop.get_max_sub_group_size(), - prop.get_global_mem_size() - ); - } - // fprintf(stderr, "\n"); +void print_device_detail(int id) { + dpct::device_info prop; + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::get_device_info(prop, dpct::dev_mgr::instance().get_device(id)))); + sycl::device cur_device = dpct::dev_mgr::instance().get_device(id); + std::string version; + version += std::to_string(prop.get_major_version()); + version += "."; + version += std::to_string(prop.get_minor_version()); + + fprintf(stderr, "|%2d|%45s|%18s|%17d|%14d|%13d|%15lu|\n", id, + prop.get_name(), version.c_str(), prop.get_max_compute_units(), + prop.get_max_work_group_size(), prop.get_max_sub_group_size(), + prop.get_global_mem_size()); } -int get_sycl_env(const char* env_name, int default_val){ - char * user_device_string = getenv(env_name); +void ggml_backend_sycl_print_sycl_devices() { + int device_count = dpct::dev_mgr::instance().device_count(); + fprintf(stderr, "found %d SYCL devices:\n", device_count); + fprintf(stderr, "|ID| Name |compute capability|Max compute units|Max work group|Max sub group|Global mem size|\n"); + fprintf(stderr, "|--|---------------------------------------------|------------------|-----------------|--------------|-------------|---------------|\n"); + for (int id = 0; id < device_count; ++id) { + print_device_detail(id); + } +} + +void print_gpu_device_list() { + fprintf(stderr, "detect %d SYCL GPUs: [%s] with Max compute units:%d\n", + g_sycl_gpu_mgr->get_gpu_count(), + g_sycl_gpu_mgr->gpus_list.c_str(), + g_sycl_gpu_mgr->max_compute_units); +} + +int get_sycl_env(const char *env_name, int default_val) { + char *user_device_string = getenv(env_name); int user_number = default_val; unsigned n; - if (user_device_string != NULL && sscanf(user_device_string, " %u", &n) == 1) { - user_number = (int)n; - } else { - user_number=default_val; - } + if (user_device_string != NULL && + sscanf(user_device_string, " %u", &n) == 1) { + user_number = (int)n; + } else { + user_number = default_val; + } return user_number; } -int get_work_group_size(int user_device_id){ +int get_work_group_size(int user_device_id) { dpct::device_info prop; - dpct::get_device_info( - prop, - dpct::dev_mgr::instance().get_device(user_device_id)); + dpct::get_device_info(prop, + dpct::dev_mgr::instance().get_device(user_device_id)); return prop.get_max_work_group_size(); } @@ -11268,113 +11541,81 @@ void ggml_init_sycl() try { if (!initialized) { g_ggml_sycl_debug = get_sycl_env("GGML_SYCL_DEBUG", 0); + fprintf(stderr, "%s: GGML_SYCL_DEBUG: %d\n", __func__, g_ggml_sycl_debug); - printf("GGML_SYCL_DEBUG=%d\n", g_ggml_sycl_debug); - - int 
user_device_id = get_sycl_env("GGML_SYCL_DEVICE", 0); - +#if defined(GGML_SYCL_F16) + fprintf(stderr, "%s: GGML_SYCL_F16: yes\n", __func__); +#else + fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); +#endif if (CHECK_TRY_ERROR(g_all_sycl_device_count = - dpct::dev_mgr::instance().device_count()) != - 0) { + dpct::dev_mgr::instance().device_count()) != 0) { initialized = true; g_sycl_loaded = false; return; } GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); + ggml_backend_sycl_print_sycl_devices(); + + if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); + + g_device_count = g_sycl_gpu_mgr->get_gpu_count(); + g_work_group_size = g_sycl_gpu_mgr->work_group_size; + + print_gpu_device_list(); + int64_t total_vram = 0; -#if defined(GGML_SYCL_F16) - fprintf(stderr, "%s: GGML_SYCL_F16: yes\n", __func__); -#else - fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); -#endif - - +/* NOT REMOVE, keep it for next optimize for XMX. #if defined(SYCL_USE_XMX) fprintf(stderr, "%s: SYCL_USE_XMX: yes\n", __func__); #else fprintf(stderr, "%s: SYCL_USE_XMX: no\n", __func__); #endif - ggml_backend_sycl_print_sycl_devices(); +*/ for (int id = 0; id < GGML_SYCL_MAX_DEVICES; ++id) { - g_sycl_device_id2index[id].index = -1; g_device_caps[id].vmm = 0; g_device_caps[id].device_id = -1; g_device_caps[id].cc = 0; g_tensor_split[id] = 0; + g_default_tensor_split[id] = 0; } - int device_inx = -1; - for (int id = 0; id < g_all_sycl_device_count; ++id) { - if(id!=user_device_id) continue; - - device_inx++; - - g_device_caps[device_inx].vmm = 0; - g_device_caps[device_inx].device_id = id; - g_sycl_device_id2index[id].index = device_inx; + for (int i = 0; i < g_device_count; ++i) { + int device_id = g_sycl_gpu_mgr->gpus[i]; + g_device_caps[i].vmm = 0; dpct::device_info prop; SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(id)))); + prop, dpct::dev_mgr::instance().get_device(device_id)))); - g_tensor_split[device_inx] = total_vram; + g_default_tensor_split[i] = total_vram; total_vram += prop.get_global_mem_size(); - g_device_caps[device_inx].cc = + g_device_caps[i].cc = 100 * prop.get_major_version() + 10 * prop.get_minor_version(); - - } - device_inx = -1; - for (int id = 0; id < g_all_sycl_device_count; ++id) { - if(id!=user_device_id) continue; - device_inx++; - g_tensor_split[device_inx] /= total_vram; } - device_inx = -1; - for (int id = 0; id < g_all_sycl_device_count; ++id) { - if(id!=user_device_id) continue; - device_inx++; - SYCL_CHECK(ggml_sycl_set_device(id)); + for (int i = 0; i < g_device_count; ++i) { + g_default_tensor_split[i] /= total_vram; + } + + for (int i = 0; i < g_device_count; ++i) { + SYCL_CHECK(ggml_sycl_set_device(i)); // create sycl streams for (int is = 0; is < MAX_STREAMS; ++is) { - /* - DPCT1025:88: The SYCL queue is created ignoring the flag and - priority options. - */ SYCL_CHECK(CHECK_TRY_ERROR( - g_syclStreams[device_inx][is] = - dpct::get_current_device().create_queue())); + g_syclStreams[i][is] = + dpct::get_current_device().create_queue( + g_sycl_gpu_mgr->get_co_ctx(), dpct::get_current_device()))); } - const dpct::queue_ptr stream = g_syclStreams[device_inx][0]; + const dpct::queue_ptr stream = g_syclStreams[i][0]; // create sycl handle - SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[device_inx] = - stream)); - /* - DPCT1027:89: The call to syclSetMathMode was replaced with 0 - because this functionality is redundant in SYCL. 
- */ - SYCL_CHECK(0); + SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[i] = stream)); } - // configure logging to stdout - // SYCL_CHECK(syclLoggerConfigure(1, 1, 0, nullptr)); - - //hardcode, force set to 1 device - g_device_count = 1; - ggml_sycl_set_main_device(user_device_id); - ggml_sycl_set_device(user_device_id); - g_work_group_size = get_work_group_size(user_device_id); - // fprintf(stderr, "Using Device %d\n", user_device_id); - - // for (int id = 0; id < g_all_sycl_device_count; ++id) { - // GGML_SYCL_DEBUG("id=%d g_device_caps[%d].device_id=%d g_sycl_device_id2index[%d].index=%d ", id, id, - // g_device_caps[id].device_id, id, g_sycl_device_id2index[id].index); - // } - initialized = true; g_sycl_loaded = true; } @@ -11385,31 +11626,6 @@ catch (sycl::exception const &exc) { std::exit(1); } - -void ggml_sycl_set_tensor_split(const float * tensor_split) { - if (tensor_split == nullptr) { - return; - } - bool all_zero = true; - for (int i = 0; i < g_device_count; ++i) { - if (tensor_split[i] != 0.0f) { - all_zero = false; - break; - } - } - if (all_zero) { - return; - } - float split_sum = 0.0f; - for (int i = 0; i < g_device_count; ++i) { - g_tensor_split[i] = split_sum; - split_sum += tensor_split[i]; - } - for (int i = 0; i < g_device_count; ++i) { - g_tensor_split[i] /= split_sum; - } -} - void *ggml_sycl_host_malloc(size_t size) try { if (getenv("GGML_SYCL_NO_PINNED") != nullptr) { return nullptr; @@ -11419,28 +11635,14 @@ void *ggml_sycl_host_malloc(size_t size) try { //allow to use dpct::get_in_order_queue() for host malloc dpct::err0 err = CHECK_TRY_ERROR( ptr = (void *)sycl::malloc_host(size, dpct::get_in_order_queue())); - /* - DPCT1000:82: Error handling if-stmt was detected but could not be rewritten. - */ + if (err != 0) { // clear the error - /* - DPCT1026:83: The call to syclGetLastError was removed because this - functionality is redundant in SYCL. - */ - /* - DPCT1001:81: The statement could not be removed. - */ fprintf( stderr, "WARNING: failed to allocate %.2f MB of pinned memory: %s\n", - /* - DPCT1009:84: SYCL uses exceptions to report errors and does not use - the error codes. The original code was commented out and a warning - string was inserted. You need to rewrite this code. 
- */ size / 1024.0 / 1024.0, - "syclGetErrorString is not supported" /*syclGetErrorString(err)*/); + "syclGetErrorString is not supported"); return nullptr; } @@ -11480,7 +11682,7 @@ static dpct::err0 ggml_sycl_cpy_tensor_2d(void *dst, ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src->extra; int id; SYCL_CHECK(CHECK_TRY_ERROR( - id = get_current_device_index())); + id = get_current_device_id())); // GGML_SYCL_DEBUG("current device index %d\n", id); src_ptr = (char *) extra->data_device[id]; } else { @@ -11714,7 +11916,6 @@ inline void ggml_sycl_op_tanh(const ggml_tensor *src0, const ggml_tensor *src1, GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - tanh_f32_sycl(src0_dd, dst_dd, ggml_nelements(src0), main_stream); (void) src1; @@ -11737,6 +11938,37 @@ inline void ggml_sycl_op_relu(const ggml_tensor *src0, const ggml_tensor *src1, (void) src1_dd; } +static void ggml_sycl_op_hardsigmoid(const ggml_tensor *src0, + const ggml_tensor *src1, ggml_tensor *dst, + const float *src0_dd, const float *src1_dd, + float *dst_dd, + const dpct::queue_ptr &main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + hardsigmoid_f32_sycl(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +static void ggml_sycl_op_hardswish(const ggml_tensor *src0, + const ggml_tensor *src1, ggml_tensor *dst, + const float *src0_dd, const float *src1_dd, + float *dst_dd, const dpct::queue_ptr &main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + hardswish_f32_sycl(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + inline void ggml_sycl_op_leaky_relu(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst, const float *src0_dd, const float *src1_dd, @@ -11905,7 +12137,7 @@ inline void ggml_sycl_op_mul_mat_q( int device_id; SYCL_CHECK( - CHECK_TRY_ERROR(device_id = dpct::dev_mgr::instance().current_device_id())); + CHECK_TRY_ERROR(device_id = get_current_device_id())); // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the dequantize_mul_mat kernel writes into @@ -11957,16 +12189,16 @@ catch (sycl::exception const &exc) { std::exit(1); } -static int64_t get_row_rounding(ggml_type type) { +static int64_t get_row_rounding(ggml_type type, const std::array & tensor_split) { int64_t min_compute_capability = INT_MAX; int64_t max_compute_capability = INT_MIN; - for (int64_t id = 0; id < g_device_count; ++id) { - if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - if (min_compute_capability > g_device_caps[id].cc) { - min_compute_capability = g_device_caps[id].cc; + for (int i = 0; i < g_device_count; ++i) { + if (tensor_split[i] < (i + 1 < g_device_count ? tensor_split[i + 1] : 1.0f)) { + if (min_compute_capability > g_device_caps[i].cc) { + min_compute_capability = g_device_caps[i].cc; } - if (max_compute_capability < g_device_caps[id].cc) { - max_compute_capability = g_device_caps[id].cc; + if (max_compute_capability < g_device_caps[i].cc) { + max_compute_capability = g_device_caps[i].cc; } } } @@ -11986,12 +12218,16 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q3_K: case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ3_XXS: return max_compute_capability >= VER_GEN9 ? 
128 : 64; case GGML_TYPE_Q6_K: return 64; default: GGML_ASSERT(false); } + } inline void ggml_sycl_op_mul_mat_vec_q( @@ -12176,27 +12412,22 @@ inline void ggml_sycl_op_mul_mat_sycl( const int64_t row_diff = row_high - row_low; int id; - int device_id = dpct::dev_mgr::instance().current_device_id(); SYCL_CHECK( - CHECK_TRY_ERROR(id = get_current_device_index())); + CHECK_TRY_ERROR(id = get_current_device_id())); // the main device has a larger memory buffer to hold the results from all GPUs // ldc == nrows of the matrix that cuBLAS writes into - int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && device_id == g_main_device ? ne0 : row_diff; + int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; #ifdef GGML_SYCL_F16 bool use_fp16 = true; // TODO(Yu) SYCL capability check #else bool use_fp16 = false; #endif - // if (compute_capability >= VER_GEN9 && (src0->type == GGML_TYPE_F16 || - // ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == - // src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { if ((src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && use_fp16 && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { - // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 // GGML_SYCL_DEBUG("ggml_sycl_op_mul_mat_sycl - fp16 path\n"); sycl_pool_alloc src0_as_f16; if (src0->type != GGML_TYPE_F16) { @@ -12225,7 +12456,6 @@ inline void ggml_sycl_op_mul_mat_sycl( const sycl::half alpha_f16 = 1.0f; const sycl::half beta_f16 = 0.0f; - SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[id] = stream)); SYCL_CHECK(CHECK_TRY_ERROR(dpct::gemm( *g_sycl_handles[id], oneapi::mkl::transpose::trans, @@ -12241,14 +12471,21 @@ inline void ggml_sycl_op_mul_mat_sycl( else { // GGML_SYCL_DEBUG("ggml_sycl_op_mul_mat_sycl - fp32 path\n"); sycl_pool_alloc src0_ddq_as_f32; - + sycl_pool_alloc src1_ddq_as_f32; if (src0->type != GGML_TYPE_F32) { const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(src0->type); GGML_ASSERT(to_fp32_sycl != nullptr); src0_ddq_as_f32.alloc(row_diff*ne00); to_fp32_sycl(src0_dd_i, src0_ddq_as_f32.get(), row_diff*ne00, stream); } + if (src1->type != GGML_TYPE_F32) { + const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(src1->type); + GGML_ASSERT(to_fp32_sycl != nullptr); + src1_ddq_as_f32.alloc(src1_ncols*ne10); + to_fp32_sycl(src1_ddf_i, src1_ddq_as_f32.get(), src1_ncols*ne10, stream); + } const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32.get(); + const float * src1_ddf1_i = src1->type == GGML_TYPE_F32 ? 
(const float *) src1_ddf_i : src1_ddq_as_f32.get(); const float alpha = 1.0f; const float beta = 0.0f; @@ -12261,7 +12498,6 @@ inline void ggml_sycl_op_mul_mat_sycl( src1_ddf_i, ne10, dpct::get_value(&beta, *g_sycl_handles[id]), dst_dd_i, ldc))); } - (void) dst; (void) src1_ddq_i; (void) src1_padded_row_size; @@ -12382,6 +12618,48 @@ inline void ggml_sycl_op_alibi(const ggml_tensor *src0, const ggml_tensor *src1, (void) src1_dd; } +static void ggml_sycl_op_pool2d(const ggml_tensor *src0, + const ggml_tensor *src1, ggml_tensor *dst, + const float *src0_dd, const float *src1_dd, + float *dst_dd, const dpct::queue_ptr &main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = static_cast(opts[0]); + const int k0 = opts[1]; + const int k1 = opts[2]; + const int s0 = opts[3]; + const int s1 = opts[4]; + const int p0 = opts[5]; + const int p1 = opts[6]; + + const int64_t IH = src0->ne[1]; + const int64_t IW = src0->ne[0]; + + const int64_t N = dst->ne[3]; + const int64_t OC = dst->ne[2]; + const int64_t OH = dst->ne[1]; + const int64_t OW = dst->ne[0]; + + const int parallel_elements = N * OC * OH * OW; + const int num_blocks = (parallel_elements + SYCL_POOL2D_BLOCK_SIZE - 1) / SYCL_POOL2D_BLOCK_SIZE; + sycl::range<3> block_nums(1, 1, num_blocks); + main_stream->parallel_for( + sycl::nd_range<3>(block_nums * + sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE), + sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE)), + [=](sycl::nd_item<3> item_ct1) { + pool2d_nchw_kernel(IH, IW, OH, OW, k1, k0, s1, s0, p1, p0, + parallel_elements, src0_dd, dst_dd, op, + item_ct1); + }); + + (void) src1; + (void) src1_dd; +} + inline void ggml_sycl_op_im2col(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst, const float *src0_dd, const float *src1_dd, @@ -12606,12 +12884,12 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, sycl_pool_alloc dst_f; ggml_sycl_set_device(g_main_device); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; - // GGML_SYCL_DEBUG("g_main_device_index=%d, main_stream=%p src0_on_device=%d, src1_on_device=%d, dst_on_device=%d\n", - // g_main_device_index, main_stream, src0_on_device, src1_on_device, dst_on_device); + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; + // GGML_SYCL_DEBUG("g_main_device=%d, main_stream=%p src0_on_device=%d, src1_on_device=%d, dst_on_device=%d\n", + // g_main_device, main_stream, src0_on_device, src1_on_device, dst_on_device); if (src0_on_device) { - src0_ddf = (float *) src0_extra->data_device[g_main_device_index]; + src0_ddf = (float *) src0_extra->data_device[g_main_device]; } else { src0_ddf = src0_f.alloc(ggml_nelements(src0)); // GGML_SYCL_DEBUG("before ggml_sycl_cpy_tensor_2d src0_ddf=%p, src0=%p\n", src0_ddf, src0); @@ -12620,15 +12898,14 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, if (use_src1) { if (src1_on_device) { - src1_ddf = (float *) src1_extra->data_device[g_main_device_index]; + src1_ddf = (float *) src1_extra->data_device[g_main_device]; } else { src1_ddf = src1_f.alloc(ggml_nelements(src1)); SYCL_CHECK(ggml_sycl_cpy_tensor_2d(src1_ddf, src1, 0, 0, 0, nrows1, main_stream)); } } if (dst_on_device) { - dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; - // printf("zjy dst_ddf=%p main_stream=%p g_main_device_index=%d\n", dst_ddf, main_stream, g_main_device_index); + dst_ddf = (float *) dst_extra->data_device[g_main_device]; } else { 
dst_ddf = dst_f.alloc(ggml_nelements(dst)); } @@ -12672,21 +12949,19 @@ static void ggml_sycl_set_peer_access(const int n_tokens) { } #ifdef NDEBUG - for (int id = 0; id < g_device_count; ++id) { - SYCL_CHECK(ggml_sycl_set_device(get_device_id_by_index(id))); + for (int i = 0; i < g_device_count; ++i) { + SYCL_CHECK(ggml_sycl_set_device(i)); // SYCL_CHECK(syclDeviceSynchronize()); } - for (int id = 0; id < g_device_count; ++id) { - SYCL_CHECK(ggml_sycl_set_device(get_device_id_by_index(id))); - int device_id = g_device_caps[id].device_id; + for (int i = 0; i < g_device_count; ++i) { + SYCL_CHECK(ggml_sycl_set_device(i)); for (int id_other = 0; id_other < g_device_count; ++id_other) { - int device_id_other = g_device_caps[id_other].device_id; - if (device_id == id_other) { + if (i == id_other) { continue; } - if (device_id != g_main_device && device_id_other != g_main_device) { + if (i != g_main_device && id_other != g_main_device) { continue; } @@ -12706,6 +12981,10 @@ static void ggml_sycl_set_peer_access(const int n_tokens) { peer_access_enabled = enable_peer_access; } +struct ggml_backend_sycl_split_buffer_type_context { + std::array tensor_split; +}; + static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst, ggml_sycl_op_mul_mat_t op, @@ -12752,80 +13031,90 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); - // dd = data device - char * src0_dd[GGML_SYCL_MAX_DEVICES] = {nullptr}; - float * src1_ddf[GGML_SYCL_MAX_DEVICES] = {nullptr}; // float - char * src1_ddq[GGML_SYCL_MAX_DEVICES] = {nullptr}; // q8_1 - float * dst_dd[GGML_SYCL_MAX_DEVICES] = {nullptr}; + std::array tensor_split; + if (split) { + // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_TYPE_GPU_SPLIT check + // GGML_ASSERT(src0->buffer != nullptr && src0->buffer->buft == ...); + ggml_backend_sycl_split_buffer_type_context * buft_ctx = (ggml_backend_sycl_split_buffer_type_context *) src0->buffer->buft->context; + tensor_split = buft_ctx->tensor_split; + } - // as = actual size - size_t src0_as[GGML_SYCL_MAX_DEVICES] = {0}; - size_t src1_asf[GGML_SYCL_MAX_DEVICES] = {0}; - size_t src1_asq[GGML_SYCL_MAX_DEVICES] = {0}; - size_t dst_as[GGML_SYCL_MAX_DEVICES] = {0}; + struct dev_data { + sycl_pool_alloc src0_dd_alloc; + sycl_pool_alloc src1_ddf_alloc; + sycl_pool_alloc src1_ddq_alloc; + sycl_pool_alloc dst_dd_alloc; - int64_t row_low[GGML_SYCL_MAX_DEVICES]; - int64_t row_high[GGML_SYCL_MAX_DEVICES]; + char *src0_dd = nullptr; + float *src1_ddf = nullptr; // float + char *src1_ddq = nullptr; // q8_1 + float *dst_dd = nullptr; + + int64_t row_low; + int64_t row_high; + }; + + dev_data dev[GGML_SYCL_MAX_DEVICES]; int used_devices = 0; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int i = 0; i < g_device_count; ++i) { // by default, use all rows - row_low[id] = 0; - row_high[id] = ne01; + dev[i].row_low = 0; + dev[i].row_high = ne01; // for multi GPU, get the row boundaries from tensor split // and round to mul_mat_q tile sizes if (split) { - const int64_t rounding = get_row_rounding(src0->type); + const int64_t rounding = get_row_rounding(src0->type, tensor_split); - if (id != 0) { - row_low[id] = ne01*g_tensor_split[id]; - if (row_low[id] < ne01) { - row_low[id] -= row_low[id] % rounding; + if (i != 0) { + dev[i].row_low = ne01*tensor_split[i]; + if (dev[i].row_low < ne01) { + dev[i].row_low -= 
dev[i].row_low % rounding; } } - if (id != g_device_count - 1) { - row_high[id] = ne01*g_tensor_split[id + 1]; - if (row_high[id] < ne01) { - row_high[id] -= row_high[id] % rounding; + if (i != g_device_count - 1) { + dev[i].row_high = ne01*tensor_split[i + 1]; + if (dev[i].row_high < ne01) { + dev[i].row_high -= dev[i].row_high % rounding; } } } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device_index) || row_low[id] == row_high[id]) { + for (int i = 0; i < g_device_count; ++i) { + if ((!split && i != g_main_device) || dev[i].row_low == dev[i].row_high) { continue; } used_devices++; - const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && i == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && i == g_main_device; - ggml_sycl_set_device(get_device_id_by_index(id)); - const dpct::queue_ptr stream = g_syclStreams[id][0]; + ggml_sycl_set_device(i); + dpct::queue_ptr stream = g_syclStreams[i][0]; if (src0_on_device && src0_is_contiguous) { - src0_dd[id] = (char *) src0_extra->data_device[id]; + dev[i].src0_dd = (char *) src0_extra->data_device[i]; } else { - // const size_t size_src0_ddq = split ? (row_high[id]-row_low[id])*ne00 * src0_ts/src0_bs : ggml_nbytes(src0); - src0_dd[id] = (char *) ggml_sycl_pool_malloc(ggml_nbytes(src0), &src0_as[id]); + dev[i].src0_dd = dev[i].src0_dd_alloc.alloc(ggml_nbytes(src0)); } if (src1_on_device && src1_is_contiguous) { - src1_ddf[id] = (float *) src1_extra->data_device[id]; + dev[i].src1_ddf = (float *) src1_extra->data_device[i]; } else { - src1_ddf[id] = (float *) ggml_sycl_pool_malloc(ggml_nbytes(src1), &src1_asf[id]); + dev[i].src1_ddf = dev[i].src1_ddf_alloc.alloc(ggml_nelements(src1)); } if (convert_src1_to_q8_1) { - src1_ddq[id] = (char *) ggml_sycl_pool_malloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs, &src1_asq[id]); + dev[i].src1_ddq = dev[i].src1_ddq_alloc.alloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs); if (src1_on_device && src1_is_contiguous) { - quantize_row_q8_1_sycl(src1_ddf[id], src1_ddq[id], ne10, nrows1, src1_padded_col_size, stream); + quantize_row_q8_1_sycl(dev[i].src1_ddf, dev[i].src1_ddq, ne10, nrows1, src1_padded_col_size, stream); /* DPCT1010:90: SYCL uses exceptions to report errors and does not use the error codes. The call was replaced with 0. You need to @@ -12836,25 +13125,25 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, } if (dst_on_device) { - dst_dd[id] = (float *) dst_extra->data_device[id]; + dev[i].dst_dd = (float *) dst_extra->data_device[i]; } else { - const size_t size_dst_ddf = split ? (row_high[id]-row_low[id])*ne1*sizeof(float) : ggml_nbytes(dst); - dst_dd[id] = (float *) ggml_sycl_pool_malloc(size_dst_ddf, &dst_as[id]); + const size_t size_dst_ddf = split ? (dev[i].row_high - dev[i].row_low)*ne1 : ggml_nelements(dst); + dev[i].dst_dd = dev[i].dst_dd_alloc.alloc(size_dst_ddf); } } // if multiple devices are used they need to wait for the main device // here an event is recorded that signals that the main device has finished calculating the input data if (split && used_devices > 1) { - SYCL_CHECK(ggml_sycl_set_device(g_main_device)); + ggml_sycl_set_device(g_main_device); /* DPCT1024:91: The original code returned the error code that was further consumed by the program logic. This original code was replaced with 0. 
You may need to rewrite the program logic consuming the error code. */ SYCL_CHECK(CHECK_TRY_ERROR( - *src0_extra->events[g_main_device_index][0] = - g_syclStreams[g_main_device_index][0]->ext_oneapi_submit_barrier())); + *src0_extra->events[g_main_device][0] = + g_syclStreams[g_main_device][0]->ext_oneapi_submit_barrier())); } const int64_t src1_col_stride = split && used_devices > 1 ? MUL_MAT_SRC1_COL_STRIDE : ne11; @@ -12862,22 +13151,27 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const int64_t is = split ? (src1_col_0/src1_col_stride) % MAX_STREAMS : 0; const int64_t src1_ncols = src1_col_0 + src1_col_stride > ne11 ? ne11 - src1_col_0 : src1_col_stride; - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device_index) || row_low[id] == row_high[id]) { + for (int i = 0; i < g_device_count; ++i) { + if ((!split && i != g_main_device) || dev[i].row_low == dev[i].row_high) { continue; } - const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; - const int64_t row_diff = row_high[id] - row_low[id]; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && i == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && i == g_main_device; + const int64_t row_diff = dev[i].row_high - dev[i].row_low; - ggml_sycl_set_device(get_device_id_by_index(id)); - const dpct::queue_ptr stream = g_syclStreams[id][is]; + ggml_sycl_set_device(i); + dpct::queue_ptr stream = g_syclStreams[i][is]; // wait for main GPU data if necessary - if (split && (id != g_main_device_index || is != 0)) { + if (split && (i != g_main_device || is != 0)) { + /* + DPCT1009:163: SYCL uses exceptions to report errors and does not + use the error codes. The original code was commented out and a + warning string was inserted. You need to rewrite this code. + */ SYCL_CHECK(CHECK_TRY_ERROR(stream->ext_oneapi_submit_barrier( - {*src0_extra->events[g_main_device_index][0]}))); + {*src0_extra->events[g_main_device][0]}))); } for (int64_t i0 = 0; i0 < ne13*ne12; ++i0) { @@ -12887,30 +13181,32 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const size_t src1_ddq_i_offset = (i0*ne11 + src1_col_0) * src1_padded_col_size*q8_1_ts/q8_1_bs; // for split tensors the data begins at i0 == i0_offset_low - char * src0_dd_i = src0_dd[id] + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; - float * src1_ddf_i = src1_ddf[id] + (i0*ne11 + src1_col_0) * ne10; - char * src1_ddq_i = src1_ddq[id] + src1_ddq_i_offset; - float * dst_dd_i = dst_dd[id] + (i0*ne1 + src1_col_0) * (dst_on_device ? ne0 : row_diff); + char * src0_dd_i = dev[i].src0_dd + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; + float * src1_ddf_i = dev[i].src1_ddf + (i0*ne11 + src1_col_0) * ne10; + char * src1_ddq_i = dev[i].src1_ddq + src1_ddq_i_offset; + float * dst_dd_i = dev[i].dst_dd + (i0*ne1 + src1_col_0) * (dst_on_device ? 
ne0 : row_diff); // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed - if (dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index) { - dst_dd_i += row_low[id]; // offset is 0 if no tensor split + if (dst->backend == GGML_BACKEND_TYPE_GPU && i == g_main_device) { + dst_dd_i += dev[i].row_low; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary if (src1->backend == GGML_BACKEND_TYPE_GPU && src1_is_contiguous) { - if (id != g_main_device_index) { + if (i != g_main_device) { if (convert_src1_to_q8_1) { - char * src1_ddq_i_source = src1_ddq[g_main_device_index] + src1_ddq_i_offset; - SYCL_CHECK(CHECK_TRY_ERROR(stream->memcpy( + char * src1_ddq_i_source = dev[g_main_device].src1_ddq + src1_ddq_i_offset; + SYCL_CHECK(CHECK_TRY_ERROR(stream->memcpy( src1_ddq_i, src1_ddq_i_source, src1_ncols * src1_padded_col_size * q8_1_ts / q8_1_bs))); } else { - float * src1_ddf_i_source = (float *) src1_extra->data_device[g_main_device_index]; + + float * src1_ddf_i_source = (float *) src1_extra->data_device[g_main_device]; src1_ddf_i_source += (i0*ne11 + src1_col_0) * ne10; - SYCL_CHECK(CHECK_TRY_ERROR(stream->memcpy( + + SYCL_CHECK(CHECK_TRY_ERROR(dev2dev_memcpy(*stream, *main_stream, src1_ddf_i, src1_ddf_i_source, src1_ncols * ne10 * sizeof(float)))); } @@ -12933,14 +13229,14 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, } if (src1_col_0 == 0 && (!src0_on_device || !src0_is_contiguous) && i02 % i02_divisor == 0) { - SYCL_CHECK(ggml_sycl_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, row_low[id], row_high[id], stream)); + SYCL_CHECK(ggml_sycl_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, dev[i].row_low, dev[i].row_high, stream)); } if (src1->type == GGML_TYPE_F16) { src1_padded_col_size = (i0 * ne11 + src1_col_0) * ne10; } // do the computation op(src0, src1, dst, src0_dd_i, src1_ddf_i, src1_ddq_i, dst_dd_i, - row_low[id], row_high[id], src1_ncols, src1_padded_col_size, stream); + dev[i].row_low, dev[i].row_high, src1_ncols, src1_padded_col_size, stream); /* DPCT1010:93: SYCL uses exceptions to report errors and does not use the error codes. The call was replaced with 0. You need to @@ -12956,7 +13252,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, dst_off_device = dst->data; kind = dpct::device_to_host; } else if (dst->backend == GGML_BACKEND_TYPE_GPU) { - dst_off_device = dst_extra->data_device[g_main_device_index]; + dst_off_device = dst_extra->data_device[g_main_device]; kind = dpct::device_to_device; } else { GGML_ASSERT(false); @@ -12969,11 +13265,29 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, // If dst is a vector with ne0 == 1 then you don't have to do this but it still produces correct results. float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); - dhf_dst_i += src1_col_0*ne0 + row_low[id]; - SYCL_CHECK(CHECK_TRY_ERROR(dpct::async_dpct_memcpy( - dhf_dst_i, ne0 * sizeof(float), dst_dd_i, - row_diff * sizeof(float), row_diff * sizeof(float), - src1_ncols, kind, *stream))); + dhf_dst_i += src1_col_0*ne0 + dev[i].row_low; + + //todo, dirty solution. Need be updated when device2device memcpy() is supported. 
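+            // As the todo above notes, a direct device-to-device memcpy is not
+            // yet supported here, so the partial result is staged through a
+            // temporary host buffer: copy device->host on this device's stream,
+            // wait, then host->device on the main device's stream, wait, and
+            // free the staging buffer. Correct, but it adds two extra copies
+            // and two synchronization points per slice.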
+ if (kind == dpct::device_to_device) { + size_t dst_size = ggml_nbytes_pad(dst); + float *host_buf = (float *)malloc(dst_size); + SYCL_CHECK(CHECK_TRY_ERROR(dpct::async_dpct_memcpy( + host_buf, ne0 * sizeof(float), dst_dd_i, + row_diff * sizeof(float), row_diff * sizeof(float), + src1_ncols, dpct::device_to_host, *stream))); + dpct::dev_mgr::instance().get_device(g_sycl_gpu_mgr->gpus[i]).queues_wait_and_throw(); + SYCL_CHECK(CHECK_TRY_ERROR(dpct::async_dpct_memcpy( + dhf_dst_i, ne0 * sizeof(float), host_buf, + row_diff * sizeof(float), row_diff * sizeof(float), + src1_ncols, dpct::host_to_device, *main_stream))); + dpct::dev_mgr::instance().get_device(g_sycl_gpu_mgr->gpus[g_main_device]).queues_wait_and_throw(); + free(host_buf); + } else { + SYCL_CHECK(CHECK_TRY_ERROR(dpct::async_dpct_memcpy( + dhf_dst_i, ne0 * sizeof(float), dst_dd_i, + row_diff * sizeof(float), row_diff * sizeof(float), + src1_ncols, kind, *stream))); + } } else { float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); @@ -12985,7 +13299,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, } // add event for the main device to wait on until other device is done - if (split && (id != g_main_device_index || is != 0)) { + if (split && (i != g_main_device || is != 0)) { /* DPCT1024:94: The original code returned the error code that was further consumed by the program logic. This original @@ -12993,48 +13307,27 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, program logic consuming the error code. */ SYCL_CHECK(CHECK_TRY_ERROR( - *src0_extra->events[id][is] = + *src0_extra->events[i][is] = stream->ext_oneapi_submit_barrier())); } } } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device_index) || row_low[id] == row_high[id]) { - continue; - } - SYCL_CHECK(ggml_sycl_set_device(get_device_id_by_index(id))); - - // free buffers again when done - if (dst_as[id] > 0) { - ggml_sycl_pool_free(dst_dd[id], dst_as[id]); - } - if (src1_asq[id] > 0) { - ggml_sycl_pool_free(src1_ddq[id], src1_asq[id]); - } - if (src1_asf[id] > 0) { - ggml_sycl_pool_free(src1_ddf[id], src1_asf[id]); - } - if (src0_as[id] > 0) { - ggml_sycl_pool_free(src0_dd[id], src0_as[id]); - } - } - // main device waits for all other devices to be finished if (split && g_device_count > 1) { int64_t is_max = (ne11 + MUL_MAT_SRC1_COL_STRIDE - 1) / MUL_MAT_SRC1_COL_STRIDE; is_max = is_max <= MAX_STREAMS ? 
is_max : MAX_STREAMS; - SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - for (int64_t id = 0; id < g_device_count; ++id) { - if (row_low[id] == row_high[id]) { + ggml_sycl_set_device(g_main_device); + for (int i = 0; i < g_device_count; ++i) { + if (dev[i].row_low == dev[i].row_high) { continue; } for (int64_t is = 0; is < is_max; ++is) { SYCL_CHECK(CHECK_TRY_ERROR( - g_syclStreams[g_main_device_index][0]->ext_oneapi_submit_barrier( - {*src0_extra->events[id][is]}))); + g_syclStreams[g_main_device][0]->ext_oneapi_submit_barrier( + {*src0_extra->events[i][is]}))); } } } @@ -13051,110 +13344,132 @@ catch (sycl::exception const &exc) { std::exit(1); } + static void ggml_sycl_repeat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_repeat); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_get_rows(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_get_rows); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_add(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_add); - // log_tensor_with_cnt("log_ggml_sycl_add_src0", (struct ggml_tensor *) src0, 6); - // log_tensor_with_cnt("log_ggml_sycl_add_src1", (struct ggml_tensor *)src1, 6); - // log_tensor_with_cnt("log_ggml_sycl_add_dst", dst, 6); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_acc(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_acc); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_mul(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_mul); - // log_tensor_with_cnt("log_ggml_sycl_mul_src0", (struct ggml_tensor *)src0, 6); - // log_tensor_with_cnt("log_ggml_sycl_mul_src1", (struct ggml_tensor *)src1, 6); - // log_tensor_with_cnt("log_ggml_sycl_mul_dst", dst, 6); - + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_div(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_div); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_gelu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_gelu); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_silu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_silu); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_gelu_quick(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_gelu_quick); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_tanh(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_tanh); + 
GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_relu); + GGML_SYCL_DEBUG("call %s done\n", __func__); +} + +static void ggml_sycl_hardsigmoid(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + GGML_SYCL_DEBUG("call %s\n", __func__); + ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_hardsigmoid); + GGML_SYCL_DEBUG("call %s done\n", __func__); +} + +static void ggml_sycl_hardswish(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + GGML_SYCL_DEBUG("call %s\n", __func__); + ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_hardswish); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_leaky_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_leaky_relu); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_sqr(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_sqr); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_norm); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_group_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_group_norm); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_concat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_concat); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_upscale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_upscale); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_pad(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_pad); + GGML_SYCL_DEBUG("call %s done\n", __func__); } static void ggml_sycl_rms_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_SYCL_DEBUG("call %s\n", __func__); ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_rms_norm); - // log_tensor_with_cnt("log_ggml_sycl_rms_norm_src0", (struct ggml_tensor *)src0, 6); - // log_tensor_with_cnt("log_ggml_sycl_rms_norm_src1", (struct ggml_tensor *)src1, 6); - // log_tensor_with_cnt("log_ggml_sycl_rms_norm_dst", dst, 6); + GGML_SYCL_DEBUG("call %s done\n", __func__); } bool ggml_sycl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { @@ -13189,16 +13504,16 @@ static void ggml_sycl_mul_mat_vec_p021(const ggml_tensor *src0, const int64_t ne12 = src1->ne[2]; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = 
(ggml_tensor_extra_gpu *) src0->extra; - void * src0_ddq = src0_extra->data_device[g_main_device_index]; + void * src0_ddq = src0_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; - float * src1_ddf = (float *) src1_extra->data_device[g_main_device_index]; + float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - float * dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; + float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; ggml_mul_mat_p021_f16_f32_sycl(src0_ddq, src1_ddf, dst_ddf, ne00, ne01, ne02, ne12, main_stream); } @@ -13228,16 +13543,16 @@ static void ggml_sycl_mul_mat_vec_nc(const ggml_tensor *src0, const int64_t ne12 = src1->ne[2]; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; - void * src0_ddq = src0_extra->data_device[g_main_device_index]; + void * src0_ddq = src0_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; - float * src1_ddf = (float *) src1_extra->data_device[g_main_device_index]; + float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - float * dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; + float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; const int64_t row_stride_x = nb01 / sizeof(sycl::half); const int64_t channel_stride_x = nb02 / sizeof(sycl::half); @@ -13280,38 +13595,37 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, ggml_tensor *dst) try { GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_TENSOR_BINARY_OP_LOCALS - const int64_t ne_dst = ggml_nelements(dst); + const int64_t ne_dst = ggml_nelements(dst); SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; SYCL_CHECK( - CHECK_TRY_ERROR(g_sycl_handles[g_main_device_index] = main_stream)); + CHECK_TRY_ERROR(g_sycl_handles[g_main_device] = main_stream)); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; - void * src0_ddq = src0_extra->data_device[g_main_device_index]; + void * src0_ddq = src0_extra->data_device[g_main_device]; sycl::half *src0_as_f16 = (sycl::half *)src0_ddq; ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; - float * src1_ddf = (float *) src1_extra->data_device[g_main_device_index]; + float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - float * dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; + float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; // convert src1 to fp16 sycl_pool_alloc src1_f16_alloc; if (src1->type != GGML_TYPE_F16) { - const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); - const int64_t ne_src1 = ggml_nelements(src1); - src1_f16_alloc.alloc(ne_src1); - GGML_ASSERT(to_fp16_sycl != nullptr); - to_fp16_sycl(src1_ddf, src1_f16_alloc.get(), 
ne_src1, main_stream); + const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); + const int64_t ne_src1 = ggml_nelements(src1); + src1_f16_alloc.alloc(ne_src1); + GGML_ASSERT(to_fp16_sycl != nullptr); + to_fp16_sycl(src1_ddf, src1_f16_alloc.get(), ne_src1, main_stream); } sycl::half *src1_f16 = src1->type == GGML_TYPE_F16 ? (sycl::half *)src1_ddf : src1_f16_alloc.get(); @@ -13358,7 +13672,7 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, int i02 = i12 / r2; SYCL_CHECK( - syclGemmEx(g_sycl_handles[g_main_device_index], CUBLAS_OP_T, CUBLAS_OP_N, + syclGemmEx(g_sycl_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, alpha, (const char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3] , SYCL_R_16F, nb01/sizeof(half), (const char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2, SYCL_R_16F, nb11/sizeof(float), @@ -13371,9 +13685,8 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, #else if (r2 == 1 && r3 == 1 && src0->nb[2]*src0->ne[2] == src0->nb[3] && src1->nb[2]*src1->ne[2] == src1->nb[3]) { // there is no broadcast and src0, src1 are contiguous across dims 2, 3 - // use syclGemmStridedBatchedEx SYCL_CHECK(CHECK_TRY_ERROR(dpct::gemm_batch( - *g_sycl_handles[g_main_device_index], oneapi::mkl::transpose::trans, + *g_sycl_handles[g_main_device], oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, ne01, ne11, ne10, alpha, (const char *)src0_as_f16, dpct::library_data_t::real_half, nb01 / nb00, nb02 / nb00, @@ -13382,7 +13695,6 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, (char *)dst_t, cu_data_type, ne01, nb2 / nb0, ne12 * ne13, cu_compute_type))); } else { - // use syclGemmBatchedEx const int ne23 = ne12*ne13; sycl_pool_alloc ptrs_src(2*ne23); @@ -13415,7 +13727,7 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, }); } SYCL_CHECK(CHECK_TRY_ERROR(dpct::gemm_batch( - *g_sycl_handles[g_main_device_index], oneapi::mkl::transpose::trans, + *g_sycl_handles[g_main_device], oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, ne01, ne11, ne10, alpha, (const void **)(ptrs_src.get() + 0 * ne23), dpct::library_data_t::real_half, nb01 / nb00, @@ -13435,6 +13747,7 @@ catch (sycl::exception const &exc) { std::exit(1); } + static void ggml_sycl_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = (src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT) && @@ -13444,9 +13757,9 @@ static void ggml_sycl_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; - for (int64_t id = 0; id < g_device_count; ++id) { - if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - min_compute_capability = g_device_caps[id].cc; + for (int i = 0; i < g_device_count; ++i) { + if (min_compute_capability > g_device_caps[i].cc && g_tensor_split[i] < (i + 1 < g_device_count ? 
g_tensor_split[i + 1] : 1.0f)) { + min_compute_capability = g_device_caps[i].cc; } } @@ -13587,30 +13900,30 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { const int64_t ne = ggml_nelements(dst); SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - syclStream_t main_stream = g_syclStreams[g_main_device_index][0]; + syclStream_t main_stream = g_syclStreams[g_main_device][0]; - SYCL_CHECK(syclSetStream(g_sycl_handles[g_main_device_index], main_stream)); + SYCL_CHECK(syclSetStream(g_sycl_handles[g_main_device], main_stream)); //ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; - //void * src0_ddq = src0_extra->data_device[g_main_device_index]; + //void * src0_ddq = src0_extra->data_device[g_main_device]; //half * src0_as_f16 = (half *) src0_ddq; ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; - float * src1_ddf = (float *) src1_extra->data_device[g_main_device_index]; + float * src1_ddf = (float *) src1_extra->data_device[g_main_device]; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - float * dst_ddf = (float *) dst_extra->data_device[g_main_device_index]; + float * dst_ddf = (float *) dst_extra->data_device[g_main_device]; // convert src1 to fp16 const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); GGML_ASSERT(to_fp16_sycl != nullptr); size_t src1_as = 0; - half * src1_as_f16 = (half *) ggml_sycl_pool_malloc(ne1 * sizeof(half), &src1_as); + half * src1_as_f16 = (half *) ggml_sycl_pool_malloc(g_main_device, ne1 * sizeof(half), &src1_as); to_fp16_sycl(src1_ddf, src1_as_f16, ne1, main_stream); size_t dst_as = 0; - half * dst_f16 = (half *) ggml_sycl_pool_malloc(ne * sizeof(half), &dst_as); + half * dst_f16 = (half *) ggml_sycl_pool_malloc(g_main_device, ne * sizeof(half), &dst_as); GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -13631,14 +13944,14 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { size_t ptrs_src_s = 0; size_t ptrs_dst_s = 0; - ptrs_src = (const void **) ggml_sycl_pool_malloc(2*ne23*sizeof(void *), &ptrs_src_s); - ptrs_dst = ( void **) ggml_sycl_pool_malloc(1*ne23*sizeof(void *), &ptrs_dst_s); + ptrs_src = (const void **) ggml_sycl_pool_malloc(g_main_device, 2*ne23*sizeof(void *), &ptrs_src_s); + ptrs_dst = ( void **) ggml_sycl_pool_malloc(g_main_device, 1*ne23*sizeof(void *), &ptrs_dst_s); int64_t src0_ne = ggml_nelements(src00); half * src0_as_f16 = nullptr; size_t src0_as = 0; if (src00->type != GGML_TYPE_F16) { - src0_as_f16 = (half *) ggml_sycl_pool_malloc(src0_ne * sizeof(half), &src0_as); + src0_as_f16 = (half *) ggml_sycl_pool_malloc(g_main_device, src0_ne * sizeof(half), &src0_as); } static_assert(GGML_MAX_SRC == 6, "GGML_MAX_SRC == 6"); @@ -13653,16 +13966,16 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { r2, r3, src00->type, src0_as_f16, src0_ne, src1_as_f16, dst_f16, - (const int *)((ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device_index], id, - dst->src[2] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[2]->extra)->data_device[g_main_device_index] : nullptr, - dst->src[3] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[3]->extra)->data_device[g_main_device_index] : nullptr, - dst->src[4] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[4]->extra)->data_device[g_main_device_index] : nullptr, - dst->src[5] ? 
(const half *)((ggml_tensor_extra_gpu *)dst->src[5]->extra)->data_device[g_main_device_index] : nullptr + (const int *)((ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device], id, + dst->src[2] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[2]->extra)->data_device[g_main_device] : nullptr, + dst->src[3] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[3]->extra)->data_device[g_main_device] : nullptr, + dst->src[4] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[4]->extra)->data_device[g_main_device] : nullptr, + dst->src[5] ? (const half *)((ggml_tensor_extra_gpu *)dst->src[5]->extra)->data_device[g_main_device] : nullptr ); SYCL_CHECK(syclGetLastError()); SYCL_CHECK( - syclGemmBatchedEx(g_sycl_handles[g_main_device_index], CUBLAS_OP_T, CUBLAS_OP_N, + syclGemmBatchedEx(g_sycl_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, &alpha_f16, (const void **) (ptrs_src + 0*ne23), SYCL_R_16F, ne00, (const void **) (ptrs_src + 1*ne23), SYCL_R_16F, ne10, @@ -13672,20 +13985,20 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { CUBLAS_GEMM_DEFAULT_TENSOR_OP)); if (src0_as != 0) { - ggml_sycl_pool_free(src0_as_f16, src0_as); + ggml_sycl_pool_free(g_main_device, src0_as_f16, src0_as); } if (ptrs_src_s != 0) { - ggml_sycl_pool_free(ptrs_src, ptrs_src_s); + ggml_sycl_pool_free(g_main_device, ptrs_src, ptrs_src_s); } if (ptrs_dst_s != 0) { - ggml_sycl_pool_free(ptrs_dst, ptrs_dst_s); + ggml_sycl_pool_free(g_main_device, ptrs_dst, ptrs_dst_s); } const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(GGML_TYPE_F16); to_fp32_sycl(dst_f16, dst_ddf, ne, main_stream); - ggml_sycl_pool_free(src1_as_f16, src1_as); - ggml_sycl_pool_free(dst_f16, dst_as); + ggml_sycl_pool_free(g_main_device, src1_as_f16, src1_as); + ggml_sycl_pool_free(g_main_device, dst_f16, dst_as); } #endif @@ -13706,10 +14019,10 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, std::vector ids_host(ggml_nbytes(ids)); - const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[g_main_device][0]; if (ids->backend == GGML_BACKEND_TYPE_GPU) { - const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device_index]; + const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; SYCL_CHECK(CHECK_TRY_ERROR( stream->memcpy(ids_host.data(), ids_dev, ggml_nbytes(ids)))); SYCL_CHECK(CHECK_TRY_ERROR(stream->wait())); @@ -13733,9 +14046,9 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, dst_row.extra = &dst_row_extra; char * src1_original = src1->backend == GGML_BACKEND_TYPE_CPU ? - (char *) src1->data : (char *) src1_extra->data_device[g_main_device_index]; + (char *) src1->data : (char *) src1_extra->data_device[g_main_device]; char * dst_original = dst->backend == GGML_BACKEND_TYPE_CPU ? - (char *) dst->data : (char *) dst_extra->data_device[g_main_device_index]; + (char *) dst->data : (char *) dst_extra->data_device[g_main_device]; if (src1->ne[1] == 1) { GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); @@ -13752,10 +14065,10 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, const struct ggml_tensor * src0_row = dst->src[row_id + 2]; - src1_row_extra.data_device[g_main_device_index] = src1_original + i01*src1->nb[1]; + src1_row_extra.data_device[g_main_device] = src1_original + i01*src1->nb[1]; src1_row.data = (char *) src1->data + i01*src1->nb[1]; // TODO why is this set? 
- dst_row_extra.data_device[g_main_device_index] = dst_original + i01*dst->nb[1]; + dst_row_extra.data_device[g_main_device] = dst_original + i01*dst->nb[1]; dst_row.data = (char *) dst->data + i01*dst->nb[1]; // TODO why is this set? ggml_sycl_mul_mat(src0_row, &src1_row, &dst_row); @@ -13764,8 +14077,8 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, sycl_pool_alloc src1_contiguous(sizeof(float)*ggml_nelements(src1)); sycl_pool_alloc dst_contiguous(sizeof(float)*ggml_nelements(dst)); - src1_row_extra.data_device[g_main_device_index] = src1_contiguous.get(); - dst_row_extra.data_device[g_main_device_index] = dst_contiguous.get(); + src1_row_extra.data_device[g_main_device] = src1_contiguous.get(); + dst_row_extra.data_device[g_main_device] = dst_contiguous.get(); for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; @@ -13853,13 +14166,13 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, GGML_TENSOR_BINARY_OP_LOCALS; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; + dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; const ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; const ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; - char * src1_ddc = (char *) src1_extra->data_device[g_main_device_index]; + char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; + char * src1_ddc = (char *) src1_extra->data_device[g_main_device]; if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32) { ggml_cpy_f32_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); @@ -13871,6 +14184,8 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, ggml_cpy_f32_q4_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_1) { ggml_cpy_f32_q4_1_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); + } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32) { + ggml_cpy_f16_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16) { ggml_cpy_f16_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_I16 && src1->type == GGML_TYPE_I16) { @@ -13914,6 +14229,10 @@ static void ggml_sycl_alibi(const ggml_tensor * src0, const ggml_tensor * src1, ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_alibi); } +static void ggml_sycl_pool2d(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_pool2d); +} + static void ggml_sycl_im2col(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_sycl_op_flatten(src0, src1, dst, ggml_sycl_op_im2col); } @@ -13940,93 +14259,6 @@ static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_spl return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); } -void 
ggml_sycl_transform_tensor(void *data, struct ggml_tensor *tensor) try { - const int64_t nrows = ggml_nrows(tensor); - - const int64_t ne0 = tensor->ne[0]; - - const size_t nb1 = tensor->nb[1]; - - ggml_backend_type backend = tensor->backend; - ggml_tensor_extra_gpu * extra = new struct ggml_tensor_extra_gpu; - memset(extra, 0, sizeof(*extra)); - - for (int64_t id = 0; id < g_device_count; ++id) { - if (backend == GGML_BACKEND_TYPE_GPU && id != g_main_device_index) { - continue; - } - ggml_sycl_set_device(get_device_id_by_index(id)); - const dpct::queue_ptr stream = g_syclStreams[id][0]; - - int64_t row_low, row_high; - if (backend == GGML_BACKEND_TYPE_GPU) { - row_low = 0; - row_high = nrows; - } else if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { - const int64_t rounding = get_row_rounding(tensor->type); - - row_low = id == 0 ? 0 : nrows*g_tensor_split[id]; - row_low -= row_low % rounding; - - if (id == g_device_count - 1) { - row_high = nrows; - } else { - row_high = nrows*g_tensor_split[id + 1]; - row_high -= row_high % rounding; - } - } else { - GGML_ASSERT(false); - } - if (row_low == row_high) { - continue; - } - - int64_t nrows_split = row_high - row_low; - - const size_t offset_split = row_low*nb1; - size_t size = ggml_nbytes_split(tensor, nrows_split); - const size_t original_size = size; - - // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses - if (ne0 % MATRIX_ROW_PADDING != 0) { - size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); - } - - char * buf; - SYCL_CHECK(CHECK_TRY_ERROR(buf = (char *)sycl::malloc_device( - size, *stream))); - char * buf_host = (char *)data + offset_split; - - // set padding to 0 to avoid possible NaN values - if (size > original_size) { - SYCL_CHECK(CHECK_TRY_ERROR( - (*stream) - .memset(buf + original_size, 0, size - original_size) - .wait())); - } - - SYCL_CHECK(CHECK_TRY_ERROR((*stream) - .memcpy(buf, buf_host, original_size) - .wait())); - - extra->data_device[id] = buf; - - if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { - for (int64_t is = 0; is < MAX_STREAMS; ++is) { - SYCL_CHECK(CHECK_TRY_ERROR(extra->events[id][is] = - new sycl::event())); - } - } - } - - tensor->extra = extra; -} -catch (sycl::exception const &exc) { - std::cerr << exc.what() << "Exception caught at file:" << __FILE__ - << ", line:" << __LINE__ << std::endl; - std::exit(1); -} - void ggml_sycl_free_data(struct ggml_tensor *tensor) try { if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_TYPE_GPU && tensor->backend != GGML_BACKEND_TYPE_GPU_SPLIT) ) { return; @@ -14034,18 +14266,18 @@ void ggml_sycl_free_data(struct ggml_tensor *tensor) try { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - for (int64_t id = 0; id < g_device_count; ++id) { - const dpct::queue_ptr stream = g_syclStreams[id][0]; - if (extra->data_device[id] != nullptr) { - SYCL_CHECK(ggml_sycl_set_device(get_device_id_by_index(id))); - SYCL_CHECK(CHECK_TRY_ERROR(sycl::free(extra->data_device[id], *stream))); + for (int i = 0; i < g_device_count; ++i) { + const dpct::queue_ptr stream = g_syclStreams[i][0]; + if (extra->data_device[i] != nullptr) { + SYCL_CHECK(ggml_sycl_set_device(i)); + SYCL_CHECK(CHECK_TRY_ERROR(sycl::free(extra->data_device[i], *stream))); } for (int64_t is = 0; is < MAX_STREAMS; ++is) { - if (extra->events[id][is] != nullptr) { - SYCL_CHECK(ggml_sycl_set_device(get_device_id_by_index(id))); + if (extra->events[i][is] != nullptr) { + SYCL_CHECK(ggml_sycl_set_device(i)); 
SYCL_CHECK(CHECK_TRY_ERROR( - dpct::destroy_event(extra->events[id][is]))); + dpct::destroy_event(extra->events[i][is]))); } } } @@ -14105,22 +14337,22 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, const size_t size = ggml_nbytes(tensor); SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[g_main_device][0]; if (inplace && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; + char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; size_t offset = 0; if (tensor->op == GGML_OP_VIEW) { memcpy(&offset, tensor->op_params, sizeof(size_t)); } extra = ggml_sycl_alloc_temp_tensor_extra(); - extra->data_device[g_main_device_index] = src0_ddc + offset; + extra->data_device[g_main_device] = src0_ddc + offset; } else if (tensor->op == GGML_OP_CPY) { ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu * ) tensor->src[1]->extra; - void * src1_ddv = src1_extra->data_device[g_main_device_index]; + void * src1_ddv = src1_extra->data_device[g_main_device]; extra = ggml_sycl_alloc_temp_tensor_extra(); - extra->data_device[g_main_device_index] = src1_ddv; + extra->data_device[g_main_device] = src1_ddv; } else if (scratch) { GGML_ASSERT(size <= g_scratch_size); if (g_scratch_offset + size > g_scratch_size) { @@ -14135,7 +14367,7 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, g_scratch_buffer = data; } extra = ggml_sycl_alloc_temp_tensor_extra(); - extra->data_device[g_main_device_index] = data + g_scratch_offset; + extra->data_device[g_main_device] = data + g_scratch_offset; g_scratch_offset += size; @@ -14148,44 +14380,7 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, (*stream).memset(data, 0, size).wait())); extra = new ggml_tensor_extra_gpu; memset(extra, 0, sizeof(*extra)); - extra->data_device[g_main_device_index] = data; - } - - tensor->extra = extra; -} -catch (sycl::exception const &exc) { - std::cerr << exc.what() << "Exception caught at file:" << __FILE__ - << ", line:" << __LINE__ << std::endl; - std::exit(1); -} - -void ggml_sycl_assign_scratch_offset(struct ggml_tensor *tensor, - size_t offset) try { - if (g_scratch_size == 0) { - return; - } - if (g_scratch_buffer == nullptr) { - ggml_sycl_set_device(g_main_device); - const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; - SYCL_CHECK( - CHECK_TRY_ERROR(g_scratch_buffer = (void *)sycl::malloc_device( - g_scratch_size, *stream))); - } - - ggml_tensor_extra_gpu * extra = ggml_sycl_alloc_temp_tensor_extra(); - - const bool inplace = tensor->view_src != nullptr; - - if (inplace && (tensor->view_src->backend == GGML_BACKEND_TYPE_GPU || tensor->view_src->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; - size_t view_offset = 0; - if (tensor->op == GGML_OP_VIEW) { - memcpy(&view_offset, tensor->op_params, sizeof(size_t)); - } - extra->data_device[g_main_device_index] = src0_ddc + view_offset; - } else { - extra->data_device[g_main_device_index] = (char *) g_scratch_buffer + offset; + extra->data_device[g_main_device] = data; } tensor->extra = extra; @@ 
-14202,9 +14397,9 @@ void ggml_sycl_copy_to_device(struct ggml_tensor *tensor) try { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); - const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[g_main_device][0]; SYCL_CHECK(CHECK_TRY_ERROR((*stream) - .memcpy(extra->data_device[g_main_device_index], + .memcpy(extra->data_device[g_main_device], tensor->data, ggml_nbytes(tensor)) .wait())); } @@ -14231,21 +14426,17 @@ void ggml_sycl_assign_buffers_force_inplace(struct ggml_tensor * tensor) { } void ggml_sycl_set_main_device(const int main_device) try { + if (g_main_device == main_device) return; + check_allow_gpu_index(main_device); + g_main_device = main_device; + g_main_device_id = g_sycl_gpu_mgr->gpus[main_device]; - if (main_device >= g_all_sycl_device_count) { - fprintf(stderr, "warning: cannot set main_device=%d because there are only %d devices. Using device %d instead.\n", - main_device, g_all_sycl_device_count, g_main_device); - return; - } - - if (g_main_device != main_device && g_device_count >= 1) { - g_main_device = main_device; - g_main_device_index = get_device_index_by_id(g_main_device); + if (g_ggml_sycl_debug) { dpct::device_info prop; SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(g_main_device)))); + prop, dpct::dev_mgr::instance().get_device(g_main_device_id)))); fprintf(stderr, "Using device %d (%s) as main device\n", - g_main_device, prop.get_name()); + g_main_device_id, prop.get_name()); } } catch (sycl::exception const &exc) { @@ -14268,7 +14459,7 @@ void ggml_sycl_free_scratch() try { return; } ggml_sycl_set_device(g_main_device); - const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[g_main_device][0]; SYCL_CHECK(CHECK_TRY_ERROR( sycl::free(g_scratch_buffer, *stream))); @@ -14340,6 +14531,12 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_UNARY_OP_RELU: func = ggml_sycl_relu; break; + case GGML_UNARY_OP_HARDSIGMOID: + func = ggml_sycl_hardsigmoid; + break; + case GGML_UNARY_OP_HARDSWISH: + func = ggml_sycl_hardswish; + break; default: return false; } @@ -14414,6 +14611,9 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_IM2COL: func = ggml_sycl_im2col; break; + case GGML_OP_POOL_2D: + func = ggml_sycl_pool2d; + break; case GGML_OP_SUM_ROWS: func = ggml_sycl_sum_rows; break; @@ -14439,27 +14639,15 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ } GGML_API GGML_CALL void ggml_sycl_get_gpu_list(int *id_list, int max_len) try { - int max_compute_units = -1; - for(int i=0;igpus.size();i++){ + if (i>=max_len) break; + id_list[i] = g_sycl_gpu_mgr->gpus[i]; } return; } @@ -14486,8 +14674,9 @@ catch (sycl::exception const &exc) { GGML_API GGML_CALL void ggml_sycl_get_device_description(int device, char *description, size_t description_size) try { dpct::device_info prop; + int device_id = g_sycl_gpu_mgr->gpus[device]; SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(device)))); + prop, dpct::dev_mgr::instance().get_device(device_id)))); snprintf(description, description_size, "%s", prop.get_name()); } catch (sycl::exception const &exc) { @@ -14496,17 +14685,36 @@ catch (sycl::exception const &exc) { std::exit(1); } +GGML_CALL void 
ggml_backend_sycl_get_device_memory(int device, size_t *free, + size_t *total) try { + ggml_sycl_set_device(device); + + /* + DPCT1009:218: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string was + inserted. You need to rewrite this code. + */ + /* + DPCT1106:217: 'cudaMemGetInfo' was migrated with the Intel extensions for + device information which may not be supported by all compilers or runtimes. + You may need to adjust the code. + */ + int device_id = g_sycl_gpu_mgr->gpus[device]; + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::dev_mgr::instance().get_device(device_id).get_memory_info(*free, *total))); +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + //////////////////////////////////////////////////////////////////////////////// // backend interface #define UNUSED GGML_UNUSED -struct ggml_backend_sycl_context { - int device; - std::string name; -}; - // sycl buffer struct ggml_backend_sycl_buffer_context { @@ -14516,7 +14724,12 @@ struct ggml_backend_sycl_buffer_context { size_t temp_tensor_extra_index = 0; std::string name; - ggml_backend_sycl_buffer_context(int device, void * dev_ptr) : device(device), dev_ptr(dev_ptr) {} + ggml_backend_sycl_buffer_context(int device, void * dev_ptr) : + device(device), dev_ptr(dev_ptr) { + check_allow_gpu_index(device); + int id = g_sycl_gpu_mgr->gpus[device]; + name = (GGML_SYCL_NAME + std::to_string(id)); + } ~ ggml_backend_sycl_buffer_context() { delete[] temp_tensor_extras; @@ -14547,10 +14760,9 @@ GGML_CALL static bool ggml_backend_buffer_is_sycl(ggml_backend_buffer_t buffer) static void ggml_backend_sycl_buffer_free_buffer(ggml_backend_buffer_t buffer) try { - ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; + ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; ggml_sycl_set_device(ctx->device); - int device_index = get_device_index_by_id(ctx->device); - const dpct::queue_ptr stream = g_syclStreams[device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[ctx->device][0]; SYCL_CHECK( CHECK_TRY_ERROR(sycl::free(ctx->dev_ptr, *stream))); @@ -14563,13 +14775,14 @@ catch (sycl::exception const &exc) { } static void * ggml_backend_sycl_buffer_get_base(ggml_backend_buffer_t buffer) { - ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; + ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; return ctx->dev_ptr; } -static void ggml_backend_sycl_buffer_init_tensor(ggml_backend_buffer_t buffer, - ggml_tensor *tensor) try { - ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; +GGML_CALL static void +ggml_backend_sycl_buffer_init_tensor(ggml_backend_buffer_t buffer, + ggml_tensor *tensor) try { + ggml_backend_sycl_buffer_context * ctx = (ggml_backend_sycl_buffer_context *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { assert(tensor->view_src->buffer->buft == buffer->buft); @@ -14581,27 +14794,20 @@ static void ggml_backend_sycl_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor_extra_gpu * extra = ctx->ggml_sycl_alloc_temp_tensor_extra(); extra->data_device[ctx->device] = tensor->data; - tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; if (ggml_is_quantized(tensor->type)) { // initialize 
padding to 0 to avoid possible NaN values - int64_t row_low = 0; - int64_t row_high = ggml_nrows(tensor); - int64_t nrows_split = row_high - row_low; - - size_t original_size = ggml_nbytes_split(tensor, nrows_split); + size_t original_size = ggml_nbytes(tensor); size_t padded_size = ggml_backend_buft_get_alloc_size(buffer->buft, tensor); if (padded_size > original_size && tensor->view_src == nullptr) { SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[ctx->device][0]->memset( (char *)tensor->data + original_size, 0, - padded_size - original_size))); + padded_size - original_size).wait())); } } - - UNUSED(buffer); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -14615,13 +14821,12 @@ static void ggml_backend_sycl_buffer_set_tensor(ggml_backend_buffer_t buffer, size_t size) try { GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); - ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; + ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; ggml_sycl_set_device(ctx->device); - int device_index = get_device_index_by_id(ctx->device); - const dpct::queue_ptr stream = g_syclStreams[device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[ctx->device][0]; SYCL_CHECK( - CHECK_TRY_ERROR(dpct::get_current_device().queues_wait_and_throw())); + CHECK_TRY_ERROR(dpct::dev_mgr::instance().get_device(ctx->device).queues_wait_and_throw())); SYCL_CHECK( CHECK_TRY_ERROR((*stream) @@ -14640,14 +14845,13 @@ static void ggml_backend_sycl_buffer_get_tensor(ggml_backend_buffer_t buffer, size_t size) try { GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); - ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; + ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; ggml_sycl_set_device(ctx->device); - int device_index = get_device_index_by_id(ctx->device); - const dpct::queue_ptr stream = g_syclStreams[device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[ctx->device][0]; SYCL_CHECK( - CHECK_TRY_ERROR(dpct::get_current_device().queues_wait_and_throw())); + CHECK_TRY_ERROR(dpct::dev_mgr::instance().get_device(ctx->device).queues_wait_and_throw())); SYCL_CHECK(CHECK_TRY_ERROR( (*stream) @@ -14660,13 +14864,73 @@ catch (sycl::exception const &exc) { std::exit(1); } +GGML_CALL static bool +ggml_backend_sycl_buffer_cpy_tensor(ggml_backend_buffer_t buffer, + const ggml_tensor *src, + ggml_tensor *dst) try { + if (ggml_backend_buffer_is_sycl(src->buffer)) { + ggml_backend_sycl_buffer_context * src_ctx = (ggml_backend_sycl_buffer_context *)src->buffer->context; + ggml_backend_sycl_buffer_context * dst_ctx = (ggml_backend_sycl_buffer_context *)buffer->context; + + ggml_sycl_set_device(src_ctx->device); + /* + DPCT1009:198: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. + */ + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::dev_mgr::instance().get_device(src_ctx->device).queues_wait_and_throw())); + ggml_sycl_set_device(dst_ctx->device); + /* + DPCT1009:199: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. 
+ */ + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::dev_mgr::instance().get_device(dst_ctx->device).queues_wait_and_throw())); + /* + DPCT1009:200: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. + */ + + dpct::queue_ptr stream_dst = g_syclStreams[dst_ctx->device][0]; + dpct::queue_ptr stream_src = g_syclStreams[src_ctx->device][0]; + size_t size = ggml_nbytes(src); + + //todo. it's dirty solutino to walkaroud known issue:device2device cross GPUs. + dev2dev_memcpy(*stream_dst, *stream_src, dst->data, src->data, size); + +//todo, it's known issue:error in device2device cross GPUs. reused when the issue is fixed. DON"T remove +#if 0 + SYCL_CHECK(CHECK_TRY_ERROR((*stream).memcpy( + (char *)dst->data, (const char *)src->data, size).wait())); + + /* + DPCT1009:201: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. + */ + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::dev_mgr::instance().get_device(dst_ctx->device).queues_wait_and_throw())); +#endif + return true; + } + return false; +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + + static void ggml_backend_sycl_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) try { ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; ggml_sycl_set_device(ctx->device); - int device_index = get_device_index_by_id(ctx->device); - const dpct::queue_ptr stream = g_syclStreams[device_index][0]; + const dpct::queue_ptr stream = g_syclStreams[ctx->device][0]; SYCL_CHECK( CHECK_TRY_ERROR(dpct::get_current_device().queues_wait_and_throw())); @@ -14687,7 +14951,7 @@ static struct ggml_backend_buffer_i ggml_backend_sycl_buffer_interface = { /* .init_tensor = */ ggml_backend_sycl_buffer_init_tensor, /* .set_tensor = */ ggml_backend_sycl_buffer_set_tensor, /* .get_tensor = */ ggml_backend_sycl_buffer_get_tensor, - /* .cpy_tensor = */ NULL, + /* .cpy_tensor = */ ggml_backend_sycl_buffer_cpy_tensor, /* .clear = */ ggml_backend_sycl_buffer_clear, /* .reset = */ NULL, }; @@ -14698,29 +14962,28 @@ struct ggml_backend_sycl_buffer_type_context { std::string name; }; +struct ggml_backend_sycl_context { + int device; + std::string name; +}; + GGML_CALL static const char * ggml_backend_sycl_buffer_type_name(ggml_backend_buffer_type_t buft) { ggml_backend_sycl_buffer_type_context * ctx = (ggml_backend_sycl_buffer_type_context *)buft->context; return ctx->name.c_str(); } - -static ggml_backend_buffer_t +GGML_CALL static ggml_backend_buffer_t ggml_backend_sycl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) try { ggml_backend_sycl_buffer_type_context * buft_ctx = (ggml_backend_sycl_buffer_type_context *)buft->context; - int device = (int) buft_ctx->device; - - ggml_sycl_set_device(device); - int device_index = get_device_index_by_id(device); - const dpct::queue_ptr stream = g_syclStreams[device_index][0]; + ggml_sycl_set_device(buft_ctx->device); + const dpct::queue_ptr stream = g_syclStreams[buft_ctx->device][0]; size = std::max(size, (size_t)1); // syclMalloc returns null for size 0 void * dev_ptr; SYCL_CHECK(CHECK_TRY_ERROR(dev_ptr = (void *)sycl::malloc_device( size, *stream))); - - ggml_backend_sycl_buffer_context * ctx = new 
ggml_backend_sycl_buffer_context(device, dev_ptr); - + ggml_backend_sycl_buffer_context * ctx = new ggml_backend_sycl_buffer_context(buft_ctx->device, dev_ptr); return ggml_backend_buffer_init(buft, ggml_backend_sycl_buffer_interface, ctx, size); } catch (sycl::exception const &exc) { @@ -14729,9 +14992,8 @@ catch (sycl::exception const &exc) { std::exit(1); } -static size_t ggml_backend_sycl_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_sycl_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 128; - UNUSED(buft); } @@ -14741,13 +15003,8 @@ static size_t ggml_backend_sycl_buffer_type_get_max_size(ggml_backend_buffer_typ UNUSED(buft); } -static size_t ggml_backend_sycl_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { - int64_t row_low = 0; - int64_t row_high = ggml_nrows(tensor); - int64_t nrows_split = row_high - row_low; - - size_t size = ggml_nbytes_split(tensor, nrows_split); - +GGML_CALL static size_t ggml_backend_sycl_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { + size_t size = ggml_nbytes(tensor); int64_t ne0 = tensor->ne[0]; if (ggml_is_quantized(tensor->type)) { @@ -14761,10 +15018,13 @@ static size_t ggml_backend_sycl_buffer_type_get_alloc_size(ggml_backend_buffer_t UNUSED(buft); } -static bool ggml_backend_sycl_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { - return ggml_backend_is_sycl(backend); - - UNUSED(buft); +GGML_CALL static bool ggml_backend_sycl_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + if (!ggml_backend_is_sycl(backend)) { + return false; + } + ggml_backend_sycl_buffer_type_context * buft_ctx = (ggml_backend_sycl_buffer_type_context *)buft->context; + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; + return buft_ctx->device == sycl_ctx->device; } static ggml_backend_buffer_type_i ggml_backend_sycl_buffer_type_interface = { @@ -14783,10 +15043,10 @@ ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device) { static bool ggml_backend_sycl_buffer_type_initialized = false; if (!ggml_backend_sycl_buffer_type_initialized) { - for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) { + for (int i = 0; i < g_device_count; i++) { ggml_backend_sycl_buffer_types[i] = { /* .iface = */ ggml_backend_sycl_buffer_type_interface, - /* .context = */ new ggml_backend_sycl_buffer_type_context{i, GGML_SYCL_NAME + std::to_string(i)}, + /* .context = */ new ggml_backend_sycl_buffer_type_context{i, GGML_SYCL_NAME + std::to_string(g_sycl_gpu_mgr->gpus[i])}, }; } ggml_backend_sycl_buffer_type_initialized = true; @@ -14795,6 +15055,391 @@ ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device) { return &ggml_backend_sycl_buffer_types[device]; } +// sycl split buffer type +static void get_row_split(int64_t * row_low, int64_t * row_high, const ggml_tensor * tensor, const std::array & tensor_split, int id) { + const int64_t nrows = ggml_nrows(tensor); + const int64_t rounding = get_row_rounding(tensor->type, tensor_split); + + *row_low = id == 0 ? 
0 : nrows*tensor_split[id]; + *row_low -= *row_low % rounding; + if (id == g_device_count - 1) { + *row_high = nrows; + } else { + *row_high = nrows*tensor_split[id + 1]; + *row_high -= *row_high % rounding; + } +} + +struct ggml_backend_sycl_split_buffer_context { + ~ggml_backend_sycl_split_buffer_context() try { + for (ggml_tensor_extra_gpu * extra : tensor_extras) { + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + for (int64_t is = 0; is < MAX_STREAMS; ++is) { + if (extra->events[i][is] != nullptr) { + /* + DPCT1009:206: SYCL uses exceptions to report errors and + does not use the error codes. The original code was + commented out and a warning string was inserted. You + need to rewrite this code. + */ + SYCL_CHECK(CHECK_TRY_ERROR( + dpct::destroy_event(extra->events[i][is]))); + } + } + if (extra->data_device[i] != nullptr) { + /* + DPCT1009:207: SYCL uses exceptions to report errors and does + not use the error codes. The original code was commented out + and a warning string was inserted. You need to rewrite this + code. + */ + ggml_sycl_set_device(i); + SYCL_CHECK(CHECK_TRY_ERROR(sycl::free( + extra->data_device[i], *g_syclStreams[i][0]))); + } + } + delete extra; + } + } + catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); + } + + std::vector tensor_extras; +}; + +GGML_CALL static const char * ggml_backend_sycl_split_buffer_get_name(ggml_backend_buffer_t buffer) { + return GGML_SYCL_NAME "_Split"; + + UNUSED(buffer); +} + +// unused at the moment +//static bool ggml_backend_buffer_is_sycl_split(ggml_backend_buffer_t buffer) { +// return buffer->iface.get_name == ggml_backend_sycl_split_buffer_get_name; +//} + +GGML_CALL static void ggml_backend_sycl_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_sycl_split_buffer_context * ctx = (ggml_backend_sycl_split_buffer_context *)buffer->context; + delete ctx; +} + +GGML_CALL static void * ggml_backend_sycl_split_buffer_get_base(ggml_backend_buffer_t buffer) { + // the pointers are stored in the tensor extras, this is just a dummy address and never dereferenced + return (void *)0x1000; + + UNUSED(buffer); +} + +GGML_CALL static void +ggml_backend_sycl_split_buffer_init_tensor(ggml_backend_buffer_t buffer, + ggml_tensor *tensor) try { + GGML_ASSERT(tensor->view_src == nullptr); // views of split tensors are not supported + + ggml_backend_sycl_split_buffer_context * ctx = (ggml_backend_sycl_split_buffer_context *)buffer->context; + ggml_backend_sycl_split_buffer_type_context * buft_ctx = (ggml_backend_sycl_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + + ggml_tensor_extra_gpu * extra = new ggml_tensor_extra_gpu{}; + + ctx->tensor_extras.push_back(extra); + + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, i); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + // FIXME: do not crash if cudaMalloc fails + // currently, init_tensor cannot fail, it needs 
to be fixed in ggml-backend first + ggml_sycl_set_device(i); + char * buf; + /* + DPCT1009:208: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. + */ + SYCL_CHECK(CHECK_TRY_ERROR(buf = (char *)sycl::malloc_device( + size, *g_syclStreams[i][0]))); + + // set padding to 0 to avoid possible NaN values + if (size > original_size) { + /* + DPCT1009:209: SYCL uses exceptions to report errors and does not use + the error codes. The original code was commented out and a warning + string was inserted. You need to rewrite this code. + */ + SYCL_CHECK(CHECK_TRY_ERROR( + (*g_syclStreams[i][0]) + .memset(buf + original_size, 0, size - original_size) + .wait())); + } + + extra->data_device[i] = buf; + + for (int64_t is = 0; is < MAX_STREAMS; ++is) { + /* + DPCT1009:210: SYCL uses exceptions to report errors and does not use + the error codes. The original code was commented out and a warning + string was inserted. You need to rewrite this code. + */ + SYCL_CHECK( + CHECK_TRY_ERROR(extra->events[i][is] = new sycl::event())); + } + } + tensor->backend = GGML_BACKEND_TYPE_GPU_SPLIT; + tensor->extra = extra; +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + +GGML_CALL static void +ggml_backend_sycl_split_buffer_set_tensor(ggml_backend_buffer_t buffer, + ggml_tensor *tensor, const void *data, + size_t offset, size_t size) try { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_sycl_split_buffer_type_context * buft_ctx = (ggml_backend_sycl_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, i); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + const char * buf_host = (const char *)data + offset_split; + /* + DPCT1009:211: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. 
+ */ + ggml_sycl_set_device(i); + SYCL_CHECK(CHECK_TRY_ERROR( + (*g_syclStreams[i][0]) + .memcpy(extra->data_device[i], buf_host, original_size) + .wait())); + } +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + +GGML_CALL static void +ggml_backend_sycl_split_buffer_get_tensor(ggml_backend_buffer_t buffer, + const ggml_tensor *tensor, void *data, + size_t offset, size_t size) try { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_sycl_split_buffer_type_context * buft_ctx = (ggml_backend_sycl_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, i); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + char * buf_host = (char *)data + offset_split; + /* + DPCT1009:212: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. + */ + ggml_sycl_set_device(i); + SYCL_CHECK(CHECK_TRY_ERROR( + (*g_syclStreams[i][0]) + .memcpy(buf_host, extra->data_device[i], original_size) + .wait())); + } +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + +GGML_CALL static void ggml_backend_sycl_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + UNUSED(buffer); + UNUSED(value); +} + +static struct ggml_backend_buffer_i ggml_backend_sycl_split_buffer_interface = { + /* .get_name = */ ggml_backend_sycl_split_buffer_get_name, + /* .free_buffer = */ ggml_backend_sycl_split_buffer_free_buffer, + /* .get_base = */ ggml_backend_sycl_split_buffer_get_base, + /* .init_tensor = */ ggml_backend_sycl_split_buffer_init_tensor, + /* .set_tensor = */ ggml_backend_sycl_split_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_sycl_split_buffer_get_tensor, + /* .cpy_tensor = */ NULL, + /* .clear = */ ggml_backend_sycl_split_buffer_clear, + /* .reset = */ NULL, +}; + +GGML_CALL static const char * ggml_backend_sycl_split_buffer_type_name(ggml_backend_buffer_type_t buft) { + return GGML_SYCL_NAME "_Split"; + + UNUSED(buft); +} + +GGML_CALL static ggml_backend_buffer_t ggml_backend_sycl_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + // since we don't know the exact split after rounding, we cannot allocate the device buffers at this point + // instead, we allocate them for each tensor separately in init_tensor + // however, the size still represents the maximum cumulative size of all the device buffers after the tensors are allocated, + // as returned by get_alloc_size. 
this limit is enforced during tensor allocation by ggml-alloc, so it must be correct. + ggml_backend_sycl_split_buffer_context * ctx = new ggml_backend_sycl_split_buffer_context(); + + return ggml_backend_buffer_init(buft, ggml_backend_sycl_split_buffer_interface, ctx, size); +} + +GGML_CALL static size_t ggml_backend_sycl_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return 128; + UNUSED(buft); +} + +GGML_CALL static size_t ggml_backend_sycl_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { + ggml_backend_sycl_split_buffer_type_context * ctx = (ggml_backend_sycl_split_buffer_type_context *)buft->context; + + size_t total_size = 0; + + const int64_t ne0 = tensor->ne[0]; + + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, ctx->tensor_split, i); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + total_size += ggml_nbytes_split(tensor, nrows_split); + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + total_size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + } + + return total_size; +} + +GGML_CALL static bool ggml_backend_sycl_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { + return ggml_backend_is_sycl(backend); + + UNUSED(buft); +} + +GGML_CALL static bool ggml_backend_sycl_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return false; + + UNUSED(buft); +} + +static ggml_backend_buffer_type_i ggml_backend_sycl_split_buffer_type_interface = { + /* .get_name = */ ggml_backend_sycl_split_buffer_type_name, + /* .alloc_buffer = */ ggml_backend_sycl_split_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_sycl_split_buffer_type_get_alignment, + /* .get_max_size = */ NULL, // defaults to SIZE_MAX + /* .get_alloc_size = */ ggml_backend_sycl_split_buffer_type_get_alloc_size, + /* .supports_backend = */ ggml_backend_sycl_split_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_sycl_split_buffer_type_is_host, +}; + +GGML_CALL ggml_backend_buffer_type_t ggml_backend_sycl_split_buffer_type(const float * tensor_split) { + // FIXME: this is not thread safe + static std::map, struct ggml_backend_buffer_type> buft_map; + + std::array tensor_split_arr = {}; + + bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + GGML_SYCL_MAX_DEVICES, [](float x) { return x == 0.0f; }); + if (all_zero) { + tensor_split_arr = g_default_tensor_split; + } else { + float split_sum = 0.0f; + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + tensor_split_arr[i] = split_sum; + split_sum += tensor_split[i]; + } + for (int i = 0; i < g_device_count; ++i) { + // int id = g_sycl_gpu_mgr->gpus[i]; + tensor_split_arr[i] /= split_sum; + } + } + + auto it = buft_map.find(tensor_split_arr); + if (it != buft_map.end()) { + return &it->second; + } + + struct ggml_backend_buffer_type buft { + /* .iface = */ ggml_backend_sycl_split_buffer_type_interface, + /* .context = */ new ggml_backend_sycl_split_buffer_type_context{tensor_split_arr}, + }; + + auto result = buft_map.emplace(tensor_split_arr, buft); + return &result.first->second; +} + // host buffer type GGML_CALL static const char * ggml_backend_sycl_host_buffer_type_name(ggml_backend_buffer_type_t buft) { @@ 
-14824,6 +15469,7 @@ static ggml_backend_buffer_t ggml_backend_sycl_host_buffer_type_alloc_buffer(ggm // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); buffer->buft = buft; + buffer->iface.get_name = ggml_backend_sycl_host_buffer_name; buffer->iface.free_buffer = ggml_backend_sycl_host_buffer_free_buffer; return buffer; @@ -14848,34 +15494,33 @@ ggml_backend_buffer_type_t ggml_backend_sycl_host_buffer_type() { // backend -static const char * ggml_backend_sycl_name(ggml_backend_t backend) { - return GGML_SYCL_NAME; +GGML_CALL static const char * ggml_backend_sycl_name(ggml_backend_t backend) { - UNUSED(backend); + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; + + return sycl_ctx->name.c_str(); } -static void ggml_backend_sycl_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_sycl_free(ggml_backend_t backend) { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; delete sycl_ctx; delete backend; } -static ggml_backend_buffer_type_t ggml_backend_sycl_get_default_buffer_type(ggml_backend_t backend) { - ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_sycl_get_default_buffer_type(ggml_backend_t backend) { + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; return ggml_backend_sycl_buffer_type(sycl_ctx->device); } -static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, +GGML_CALL static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, ggml_tensor *tensor, const void *data, size_t offset, size_t size) try { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; - GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); - SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( (char *)tensor->data + offset, data, size))); } @@ -14885,15 +15530,13 @@ catch (sycl::exception const &exc) { std::exit(1); } -static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, +GGML_CALL static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, const ggml_tensor *tensor, void *data, size_t offset, size_t size) try { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; - GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); - SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( data, (const char *)tensor->data + offset, size))); } @@ -14903,9 +15546,31 @@ catch (sycl::exception const &exc) { std::exit(1); } +GGML_CALL static bool ggml_backend_sycl_cpy_tensor_async(ggml_backend_t backend, + const ggml_tensor *src, + ggml_tensor *dst) try { + ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; + if (dst->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && ggml_backend_buffer_is_sycl(src->buffer)) { + /* + DPCT1009:215: SYCL uses exceptions to report errors and does not use the + error codes. The original code was commented out and a warning string + was inserted. You need to rewrite this code. 
+ */ + SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( + dst->data, src->data, ggml_nbytes(dst)))); + return true; + } + + return false; +} +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} + static void ggml_backend_sycl_synchronize(ggml_backend_t backend) try { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; - SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->wait())); UNUSED(backend); @@ -14916,32 +15581,8 @@ catch (sycl::exception const &exc) { std::exit(1); } -static ggml_backend_graph_plan_t ggml_backend_sycl_graph_plan_create(ggml_backend_t backend, const ggml_cgraph * cgraph) { - GGML_ASSERT(!"not implemented"); - - return nullptr; - - UNUSED(backend); - UNUSED(cgraph); -} - -static void ggml_backend_sycl_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - -static void ggml_backend_sycl_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - -static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; - ggml_sycl_set_main_device(sycl_ctx->device); ggml_compute_params params = {}; @@ -14949,63 +15590,41 @@ static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; - - if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) + if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE || node->op == GGML_OP_NONE) { continue; - - assert(node->backend == GGML_BACKEND_TYPE_GPU); + } +#ifndef NDEBUG + assert(node->backend == GGML_BACKEND_TYPE_GPU || node->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU); + assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU || node->src[j]->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->src[j]->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->src[j]->extra != nullptr); } } - +#endif bool ok = ggml_sycl_compute_forward(¶ms, node); if (!ok) { fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } GGML_ASSERT(ok); - -#if 0 - if (node->type == GGML_TYPE_F32) { - syclDeviceSynchronize(); - std::vector tmp(ggml_nelements(node), 0.0f); - syclMemcpy(tmp.data(), node->data, ggml_nelements(node)*sizeof(float), syclMemcpyDeviceToHost); - printf("\n%s (%s) (%s %s) (%s %s): ", node->name, ggml_op_name(node->op), - ggml_type_name(node->src[0]->type), - node->src[1] ? ggml_type_name(node->src[1]->type) : "none", - node->src[0]->name, - node->src[1] ? 
node->src[1]->name : "none"); - double sum = 0.0; - double sq_sum = 0.0; - for (int i = 0; i < ggml_nelements(node); i++) { - printf("%f ", tmp[i]); - sum += tmp[i]; - sq_sum += tmp[i]*tmp[i]; - } - printf("\n"); - printf("sum: %f, ", sum); - printf("sq_sum: %f\n", sq_sum); - } -#endif } - UNUSED(backend); return true; } -static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_tensor * op) { +GGML_CALL static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_HARDSIGMOID: + case GGML_UNARY_OP_HARDSWISH: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_TANH: return true; @@ -15081,16 +15700,17 @@ static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_ten if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { return true; } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return true; + } return false; } break; + case GGML_OP_DUP: + case GGML_OP_REPEAT: case GGML_OP_CONCAT: { ggml_type src0_type = op->src[0]->type; - if (src0_type == GGML_TYPE_F32) { - return true; - } else { - return false; - } + return src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16; } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: @@ -15098,8 +15718,6 @@ static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NORM: - case GGML_OP_REPEAT: - case GGML_OP_DUP: case GGML_OP_ADD: case GGML_OP_MUL: case GGML_OP_DIV: @@ -15113,6 +15731,7 @@ static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_ROPE: case GGML_OP_ALIBI: case GGML_OP_IM2COL: + case GGML_OP_POOL_2D: case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: case GGML_OP_ACC: @@ -15134,11 +15753,11 @@ static ggml_backend_i ggml_backend_sycl_interface = { /* .get_default_buffer_type = */ ggml_backend_sycl_get_default_buffer_type, /* .set_tensor_async = */ ggml_backend_sycl_set_tensor_async, /* .get_tensor_async = */ ggml_backend_sycl_get_tensor_async, - /* .cpy_tensor_async = */ NULL, + /* .cpy_tensor_async = */ ggml_backend_sycl_cpy_tensor_async, /* .synchronize = */ ggml_backend_sycl_synchronize, - /* .graph_plan_create = */ ggml_backend_sycl_graph_plan_create, - /* .graph_plan_free = */ ggml_backend_sycl_graph_plan_free, - /* .graph_plan_compute = */ ggml_backend_sycl_graph_plan_compute, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, /* .graph_compute = */ ggml_backend_sycl_graph_compute, /* .supports_op = */ ggml_backend_sycl_supports_op, }; @@ -15148,20 +15767,17 @@ static ggml_guid_t ggml_backend_sycl_guid() { return &guid; } -ggml_backend_t ggml_backend_sycl_init(int device) { +GGML_CALL ggml_backend_t ggml_backend_sycl_init(int device) { ggml_init_sycl(); // TODO: remove from ggml.c - if (device < 0 || device >= ggml_sycl_get_device_count()) { - fprintf(stderr, "%s: error: invalid device %d\n", __func__, device); - return nullptr; - } + check_allow_gpu_index(device); // not strictly necessary, but it may reduce the overhead of the first graph_compute ggml_sycl_set_main_device(device); - + int id = g_sycl_gpu_mgr->gpus[device]; ggml_backend_sycl_context * ctx = new ggml_backend_sycl_context { /* .device = */ device, - /* .name = */ GGML_SYCL_NAME + std::to_string(device), + /* .name = */ GGML_SYCL_NAME + 
std::to_string(id), }; ggml_backend_t sycl_backend = new ggml_backend { @@ -15177,22 +15793,33 @@ bool ggml_backend_is_sycl(ggml_backend_t backend) { return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_sycl_guid()); } -static ggml_backend_t ggml_backend_reg_sycl_init(const char * params, void * user_data) { +GGML_CALL int ggml_backend_sycl_get_device_count() { + if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); + return g_sycl_gpu_mgr->get_gpu_count(); +} + +GGML_CALL static ggml_backend_t ggml_backend_reg_sycl_init(const char * params, void * user_data) { ggml_backend_t sycl_backend = ggml_backend_sycl_init((int) (intptr_t) user_data); return sycl_backend; UNUSED(params); } +GGML_API GGML_CALL int ggml_backend_sycl_get_device_index(int device_id) { + return g_sycl_gpu_mgr->get_index(device_id); +} + extern "C" int ggml_backend_sycl_reg_devices(); int ggml_backend_sycl_reg_devices() { - int device_count = ggml_sycl_get_device_count(); - - for (int i = 0; i < device_count; i++) { + if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); + g_device_count = g_sycl_gpu_mgr->get_gpu_count(); + assert(g_device_count>0); + for (int i = 0; i < g_device_count; i++) { + int id = g_sycl_gpu_mgr->gpus[i]; char name[128]; - snprintf(name, sizeof(name), "%s%d", GGML_SYCL_NAME, i); + snprintf(name, sizeof(name), "%s%d", GGML_SYCL_NAME, id); ggml_backend_register(name, ggml_backend_reg_sycl_init, ggml_backend_sycl_buffer_type(i), (void *) (intptr_t) i); } - return device_count; + return g_device_count; } diff --git a/ggml-sycl.h b/ggml-sycl.h index 891f2d00a..bf5b11b36 100644 --- a/ggml-sycl.h +++ b/ggml-sycl.h @@ -24,6 +24,11 @@ GGML_API ggml_backend_buffer_type_t ggml_backend_sycl_host_buffer_type(void); GGML_API void ggml_backend_sycl_print_sycl_devices(void); GGML_API GGML_CALL void ggml_sycl_get_gpu_list(int *id_list, int max_len); GGML_API GGML_CALL void ggml_sycl_get_device_description(int device, char *description, size_t description_size); +GGML_API GGML_CALL int ggml_backend_sycl_get_device_count(); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_sycl_split_buffer_type(const float * tensor_split); +GGML_API GGML_CALL void ggml_backend_sycl_get_device_memory(int device, size_t *free, size_t *total); +GGML_API GGML_CALL int ggml_backend_sycl_get_device_index(int device_id); + #ifdef __cplusplus } #endif diff --git a/llama.cpp b/llama.cpp index b1db5b179..cb6266a43 100644 --- a/llama.cpp +++ b/llama.cpp @@ -104,6 +104,7 @@ #define LLAMA_MAX_NODES 8192 #define LLAMA_MAX_EXPERTS 8 + // // logging // @@ -1429,7 +1430,9 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_cpu(bool host_buffer buft = ggml_backend_cuda_host_buffer_type(); } #elif defined(GGML_USE_SYCL) - buft = ggml_backend_sycl_host_buffer_type(); + if (host_buffer) { + buft = ggml_backend_sycl_host_buffer_type(); + } #elif defined(GGML_USE_CPU_HBM) buft = ggml_backend_cpu_hbm_buffer_type(); #elif defined(GGML_USE_VULKAN) @@ -1483,6 +1486,12 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g } #endif +#ifdef GGML_USE_SYCL + if (ggml_backend_sycl_get_device_count() > 1) { + buft = ggml_backend_sycl_split_buffer_type(tensor_split); + } +#endif + if (buft == nullptr) { buft = llama_default_buffer_type_offload(fallback_gpu); } @@ -1494,6 +1503,8 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g static size_t llama_get_device_count() { #if defined(GGML_USE_CUBLAS) return ggml_backend_cuda_get_device_count(); +#elif 
defined(GGML_USE_SYCL) + return ggml_backend_sycl_get_device_count(); #elif defined(GGML_USE_VULKAN) return ggml_backend_vk_get_device_count(); #else @@ -1507,6 +1518,11 @@ static size_t llama_get_device_memory(int device) { size_t free; ggml_backend_cuda_get_device_memory(device, &total, &free); return free; +#elif defined(GGML_USE_SYCL) + size_t total; + size_t free; + ggml_backend_sycl_get_device_memory(device, &total, &free); + return free; #elif defined(GGML_USE_VULKAN) size_t total; size_t free; @@ -12075,13 +12091,31 @@ struct llama_context * llama_new_context_with_model( } #elif defined(GGML_USE_SYCL) if (model->n_gpu_layers > 0) { - ggml_backend_t backend = ggml_backend_sycl_init(model->main_gpu); - if (backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d backend\n", __func__, model->main_gpu); - llama_free(ctx); - return nullptr; + // with split_mode LLAMA_SPLIT_MODE_NONE or LLAMA_SPLIT_MODE_ROW, only the main GPU backend is used + if (model->split_mode == LLAMA_SPLIT_MODE_NONE || model->split_mode == LLAMA_SPLIT_MODE_ROW) { + int main_gpu_index = ggml_backend_sycl_get_device_index(model->main_gpu); + ggml_backend_t backend = ggml_backend_sycl_init(main_gpu_index); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, model->main_gpu, main_gpu_index); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } else { + // LLAMA_SPLIT_LAYER requires a backend for each GPU + int id_list[GGML_SYCL_MAX_DEVICES]; + ggml_sycl_get_gpu_list(id_list, GGML_SYCL_MAX_DEVICES); + for (int i = 0; i < ggml_backend_sycl_get_device_count(); ++i) { + int device_id = id_list[i]; + ggml_backend_t backend = ggml_backend_sycl_init(i); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, device_id, i); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } } - ctx->backends.push_back(backend); } #elif defined(GGML_USE_KOMPUTE) if (model->n_gpu_layers > 0) { @@ -12161,7 +12195,6 @@ struct llama_context * llama_new_context_with_model( ggml_set_name(ctx->inp_cls, "inp_cls"); ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true)); - LLAMA_LOG_INFO("%s: %10s input buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(ctx->buf_input), ggml_backend_buffer_get_size(ctx->buf_input) / 1024.0 / 1024.0); From 802da0091ba646ecf02e1a8fae2da0b8e76409bd Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Sat, 2 Mar 2024 08:42:56 -0500 Subject: [PATCH 758/811] llama : fix segfault from unknown model arch name (#5820) * llama : fix segfault from unknown model arch name * llama : make all LLM maps const This also requires using `std::map::at` instead of its `operator[]` which does not exist for const maps. * llama : name LLM_ARCH_UNKNOWN to "(unknown)" This avoids errors from `std::map::at` when getting the general name of the model architecture. Using "(unknown)" instead of an empty string as per suggestion https://github.com/ggerganov/llama.cpp/pull/5820#issuecomment-1973735284 * llama : remove redundant inner const for LLM_TENSOR_NAMES The extra const won't do anything here as const maps return const references to values. 
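As a rough illustration of the const-map point (a sketch only; the map and helper below are placeholders, not the actual `LLM_ARCH_NAMES` code): `operator[]` is a non-const member because it may insert a value-initialized entry for a missing key, while `at()` is available on const maps and throws `std::out_of_range` instead of silently inserting.

    // Placeholder sketch, not the patch's actual tables or helpers.
    #include <cstdio>
    #include <map>

    static const std::map<int, const char *> ARCH_NAMES = {
        { 0, "llama"     },
        { 1, "falcon"    },
        { 2, "(unknown)" },   // explicit fallback entry, so looking it up never throws
    };

    static const char * arch_name(int arch) {
        // On a mutable map, ARCH_NAMES[arch] inserts a value-initialized (null)
        // const char * for an unknown key, i.e. a spurious element with a NULL
        // name; on a const map operator[] does not compile at all, which forces
        // the find()/at() pattern instead.
        auto it = ARCH_NAMES.find(arch);
        return it != ARCH_NAMES.end() ? it->second : ARCH_NAMES.at(2);
    }

    int main() {
        std::printf("%s\n", arch_name(0));   // llama
        std::printf("%s\n", arch_name(42));  // (unknown)
    }

In the patch itself the same effect comes from the `.at()` calls in `LLM_KV`/`LLM_TN` together with the explicit `LLM_ARCH_UNKNOWN` entry mapped to "(unknown)".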
Co-authored-by: Jared Van Bortel * llama : remove redundant nullptr check in llm_arch_from_string Since LLM_ARCH_NAMES is a const map, no spurious elements with a NULL name are inserted anymore, so this check is dead code. --------- Co-authored-by: Jared Van Bortel --- llama.cpp | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/llama.cpp b/llama.cpp index cb6266a43..790c2740f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -216,7 +216,7 @@ enum llm_arch { LLM_ARCH_UNKNOWN, }; -static std::map LLM_ARCH_NAMES = { +static const std::map LLM_ARCH_NAMES = { { LLM_ARCH_LLAMA, "llama" }, { LLM_ARCH_FALCON, "falcon" }, { LLM_ARCH_GPT2, "gpt2" }, @@ -241,6 +241,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_MINICPM, "minicpm" }, { LLM_ARCH_GEMMA, "gemma" }, { LLM_ARCH_STARCODER2, "starcoder2" }, + { LLM_ARCH_UNKNOWN, "(unknown)" }, }; enum llm_kv { @@ -301,7 +302,7 @@ enum llm_kv { LLM_KV_TOKENIZER_RWKV, }; -static std::map LLM_KV_NAMES = { +static const std::map LLM_KV_NAMES = { { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" }, { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" }, { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" }, @@ -365,7 +366,7 @@ struct LLM_KV { llm_arch arch; std::string operator()(llm_kv kv) const { - return ::format(LLM_KV_NAMES[kv], LLM_ARCH_NAMES[arch]); + return ::format(LLM_KV_NAMES.at(kv), LLM_ARCH_NAMES.at(arch)); } }; @@ -400,7 +401,7 @@ enum llm_tensor { LLM_TENSOR_LAYER_OUT_NORM, }; -static std::map> LLM_TENSOR_NAMES = { +static const std::map> LLM_TENSOR_NAMES = { { LLM_ARCH_LLAMA, { @@ -833,38 +834,38 @@ struct LLM_TN { llm_arch arch; std::string operator()(llm_tensor tensor) const { - if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) { return "__missing__"; } - return LLM_TENSOR_NAMES[arch].at(tensor); + return LLM_TENSOR_NAMES.at(arch).at(tensor); } std::string operator()(llm_tensor tensor, const std::string & suffix) const { - if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) { return "__missing__"; } - return LLM_TENSOR_NAMES[arch].at(tensor) + "." + suffix; + return LLM_TENSOR_NAMES.at(arch).at(tensor) + "." + suffix; } std::string operator()(llm_tensor tensor, int bid) const { - if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) { return "__missing__"; } - return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid); + return ::format(LLM_TENSOR_NAMES.at(arch).at(tensor).c_str(), bid); } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid) const { - if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) { return "__missing__"; } - return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid) + "." + suffix; + return ::format(LLM_TENSOR_NAMES.at(arch).at(tensor).c_str(), bid) + "." + suffix; } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid, int xid) const { - if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + if (LLM_TENSOR_NAMES.at(arch).find(tensor) == LLM_TENSOR_NAMES.at(arch).end()) { return "__missing__"; } - return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid, xid) + "." 
+ suffix; + return ::format(LLM_TENSOR_NAMES.at(arch).at(tensor).c_str(), bid, xid) + "." + suffix; } }; @@ -872,7 +873,7 @@ struct LLM_TN { // gguf helpers // -static std::map LLAMA_ROPE_SCALING_TYPES = { +static const std::map LLAMA_ROPE_SCALING_TYPES = { { LLAMA_ROPE_SCALING_TYPE_NONE, "none" }, { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" }, { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" }, From 6c32d8c7ad8ba7b6ad2a162e929a21dd04fcdca0 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Sat, 2 Mar 2024 15:19:09 +0100 Subject: [PATCH 759/811] llama : refactor internal quantization functions (#5830) --- llama.cpp | 81 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 43 insertions(+), 38 deletions(-) diff --git a/llama.cpp b/llama.cpp index 790c2740f..697e85e89 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10836,7 +10836,7 @@ struct quantize_state_internal { {} }; -static void llama_convert_tensor_internal( +static void llama_tensor_dequantize_internal( struct ggml_tensor * tensor, std::vector> & output, std::vector & workers, const size_t nelements, const int nthread ) { @@ -11177,6 +11177,46 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty return new_type; } +static int32_t llama_tensor_quantize_internal(enum ggml_type new_type, const float * f32_data, void * new_data, const int chunk_size, int nrows, int n_per_row, int64_t * hist_cur, const float * imatrix, std::vector & workers, const int nthread) { + std::mutex mutex; + int counter = 0; + size_t new_size = 0; + if (nthread < 2) { + // single-thread + return ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, hist_cur, imatrix); + } + auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, chunk_size, + nrows, n_per_row, imatrix]() { + std::array local_hist = {}; + const int nrows_per_chunk = chunk_size / n_per_row; + size_t local_size = 0; + while (true) { + std::unique_lock lock(mutex); + int first_row = counter; counter += nrows_per_chunk; + if (first_row >= nrows) { + if (local_size > 0) { + for (int j=0; jftype; @@ -11289,7 +11329,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s std::vector workers; workers.reserve(nthread); - std::mutex mutex; int idx = 0; @@ -11403,7 +11442,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } else if (ggml_is_quantized(tensor->type) && !params->allow_requantize) { throw std::runtime_error(format("requantizing from type %s is disabled", ggml_type_name(tensor->type))); } else { - llama_convert_tensor_internal(tensor, f32_conv_buf, workers, nelements, nthread); + llama_tensor_dequantize_internal(tensor, f32_conv_buf, workers, nelements, nthread); f32_data = (float *) f32_conv_buf.data(); } @@ -11424,41 +11463,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s const int nchunk = (nelements + chunk_size - 1)/chunk_size; const int nthread_use = nthread > 1 ? 
std::max(1, std::min(nthread, nchunk)) : 1; - if (nthread_use < 2) { - new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, hist_cur.data(), imatrix); - } else { - int counter = 0; - new_size = 0; - auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, chunk_size, - nrows, n_per_row, imatrix]() { - std::array local_hist = {}; - const int nrows_per_chunk = chunk_size / n_per_row; - size_t local_size = 0; - while (true) { - std::unique_lock lock(mutex); - int first_row = counter; counter += nrows_per_chunk; - if (first_row >= nrows) { - if (local_size > 0) { - for (int j=0; j %8.2f MiB", ggml_nbytes(tensor)/1024.0/1024.0, new_size/1024.0/1024.0); int64_t tot_count = 0; From ef2cd694c4155fbf25bae61c5178c47eb3676dba Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 2 Mar 2024 16:54:08 +0200 Subject: [PATCH 760/811] scripts : add pod-llama.sh --- scripts/pod-llama.sh | 213 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 213 insertions(+) create mode 100644 scripts/pod-llama.sh diff --git a/scripts/pod-llama.sh b/scripts/pod-llama.sh new file mode 100644 index 000000000..6cf1ab4f3 --- /dev/null +++ b/scripts/pod-llama.sh @@ -0,0 +1,213 @@ +#!/bin/bash +# +# Use this script only on fresh pods (runpod.io)! +# Otherwise, it can break your environment! +# + +if [ -z "$1" ]; then + echo "Usage: $0 " + echo " 0: no models" + echo " 1: tinyllama-1b" + echo " 2: codellama-7b" + echo " 3: codellama-13b" + echo " 4: codellama-34b" + echo " 5: codellama-7b-instruct" + echo " 6: codellama-13b-instruct" + echo " 7: codellama-34b-instruct" + + exit 1 +fi + +set -x + +# setup deps +apt-get update +apt-get install -y git-lfs cmake cmake-curses-gui vim ruby +git-lfs install + +if [ ! -d "/workspace" ]; then + ln -sfn $(pwd) /workspace +fi + +# download data +cd /workspace + +# this is useful to git clone repos without doubling the disk size due to .git +git clone https://github.com/iboB/git-lfs-download +ln -sfn /workspace/git-lfs-download/git-lfs-download /usr/local/bin/git-lfs-download + +# llama.cpp +cd /workspace +git clone https://github.com/ggerganov/llama.cpp + +cd llama.cpp + +LLAMA_CUBLAS=1 make -j + +ln -sfn /workspace/TinyLlama-1.1B-Chat-v0.3 ./models/tinyllama-1b +ln -sfn /workspace/CodeLlama-7b-hf ./models/codellama-7b +ln -sfn /workspace/CodeLlama-13b-hf ./models/codellama-13b +ln -sfn /workspace/CodeLlama-34b-hf ./models/codellama-34b +ln -sfn /workspace/CodeLlama-7b-Instruct-hf ./models/codellama-7b-instruct +ln -sfn /workspace/CodeLlama-13b-Instruct-hf ./models/codellama-13b-instruct +ln -sfn /workspace/CodeLlama-34b-Instruct-hf ./models/codellama-34b-instruct + +pip install -r requirements.txt + +# cmake +cd /workspace/llama.cpp + +mkdir build-cublas +cd build-cublas + +cmake -DLLAMA_CUBLAS=1 ../ +make -j + +if [ "$1" -eq "0" ]; then + exit 0 +fi + +# more models +if [ "$1" -eq "1" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/PY007/TinyLlama-1.1B-Chat-v0.3 + + cd /workspace/llama.cpp + + python3 convert.py ./models/tinyllama-1b --outfile ./models/tinyllama-1b/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q4_k.gguf q4_k + ./quantize ./models/tinyllama-1b/ggml-model-f16.gguf ./models/tinyllama-1b/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "2" ]; then + cd /workspace + + git-lfs-download 
https://huggingface.co/codellama/CodeLlama-7b-hf --without *safetensors* + rm -v ./CodeLlama-7b-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 convert.py ./models/codellama-7b --outfile ./models/codellama-7b/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-7b/ggml-model-f16.gguf ./models/codellama-7b/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "3" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/codellama/CodeLlama-13b-hf --without *safetensors* + rm -v ./CodeLlama-13b-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 convert.py ./models/codellama-13b --outfile ./models/codellama-13b/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-13b/ggml-model-f16.gguf ./models/codellama-13b/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "4" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/codellama/CodeLlama-34b-hf --without *safetensors* + rm -v ./CodeLlama-34b-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 convert.py ./models/codellama-34b --outfile ./models/codellama-34b/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-34b/ggml-model-f16.gguf ./models/codellama-34b/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "5" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf --without *safetensors* + rm -v ./CodeLlama-7b-Instruct-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 convert.py ./models/codellama-7b-instruct --outfile ./models/codellama-7b-instruct/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-7b-instruct/ggml-model-f16.gguf ./models/codellama-7b-instruct/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "6" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/codellama/CodeLlama-13b-Instruct-hf --without *safetensors* + rm -v ./CodeLlama-13b-Instruct-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 convert.py ./models/codellama-13b-instruct --outfile ./models/codellama-13b-instruct/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-13b-instruct/ggml-model-f16.gguf ./models/codellama-13b-instruct/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "7" ]; then + cd /workspace + + git-lfs-download https://huggingface.co/codellama/CodeLlama-34b-Instruct-hf --without *safetensors* + rm -v ./CodeLlama-34b-Instruct-hf/*safetensors* + + cd /workspace/llama.cpp + + python3 
convert.py ./models/codellama-34b-instruct --outfile ./models/codellama-34b-instruct/ggml-model-f16.gguf --outtype f16 + + ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_0.gguf q4_0 + ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q4_k.gguf q4_k + ./quantize ./models/codellama-34b-instruct/ggml-model-f16.gguf ./models/codellama-34b-instruct/ggml-model-q8_0.gguf q8_0 +fi + +if [ "$1" -eq "1" ]; then + # perf + perplexity + cd /workspace/llama.cpp/build-cublas + + make -j && ../scripts/run-all-perf.sh tinyllama-1b "f16" "-ngl 99 -t 1 -p 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,32,64,128,256,512,1024,2048 -n 128" + + ../scripts/get-wikitext-2.sh + unzip wikitext-2-raw-v1.zip + + make -j && ./bin/perplexity -m ../models/tinyllama-1b/ggml-model-f16.gguf -f ./wikitext-2-raw/wiki.test.raw -ngl 100 --chunks 32 + + # batched + cd /workspace/llama.cpp + + LLAMA_CUBLAS=1 make -j && ./batched ./models/tinyllama-1b/ggml-model-f16.gguf "Hello, my name is" 8 128 999 + + # batched-bench + cd /workspace/llama.cpp + + LLAMA_CUBLAS=1 make -j && ./batched-bench ./models/tinyllama-1b/ggml-model-f16.gguf 4608 1 99 0 512 128 1,2,3,4,5,6,7,8,16,32 + + # parallel + cd /workspace/llama.cpp + + LLAMA_CUBLAS=1 make -j && ./parallel -m ./models/tinyllama-1b/ggml-model-f16.gguf -t 1 -ngl 100 -c 4096 -b 512 -s 1 -np 8 -ns 128 -n 100 -cb + +fi + +# speculative +#if [ "$1" -eq "7" ]; then +# cd /workspace/llama.cpp +# +# LLAMA_CUBLAS=1 make -j && ./speculative -m ./models/codellama-34b-instruct/ggml-model-f16.gguf -md ./models/codellama-7b-instruct/ggml-model-q4_0.gguf -p "# Dijkstra's shortest path algorithm in Python (4 spaces indentation) + complexity analysis:\n\n" -e -ngl 999 -ngld 999 -t 4 -n 512 -c 4096 -s 21 --draft 16 -np 1 --temp 0.0 +#fi + +# more benches +#LLAMA_CUBLAS=1 make -j && ./batched-bench ./models/codellama-7b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 +#LLAMA_CUBLAS=1 make -j && ./batched-bench ./models/codellama-13b/ggml-model-q4_k.gguf 4096 1 99 1 512,3200 128,128,800 1 + From bbde6eb2561153aabbdfac5001c690fe00cad639 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sat, 2 Mar 2024 17:00:51 +0200 Subject: [PATCH 761/811] ggml : IQ3_S improvements (#5829) * iq3_s: somewhat faster AVX2 dot product On Ryzen a 7950X TG-128 increases to 16 t/s from 15.5 t/s using 16 threads. For 8 threads it is 13.85 t/s vs 11.75 t/s. PP-512 increases to 28.5 t/s from 23.8 t/s. * iq3_s: somewhat faster ARM_NEON dot product Still dog slow - 10.7 t/s up from 9.9 t/s. * iq3_s: another small ARM_NEON improvement 10.7 -> 11.0 t/s. Using vmulq_s8 is faster than the xor - sub trick that works best on AVX2. * iq3_s: minor improvement on Metal 49.4 t/s -> 50.3 t/s * iq3_s: PPL improvement E.g., for a context of 4096 LLaMA-v2-7B goes to 5.1340 from 5.1653. 
* iq3_s: use new grid everywhere * Fix ARM_NEON --------- Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 143 ++++++++++++------------ ggml-metal.metal | 152 ++++++++++++------------- ggml-quants.c | 280 +++++++++++++++++++++++++++-------------------- 3 files changed, 310 insertions(+), 265 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 0c6501e98..7ed97430f 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -2018,74 +2018,73 @@ static const __device__ uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; -static const __device__ uint32_t iq3xs_grid[512] = { - 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, - 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, - 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, - 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, - 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, - 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, - 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, - 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, - 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, - 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, - 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, - 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, - 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, - 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, - 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, - 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, - 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, - 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, - 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, - 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, - 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, - 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, - 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, - 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, - 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, - 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, - 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, - 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, - 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, - 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 
0x1414240c, 0x14142c1c, 0x14142c3e, - 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, - 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, - 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, - 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, - 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, - 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, - 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, - 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, - 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, - 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, - 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, - 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, - 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, - 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, - 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, - 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, - 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, - 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, - 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, - 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, - 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, - 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, - 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, - 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, - 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, - 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, - 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, - 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, - 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, - 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, - 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, - 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, - 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, - 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +static const __device__ uint32_t iq3s_grid[512] = { + 0x01010101, 0x01010103, 0x01010105, 0x0101010b, 0x0101010f, 0x01010301, 0x01010303, 0x01010305, + 0x01010309, 0x0101030d, 
0x01010501, 0x01010503, 0x0101050b, 0x01010707, 0x01010901, 0x01010905, + 0x0101090b, 0x0101090f, 0x01010b03, 0x01010b07, 0x01010d01, 0x01010d05, 0x01010f03, 0x01010f09, + 0x01010f0f, 0x01030101, 0x01030103, 0x01030105, 0x01030109, 0x01030301, 0x01030303, 0x0103030b, + 0x01030501, 0x01030507, 0x0103050f, 0x01030703, 0x0103070b, 0x01030909, 0x01030d03, 0x01030d0b, + 0x01030f05, 0x01050101, 0x01050103, 0x0105010b, 0x0105010f, 0x01050301, 0x01050307, 0x0105030d, + 0x01050503, 0x0105050b, 0x01050701, 0x01050709, 0x01050905, 0x0105090b, 0x0105090f, 0x01050b03, + 0x01050b07, 0x01050f01, 0x01050f07, 0x01070107, 0x01070303, 0x0107030b, 0x01070501, 0x01070505, + 0x01070703, 0x01070707, 0x0107070d, 0x01070909, 0x01070b01, 0x01070b05, 0x01070d0f, 0x01070f03, + 0x01070f0b, 0x01090101, 0x01090307, 0x0109030f, 0x01090503, 0x01090509, 0x01090705, 0x01090901, + 0x01090907, 0x01090b03, 0x01090f01, 0x010b0105, 0x010b0109, 0x010b0501, 0x010b0505, 0x010b050d, + 0x010b0707, 0x010b0903, 0x010b090b, 0x010b090f, 0x010b0d0d, 0x010b0f07, 0x010d010d, 0x010d0303, + 0x010d0307, 0x010d0703, 0x010d0b05, 0x010d0f03, 0x010f0101, 0x010f0105, 0x010f0109, 0x010f0501, + 0x010f0505, 0x010f050d, 0x010f0707, 0x010f0b01, 0x010f0b09, 0x03010101, 0x03010103, 0x03010105, + 0x03010109, 0x03010301, 0x03010303, 0x03010307, 0x0301030b, 0x0301030f, 0x03010501, 0x03010505, + 0x03010703, 0x03010709, 0x0301070d, 0x03010b09, 0x03010b0d, 0x03010d03, 0x03010f05, 0x03030101, + 0x03030103, 0x03030107, 0x0303010d, 0x03030301, 0x03030309, 0x03030503, 0x03030701, 0x03030707, + 0x03030903, 0x03030b01, 0x03030b05, 0x03030f01, 0x03030f0d, 0x03050101, 0x03050305, 0x0305030b, + 0x0305030f, 0x03050501, 0x03050509, 0x03050705, 0x03050901, 0x03050907, 0x03050b0b, 0x03050d01, + 0x03050f05, 0x03070103, 0x03070109, 0x0307010f, 0x03070301, 0x03070307, 0x03070503, 0x0307050f, + 0x03070701, 0x03070709, 0x03070903, 0x03070d05, 0x03070f01, 0x03090107, 0x0309010b, 0x03090305, + 0x03090309, 0x03090703, 0x03090707, 0x03090905, 0x0309090d, 0x03090b01, 0x03090b09, 0x030b0103, + 0x030b0301, 0x030b0307, 0x030b0503, 0x030b0701, 0x030b0705, 0x030b0b03, 0x030d0501, 0x030d0509, + 0x030d050f, 0x030d0909, 0x030d090d, 0x030f0103, 0x030f0107, 0x030f0301, 0x030f0305, 0x030f0503, + 0x030f070b, 0x030f0903, 0x030f0d05, 0x030f0f01, 0x05010101, 0x05010103, 0x05010107, 0x0501010b, + 0x0501010f, 0x05010301, 0x05010305, 0x05010309, 0x0501030d, 0x05010503, 0x05010507, 0x0501050f, + 0x05010701, 0x05010705, 0x05010903, 0x05010907, 0x0501090b, 0x05010b01, 0x05010b05, 0x05010d0f, + 0x05010f01, 0x05010f07, 0x05010f0b, 0x05030101, 0x05030105, 0x05030301, 0x05030307, 0x0503030f, + 0x05030505, 0x0503050b, 0x05030703, 0x05030709, 0x05030905, 0x05030b03, 0x05050103, 0x05050109, + 0x0505010f, 0x05050503, 0x05050507, 0x05050701, 0x0505070f, 0x05050903, 0x05050b07, 0x05050b0f, + 0x05050f03, 0x05050f09, 0x05070101, 0x05070105, 0x0507010b, 0x05070303, 0x05070505, 0x05070509, + 0x05070703, 0x05070707, 0x05070905, 0x05070b01, 0x05070d0d, 0x05090103, 0x0509010f, 0x05090501, + 0x05090507, 0x05090705, 0x0509070b, 0x05090903, 0x05090f05, 0x05090f0b, 0x050b0109, 0x050b0303, + 0x050b0505, 0x050b070f, 0x050b0901, 0x050b0b07, 0x050b0f01, 0x050d0101, 0x050d0105, 0x050d010f, + 0x050d0503, 0x050d0b0b, 0x050d0d03, 0x050f010b, 0x050f0303, 0x050f050d, 0x050f0701, 0x050f0907, + 0x050f0b01, 0x07010105, 0x07010303, 0x07010307, 0x0701030b, 0x0701030f, 0x07010505, 0x07010703, + 0x07010707, 0x0701070b, 0x07010905, 0x07010909, 0x0701090f, 0x07010b03, 0x07010d07, 0x07010f03, + 0x07030103, 0x07030107, 0x0703010b, 0x07030309, 
0x07030503, 0x07030507, 0x07030901, 0x07030d01, + 0x07030f05, 0x07030f0d, 0x07050101, 0x07050305, 0x07050501, 0x07050705, 0x07050709, 0x07050b01, + 0x07070103, 0x07070301, 0x07070309, 0x07070503, 0x07070507, 0x0707050f, 0x07070701, 0x07070903, + 0x07070907, 0x0707090f, 0x07070b0b, 0x07070f07, 0x07090107, 0x07090303, 0x0709030d, 0x07090505, + 0x07090703, 0x07090b05, 0x07090d01, 0x07090d09, 0x070b0103, 0x070b0301, 0x070b0305, 0x070b050b, + 0x070b0705, 0x070b0909, 0x070b0b0d, 0x070b0f07, 0x070d030d, 0x070d0903, 0x070f0103, 0x070f0107, + 0x070f0501, 0x070f0505, 0x070f070b, 0x09010101, 0x09010109, 0x09010305, 0x09010501, 0x09010509, + 0x0901050f, 0x09010705, 0x09010903, 0x09010b01, 0x09010f01, 0x09030105, 0x0903010f, 0x09030303, + 0x09030307, 0x09030505, 0x09030701, 0x0903070b, 0x09030907, 0x09030b03, 0x09030b0b, 0x09050103, + 0x09050107, 0x09050301, 0x0905030b, 0x09050503, 0x09050707, 0x09050901, 0x09050b0f, 0x09050d05, + 0x09050f01, 0x09070109, 0x09070303, 0x09070307, 0x09070501, 0x09070505, 0x09070703, 0x0907070b, + 0x09090101, 0x09090105, 0x09090509, 0x0909070f, 0x09090901, 0x09090f03, 0x090b010b, 0x090b010f, + 0x090b0503, 0x090b0d05, 0x090d0307, 0x090d0709, 0x090d0d01, 0x090f0301, 0x090f030b, 0x090f0701, + 0x090f0907, 0x090f0b03, 0x0b010105, 0x0b010301, 0x0b010309, 0x0b010505, 0x0b010901, 0x0b010909, + 0x0b01090f, 0x0b010b05, 0x0b010d0d, 0x0b010f09, 0x0b030103, 0x0b030107, 0x0b03010b, 0x0b030305, + 0x0b030503, 0x0b030705, 0x0b030f05, 0x0b050101, 0x0b050303, 0x0b050507, 0x0b050701, 0x0b05070d, + 0x0b050b07, 0x0b070105, 0x0b07010f, 0x0b070301, 0x0b07050f, 0x0b070909, 0x0b070b03, 0x0b070d0b, + 0x0b070f07, 0x0b090103, 0x0b090109, 0x0b090501, 0x0b090705, 0x0b09090d, 0x0b0b0305, 0x0b0b050d, + 0x0b0b0b03, 0x0b0b0b07, 0x0b0d0905, 0x0b0f0105, 0x0b0f0109, 0x0b0f0505, 0x0d010303, 0x0d010307, + 0x0d01030b, 0x0d010703, 0x0d010707, 0x0d010d01, 0x0d030101, 0x0d030501, 0x0d03050f, 0x0d030d09, + 0x0d050305, 0x0d050709, 0x0d050905, 0x0d050b0b, 0x0d050d05, 0x0d050f01, 0x0d070101, 0x0d070309, + 0x0d070503, 0x0d070901, 0x0d09050b, 0x0d090907, 0x0d090d05, 0x0d0b0101, 0x0d0b0107, 0x0d0b0709, + 0x0d0b0d01, 0x0d0d010b, 0x0d0d0901, 0x0d0f0303, 0x0d0f0307, 0x0f010101, 0x0f010109, 0x0f01010f, + 0x0f010501, 0x0f010505, 0x0f01070d, 0x0f010901, 0x0f010b09, 0x0f010d05, 0x0f030105, 0x0f030303, + 0x0f030509, 0x0f030907, 0x0f03090b, 0x0f050103, 0x0f050109, 0x0f050301, 0x0f05030d, 0x0f050503, + 0x0f050701, 0x0f050b03, 0x0f070105, 0x0f070705, 0x0f07070b, 0x0f070b07, 0x0f090103, 0x0f09010b, + 0x0f090307, 0x0f090501, 0x0f090b01, 0x0f0b0505, 0x0f0b0905, 0x0f0d0105, 0x0f0d0703, 0x0f0f0101, }; - static const __device__ uint64_t iq1s_grid[512] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, @@ -2392,9 +2391,9 @@ static __global__ void dequantize_block_iq3_s(const void * __restrict__ vx, dst_ const int ib = tid%8; // 0...7 dst_t * y = yy + i*QK_K + 32*ib + 8*il; const uint8_t * qs = x[i].qs + 8*ib; - const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*il+0] | ((x[i].qh[ib] << (8-2*il)) & 256))); - const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*il+1] | ((x[i].qh[ib] << (7-2*il)) & 256))); - const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib/2] >> 4*(ib%2)) & 0xf)) * 0.5f; + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*il+0] | ((x[i].qh[ib] << (8-2*il)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*il+1] | ((x[i].qh[ib] << (7-2*il)) & 256))); + const 
float d = (float)x[i].d * (1 + 2*((x[i].scales[ib/2] >> 4*(ib%2)) & 0xf)); const uint8_t signs = x[i].signs[4*ib + il]; for (int j = 0; j < 4; ++j) { y[j+0] = d * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); @@ -5211,8 +5210,8 @@ static __device__ __forceinline__ float vec_dot_iq3_s_q8_1( const int8_t * q8 = bq8_1[ib32].qs; int sumi = 0; for (int l = 0; l < 4; ++l) { - const uint32_t * grid1 = iq3xs_grid + (qs[2*l+0] | ((bq2->qh[ib32] << (8 - 2*l)) & 256)); - const uint32_t * grid2 = iq3xs_grid + (qs[2*l+1] | ((bq2->qh[ib32] << (7 - 2*l)) & 256)); + const uint32_t * grid1 = iq3s_grid + (qs[2*l+0] | ((bq2->qh[ib32] << (8 - 2*l)) & 256)); + const uint32_t * grid2 = iq3s_grid + (qs[2*l+1] | ((bq2->qh[ib32] << (7 - 2*l)) & 256)); uint32_t signs0 = __vcmpeq4(((bq2->signs[4*ib32+l] & 0xf) * 0x01010101) & 0x08040201, 0x08040201); uint32_t signs1 = __vcmpeq4(((bq2->signs[4*ib32+l] >> 4) * 0x01010101) & 0x08040201, 0x08040201); const int grid_l = __vsub4(grid1[0] ^ signs0, signs0); @@ -5221,7 +5220,7 @@ static __device__ __forceinline__ float vec_dot_iq3_s_q8_1( sumi = __dp4a(grid_h, *((int *)q8+1), sumi); q8 += 8; } - const float d = (float)bq2->d * (0.5f + ((bq2->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * __low2float(bq8_1[ib32].ds) * 0.5f; + const float d = (float)bq2->d * (1 + 2*((bq2->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * __low2float(bq8_1[ib32].ds); return d * sumi; #else assert(false); diff --git a/ggml-metal.metal b/ggml-metal.metal index 74a5e0b03..8b9488437 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -4087,71 +4087,71 @@ constexpr constant static uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; -constexpr constant static uint32_t iq3xs_grid[512] = { - 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, - 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, - 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, - 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, - 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, - 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, - 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, - 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, - 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, - 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, - 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, - 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, - 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, - 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, - 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, - 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, - 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, - 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, - 0x0c140c3e, 
0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, - 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, - 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, - 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, - 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, - 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, - 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, - 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, - 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, - 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, - 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, - 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, - 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, - 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, - 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, - 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, - 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, - 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, - 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, - 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, - 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, - 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, - 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, - 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, - 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, - 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, - 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, - 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, - 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, - 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, - 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, - 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, - 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, - 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, - 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, - 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, - 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 
0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, - 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, - 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, - 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, - 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, - 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, - 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, - 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, - 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, - 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +constexpr constant static uint32_t iq3s_grid[512] = { + 0x01010101, 0x01010103, 0x01010105, 0x0101010b, 0x0101010f, 0x01010301, 0x01010303, 0x01010305, + 0x01010309, 0x0101030d, 0x01010501, 0x01010503, 0x0101050b, 0x01010707, 0x01010901, 0x01010905, + 0x0101090b, 0x0101090f, 0x01010b03, 0x01010b07, 0x01010d01, 0x01010d05, 0x01010f03, 0x01010f09, + 0x01010f0f, 0x01030101, 0x01030103, 0x01030105, 0x01030109, 0x01030301, 0x01030303, 0x0103030b, + 0x01030501, 0x01030507, 0x0103050f, 0x01030703, 0x0103070b, 0x01030909, 0x01030d03, 0x01030d0b, + 0x01030f05, 0x01050101, 0x01050103, 0x0105010b, 0x0105010f, 0x01050301, 0x01050307, 0x0105030d, + 0x01050503, 0x0105050b, 0x01050701, 0x01050709, 0x01050905, 0x0105090b, 0x0105090f, 0x01050b03, + 0x01050b07, 0x01050f01, 0x01050f07, 0x01070107, 0x01070303, 0x0107030b, 0x01070501, 0x01070505, + 0x01070703, 0x01070707, 0x0107070d, 0x01070909, 0x01070b01, 0x01070b05, 0x01070d0f, 0x01070f03, + 0x01070f0b, 0x01090101, 0x01090307, 0x0109030f, 0x01090503, 0x01090509, 0x01090705, 0x01090901, + 0x01090907, 0x01090b03, 0x01090f01, 0x010b0105, 0x010b0109, 0x010b0501, 0x010b0505, 0x010b050d, + 0x010b0707, 0x010b0903, 0x010b090b, 0x010b090f, 0x010b0d0d, 0x010b0f07, 0x010d010d, 0x010d0303, + 0x010d0307, 0x010d0703, 0x010d0b05, 0x010d0f03, 0x010f0101, 0x010f0105, 0x010f0109, 0x010f0501, + 0x010f0505, 0x010f050d, 0x010f0707, 0x010f0b01, 0x010f0b09, 0x03010101, 0x03010103, 0x03010105, + 0x03010109, 0x03010301, 0x03010303, 0x03010307, 0x0301030b, 0x0301030f, 0x03010501, 0x03010505, + 0x03010703, 0x03010709, 0x0301070d, 0x03010b09, 0x03010b0d, 0x03010d03, 0x03010f05, 0x03030101, + 0x03030103, 0x03030107, 0x0303010d, 0x03030301, 0x03030309, 0x03030503, 0x03030701, 0x03030707, + 0x03030903, 0x03030b01, 0x03030b05, 0x03030f01, 0x03030f0d, 0x03050101, 0x03050305, 0x0305030b, + 0x0305030f, 0x03050501, 0x03050509, 0x03050705, 0x03050901, 0x03050907, 0x03050b0b, 0x03050d01, + 0x03050f05, 0x03070103, 0x03070109, 0x0307010f, 0x03070301, 0x03070307, 0x03070503, 0x0307050f, + 0x03070701, 0x03070709, 0x03070903, 0x03070d05, 0x03070f01, 0x03090107, 0x0309010b, 0x03090305, + 0x03090309, 0x03090703, 0x03090707, 0x03090905, 0x0309090d, 0x03090b01, 0x03090b09, 0x030b0103, + 0x030b0301, 0x030b0307, 0x030b0503, 0x030b0701, 0x030b0705, 0x030b0b03, 0x030d0501, 0x030d0509, + 0x030d050f, 0x030d0909, 0x030d090d, 0x030f0103, 0x030f0107, 0x030f0301, 0x030f0305, 0x030f0503, + 0x030f070b, 0x030f0903, 0x030f0d05, 0x030f0f01, 0x05010101, 0x05010103, 0x05010107, 0x0501010b, + 0x0501010f, 0x05010301, 0x05010305, 0x05010309, 0x0501030d, 0x05010503, 0x05010507, 0x0501050f, + 
0x05010701, 0x05010705, 0x05010903, 0x05010907, 0x0501090b, 0x05010b01, 0x05010b05, 0x05010d0f, + 0x05010f01, 0x05010f07, 0x05010f0b, 0x05030101, 0x05030105, 0x05030301, 0x05030307, 0x0503030f, + 0x05030505, 0x0503050b, 0x05030703, 0x05030709, 0x05030905, 0x05030b03, 0x05050103, 0x05050109, + 0x0505010f, 0x05050503, 0x05050507, 0x05050701, 0x0505070f, 0x05050903, 0x05050b07, 0x05050b0f, + 0x05050f03, 0x05050f09, 0x05070101, 0x05070105, 0x0507010b, 0x05070303, 0x05070505, 0x05070509, + 0x05070703, 0x05070707, 0x05070905, 0x05070b01, 0x05070d0d, 0x05090103, 0x0509010f, 0x05090501, + 0x05090507, 0x05090705, 0x0509070b, 0x05090903, 0x05090f05, 0x05090f0b, 0x050b0109, 0x050b0303, + 0x050b0505, 0x050b070f, 0x050b0901, 0x050b0b07, 0x050b0f01, 0x050d0101, 0x050d0105, 0x050d010f, + 0x050d0503, 0x050d0b0b, 0x050d0d03, 0x050f010b, 0x050f0303, 0x050f050d, 0x050f0701, 0x050f0907, + 0x050f0b01, 0x07010105, 0x07010303, 0x07010307, 0x0701030b, 0x0701030f, 0x07010505, 0x07010703, + 0x07010707, 0x0701070b, 0x07010905, 0x07010909, 0x0701090f, 0x07010b03, 0x07010d07, 0x07010f03, + 0x07030103, 0x07030107, 0x0703010b, 0x07030309, 0x07030503, 0x07030507, 0x07030901, 0x07030d01, + 0x07030f05, 0x07030f0d, 0x07050101, 0x07050305, 0x07050501, 0x07050705, 0x07050709, 0x07050b01, + 0x07070103, 0x07070301, 0x07070309, 0x07070503, 0x07070507, 0x0707050f, 0x07070701, 0x07070903, + 0x07070907, 0x0707090f, 0x07070b0b, 0x07070f07, 0x07090107, 0x07090303, 0x0709030d, 0x07090505, + 0x07090703, 0x07090b05, 0x07090d01, 0x07090d09, 0x070b0103, 0x070b0301, 0x070b0305, 0x070b050b, + 0x070b0705, 0x070b0909, 0x070b0b0d, 0x070b0f07, 0x070d030d, 0x070d0903, 0x070f0103, 0x070f0107, + 0x070f0501, 0x070f0505, 0x070f070b, 0x09010101, 0x09010109, 0x09010305, 0x09010501, 0x09010509, + 0x0901050f, 0x09010705, 0x09010903, 0x09010b01, 0x09010f01, 0x09030105, 0x0903010f, 0x09030303, + 0x09030307, 0x09030505, 0x09030701, 0x0903070b, 0x09030907, 0x09030b03, 0x09030b0b, 0x09050103, + 0x09050107, 0x09050301, 0x0905030b, 0x09050503, 0x09050707, 0x09050901, 0x09050b0f, 0x09050d05, + 0x09050f01, 0x09070109, 0x09070303, 0x09070307, 0x09070501, 0x09070505, 0x09070703, 0x0907070b, + 0x09090101, 0x09090105, 0x09090509, 0x0909070f, 0x09090901, 0x09090f03, 0x090b010b, 0x090b010f, + 0x090b0503, 0x090b0d05, 0x090d0307, 0x090d0709, 0x090d0d01, 0x090f0301, 0x090f030b, 0x090f0701, + 0x090f0907, 0x090f0b03, 0x0b010105, 0x0b010301, 0x0b010309, 0x0b010505, 0x0b010901, 0x0b010909, + 0x0b01090f, 0x0b010b05, 0x0b010d0d, 0x0b010f09, 0x0b030103, 0x0b030107, 0x0b03010b, 0x0b030305, + 0x0b030503, 0x0b030705, 0x0b030f05, 0x0b050101, 0x0b050303, 0x0b050507, 0x0b050701, 0x0b05070d, + 0x0b050b07, 0x0b070105, 0x0b07010f, 0x0b070301, 0x0b07050f, 0x0b070909, 0x0b070b03, 0x0b070d0b, + 0x0b070f07, 0x0b090103, 0x0b090109, 0x0b090501, 0x0b090705, 0x0b09090d, 0x0b0b0305, 0x0b0b050d, + 0x0b0b0b03, 0x0b0b0b07, 0x0b0d0905, 0x0b0f0105, 0x0b0f0109, 0x0b0f0505, 0x0d010303, 0x0d010307, + 0x0d01030b, 0x0d010703, 0x0d010707, 0x0d010d01, 0x0d030101, 0x0d030501, 0x0d03050f, 0x0d030d09, + 0x0d050305, 0x0d050709, 0x0d050905, 0x0d050b0b, 0x0d050d05, 0x0d050f01, 0x0d070101, 0x0d070309, + 0x0d070503, 0x0d070901, 0x0d09050b, 0x0d090907, 0x0d090d05, 0x0d0b0101, 0x0d0b0107, 0x0d0b0709, + 0x0d0b0d01, 0x0d0d010b, 0x0d0d0901, 0x0d0f0303, 0x0d0f0307, 0x0f010101, 0x0f010109, 0x0f01010f, + 0x0f010501, 0x0f010505, 0x0f01070d, 0x0f010901, 0x0f010b09, 0x0f010d05, 0x0f030105, 0x0f030303, + 0x0f030509, 0x0f030907, 0x0f03090b, 0x0f050103, 0x0f050109, 0x0f050301, 0x0f05030d, 0x0f050503, + 0x0f050701, 0x0f050b03, 
0x0f070105, 0x0f070705, 0x0f07070b, 0x0f070b07, 0x0f090103, 0x0f09010b, + 0x0f090307, 0x0f090501, 0x0f090b01, 0x0f0b0505, 0x0f0b0905, 0x0f0d0105, 0x0f0d0703, 0x0f0f0101, }; #define NGRID_IQ1S 512 @@ -4742,7 +4742,7 @@ void kernel_mul_mv_iq3_s_f32_impl( { int nval = 8; int pos = (32*sgitg + tiisg)*nval; - for (int i = 0; i < nval; ++i) values[pos + i] = iq3xs_grid[pos + i]; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3s_grid[pos + i]; threadgroup_barrier(mem_flags::mem_threadgroup); } @@ -4769,12 +4769,14 @@ void kernel_mul_mv_iq3_s_f32_impl( for (int row = 0; row < N_DST; row++) { const float db = dh[0]; - const float d = db * (0.5f + ((sc[0] >> 4*(ib%2)) & 0xf)); + const float d = db * (1 + 2*((sc[0] >> 4*(ib%2)) & 0xf)); float2 sum = {0}; for (int l = 0; l < 4; ++l) { - const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); - const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + const threadgroup uint32_t * table1 = qh[0] & kmask_iq2xs[2*l+0] ? values + 256 : values; + const threadgroup uint32_t * table2 = qh[0] & kmask_iq2xs[2*l+1] ? values + 256 : values; + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(table1 + qs[2*l+0]); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(table2 + qs[2*l+1]); for (int j = 0; j < 4; ++j) { sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l] & kmask_iq2xs[j+0]); sum[1] += yl[8*l + j + 4] * grid2[j] * select(1, -1, signs[l] & kmask_iq2xs[j+4]); @@ -4795,7 +4797,7 @@ void kernel_mul_mv_iq3_s_f32_impl( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.5f; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -5685,15 +5687,15 @@ void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & device const uint8_t * qs = xb->qs + 8*ib32; device const uint8_t * signs = xb->signs + 4*ib32 + 2*il; const uint8_t qh = xb->qh[ib32] >> 4*il; - const float dl = d * (0.5f + ((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * 0.5f; - constant uint8_t * grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+0] | ((qh << 8) & 256))); - constant uint8_t * grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+1] | ((qh << 7) & 256))); + const float dl = d * (1 + 2*((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf)); + constant uint8_t * grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+0] | ((qh << 8) & 256))); + constant uint8_t * grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+1] | ((qh << 7) & 256))); for (int i = 0; i < 4; ++i) { reg[0][i] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i+0]); reg[1][i] = dl * grid2[i] * select(1, -1, signs[0] & kmask_iq2xs[i+4]); } - grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+2] | ((qh << 6) & 256))); - grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+3] | ((qh << 5) & 256))); + grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+2] | ((qh << 6) & 256))); + grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+3] | ((qh << 5) & 256))); for (int i = 0; i < 4; ++i) { reg[2][i] = dl * grid1[i] * select(1, -1, signs[1] & kmask_iq2xs[i+0]); reg[3][i] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i+4]); diff --git a/ggml-quants.c b/ggml-quants.c index 371826f14..492a1b9a6 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3818,71 +3818,71 @@ static const uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 
0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; -static const uint32_t iq3xs_grid[512] = { - 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, - 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, - 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, - 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, - 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, - 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, - 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, - 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, - 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, - 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, - 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, - 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, - 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, - 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, - 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, - 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, - 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, - 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, - 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, - 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, - 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, - 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, - 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, - 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, - 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, - 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, - 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, - 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, - 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, - 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, - 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, - 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, - 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, - 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, - 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, - 0x143e2c04, 0x1c040414, 
0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, - 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, - 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, - 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, - 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, - 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, - 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, - 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, - 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, - 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, - 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, - 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, - 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, - 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, - 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, - 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, - 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, - 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, - 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, - 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, - 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, - 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, - 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, - 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, - 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, - 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, - 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, - 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, - 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +static const uint32_t iq3s_grid[512] = { + 0x01010101, 0x01010103, 0x01010105, 0x0101010b, 0x0101010f, 0x01010301, 0x01010303, 0x01010305, + 0x01010309, 0x0101030d, 0x01010501, 0x01010503, 0x0101050b, 0x01010707, 0x01010901, 0x01010905, + 0x0101090b, 0x0101090f, 0x01010b03, 0x01010b07, 0x01010d01, 0x01010d05, 0x01010f03, 0x01010f09, + 0x01010f0f, 0x01030101, 0x01030103, 0x01030105, 0x01030109, 0x01030301, 0x01030303, 0x0103030b, + 0x01030501, 0x01030507, 0x0103050f, 0x01030703, 0x0103070b, 0x01030909, 0x01030d03, 0x01030d0b, + 0x01030f05, 0x01050101, 0x01050103, 0x0105010b, 0x0105010f, 0x01050301, 0x01050307, 0x0105030d, + 0x01050503, 0x0105050b, 0x01050701, 0x01050709, 0x01050905, 0x0105090b, 0x0105090f, 0x01050b03, + 
0x01050b07, 0x01050f01, 0x01050f07, 0x01070107, 0x01070303, 0x0107030b, 0x01070501, 0x01070505, + 0x01070703, 0x01070707, 0x0107070d, 0x01070909, 0x01070b01, 0x01070b05, 0x01070d0f, 0x01070f03, + 0x01070f0b, 0x01090101, 0x01090307, 0x0109030f, 0x01090503, 0x01090509, 0x01090705, 0x01090901, + 0x01090907, 0x01090b03, 0x01090f01, 0x010b0105, 0x010b0109, 0x010b0501, 0x010b0505, 0x010b050d, + 0x010b0707, 0x010b0903, 0x010b090b, 0x010b090f, 0x010b0d0d, 0x010b0f07, 0x010d010d, 0x010d0303, + 0x010d0307, 0x010d0703, 0x010d0b05, 0x010d0f03, 0x010f0101, 0x010f0105, 0x010f0109, 0x010f0501, + 0x010f0505, 0x010f050d, 0x010f0707, 0x010f0b01, 0x010f0b09, 0x03010101, 0x03010103, 0x03010105, + 0x03010109, 0x03010301, 0x03010303, 0x03010307, 0x0301030b, 0x0301030f, 0x03010501, 0x03010505, + 0x03010703, 0x03010709, 0x0301070d, 0x03010b09, 0x03010b0d, 0x03010d03, 0x03010f05, 0x03030101, + 0x03030103, 0x03030107, 0x0303010d, 0x03030301, 0x03030309, 0x03030503, 0x03030701, 0x03030707, + 0x03030903, 0x03030b01, 0x03030b05, 0x03030f01, 0x03030f0d, 0x03050101, 0x03050305, 0x0305030b, + 0x0305030f, 0x03050501, 0x03050509, 0x03050705, 0x03050901, 0x03050907, 0x03050b0b, 0x03050d01, + 0x03050f05, 0x03070103, 0x03070109, 0x0307010f, 0x03070301, 0x03070307, 0x03070503, 0x0307050f, + 0x03070701, 0x03070709, 0x03070903, 0x03070d05, 0x03070f01, 0x03090107, 0x0309010b, 0x03090305, + 0x03090309, 0x03090703, 0x03090707, 0x03090905, 0x0309090d, 0x03090b01, 0x03090b09, 0x030b0103, + 0x030b0301, 0x030b0307, 0x030b0503, 0x030b0701, 0x030b0705, 0x030b0b03, 0x030d0501, 0x030d0509, + 0x030d050f, 0x030d0909, 0x030d090d, 0x030f0103, 0x030f0107, 0x030f0301, 0x030f0305, 0x030f0503, + 0x030f070b, 0x030f0903, 0x030f0d05, 0x030f0f01, 0x05010101, 0x05010103, 0x05010107, 0x0501010b, + 0x0501010f, 0x05010301, 0x05010305, 0x05010309, 0x0501030d, 0x05010503, 0x05010507, 0x0501050f, + 0x05010701, 0x05010705, 0x05010903, 0x05010907, 0x0501090b, 0x05010b01, 0x05010b05, 0x05010d0f, + 0x05010f01, 0x05010f07, 0x05010f0b, 0x05030101, 0x05030105, 0x05030301, 0x05030307, 0x0503030f, + 0x05030505, 0x0503050b, 0x05030703, 0x05030709, 0x05030905, 0x05030b03, 0x05050103, 0x05050109, + 0x0505010f, 0x05050503, 0x05050507, 0x05050701, 0x0505070f, 0x05050903, 0x05050b07, 0x05050b0f, + 0x05050f03, 0x05050f09, 0x05070101, 0x05070105, 0x0507010b, 0x05070303, 0x05070505, 0x05070509, + 0x05070703, 0x05070707, 0x05070905, 0x05070b01, 0x05070d0d, 0x05090103, 0x0509010f, 0x05090501, + 0x05090507, 0x05090705, 0x0509070b, 0x05090903, 0x05090f05, 0x05090f0b, 0x050b0109, 0x050b0303, + 0x050b0505, 0x050b070f, 0x050b0901, 0x050b0b07, 0x050b0f01, 0x050d0101, 0x050d0105, 0x050d010f, + 0x050d0503, 0x050d0b0b, 0x050d0d03, 0x050f010b, 0x050f0303, 0x050f050d, 0x050f0701, 0x050f0907, + 0x050f0b01, 0x07010105, 0x07010303, 0x07010307, 0x0701030b, 0x0701030f, 0x07010505, 0x07010703, + 0x07010707, 0x0701070b, 0x07010905, 0x07010909, 0x0701090f, 0x07010b03, 0x07010d07, 0x07010f03, + 0x07030103, 0x07030107, 0x0703010b, 0x07030309, 0x07030503, 0x07030507, 0x07030901, 0x07030d01, + 0x07030f05, 0x07030f0d, 0x07050101, 0x07050305, 0x07050501, 0x07050705, 0x07050709, 0x07050b01, + 0x07070103, 0x07070301, 0x07070309, 0x07070503, 0x07070507, 0x0707050f, 0x07070701, 0x07070903, + 0x07070907, 0x0707090f, 0x07070b0b, 0x07070f07, 0x07090107, 0x07090303, 0x0709030d, 0x07090505, + 0x07090703, 0x07090b05, 0x07090d01, 0x07090d09, 0x070b0103, 0x070b0301, 0x070b0305, 0x070b050b, + 0x070b0705, 0x070b0909, 0x070b0b0d, 0x070b0f07, 0x070d030d, 0x070d0903, 0x070f0103, 0x070f0107, + 0x070f0501, 0x070f0505, 
0x070f070b, 0x09010101, 0x09010109, 0x09010305, 0x09010501, 0x09010509, + 0x0901050f, 0x09010705, 0x09010903, 0x09010b01, 0x09010f01, 0x09030105, 0x0903010f, 0x09030303, + 0x09030307, 0x09030505, 0x09030701, 0x0903070b, 0x09030907, 0x09030b03, 0x09030b0b, 0x09050103, + 0x09050107, 0x09050301, 0x0905030b, 0x09050503, 0x09050707, 0x09050901, 0x09050b0f, 0x09050d05, + 0x09050f01, 0x09070109, 0x09070303, 0x09070307, 0x09070501, 0x09070505, 0x09070703, 0x0907070b, + 0x09090101, 0x09090105, 0x09090509, 0x0909070f, 0x09090901, 0x09090f03, 0x090b010b, 0x090b010f, + 0x090b0503, 0x090b0d05, 0x090d0307, 0x090d0709, 0x090d0d01, 0x090f0301, 0x090f030b, 0x090f0701, + 0x090f0907, 0x090f0b03, 0x0b010105, 0x0b010301, 0x0b010309, 0x0b010505, 0x0b010901, 0x0b010909, + 0x0b01090f, 0x0b010b05, 0x0b010d0d, 0x0b010f09, 0x0b030103, 0x0b030107, 0x0b03010b, 0x0b030305, + 0x0b030503, 0x0b030705, 0x0b030f05, 0x0b050101, 0x0b050303, 0x0b050507, 0x0b050701, 0x0b05070d, + 0x0b050b07, 0x0b070105, 0x0b07010f, 0x0b070301, 0x0b07050f, 0x0b070909, 0x0b070b03, 0x0b070d0b, + 0x0b070f07, 0x0b090103, 0x0b090109, 0x0b090501, 0x0b090705, 0x0b09090d, 0x0b0b0305, 0x0b0b050d, + 0x0b0b0b03, 0x0b0b0b07, 0x0b0d0905, 0x0b0f0105, 0x0b0f0109, 0x0b0f0505, 0x0d010303, 0x0d010307, + 0x0d01030b, 0x0d010703, 0x0d010707, 0x0d010d01, 0x0d030101, 0x0d030501, 0x0d03050f, 0x0d030d09, + 0x0d050305, 0x0d050709, 0x0d050905, 0x0d050b0b, 0x0d050d05, 0x0d050f01, 0x0d070101, 0x0d070309, + 0x0d070503, 0x0d070901, 0x0d09050b, 0x0d090907, 0x0d090d05, 0x0d0b0101, 0x0d0b0107, 0x0d0b0709, + 0x0d0b0d01, 0x0d0d010b, 0x0d0d0901, 0x0d0f0303, 0x0d0f0307, 0x0f010101, 0x0f010109, 0x0f01010f, + 0x0f010501, 0x0f010505, 0x0f01070d, 0x0f010901, 0x0f010b09, 0x0f010d05, 0x0f030105, 0x0f030303, + 0x0f030509, 0x0f030907, 0x0f03090b, 0x0f050103, 0x0f050109, 0x0f050301, 0x0f05030d, 0x0f050503, + 0x0f050701, 0x0f050b03, 0x0f070105, 0x0f070705, 0x0f07070b, 0x0f070b07, 0x0f090103, 0x0f09010b, + 0x0f090307, 0x0f090501, 0x0f090b01, 0x0f0b0505, 0x0f0b0905, 0x0f0d0105, 0x0f0d0703, 0x0f0f0101, }; #define NGRID_IQ2XXS 512 @@ -4162,11 +4162,11 @@ void dequantize_row_iq3_s(const block_iq3_s * restrict x, float * restrict y, in const uint8_t * signs = x[i].signs; for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { - const float db1 = d * (0.5f + (x[i].scales[ib32/2] & 0xf)) * 0.5f; - const float db2 = d * (0.5f + (x[i].scales[ib32/2] >> 4)) * 0.5f; + const float db1 = d * (1 + 2*(x[i].scales[ib32/2] & 0xf)); + const float db2 = d * (1 + 2*(x[i].scales[ib32/2] >> 4)); for (int l = 0; l < 4; ++l) { - const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); - const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); for (int j = 0; j < 4; ++j) { y[j+0] = db1 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); y[j+4] = db1 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? 
-1.f : 1.f); @@ -4176,8 +4176,8 @@ void dequantize_row_iq3_s(const block_iq3_s * restrict x, float * restrict y, in qs += 8; signs += 4; for (int l = 0; l < 4; ++l) { - const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[1] << (8-2*l)) & 256))); - const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[1] << (7-2*l)) & 256))); + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[1] << (7-2*l)) & 256))); for (int j = 0; j < 4; ++j) { y[j+0] = db2 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); y[j+4] = db2 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? -1.f : 1.f); @@ -10089,18 +10089,34 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v #if defined(__ARM_NEON) + typedef union { + uint16x8_t vec_index; + uint16_t index[8]; + } vec_index_t; + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 }; static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; - const uint8x16x2_t mask1 = vld1q_u8_x2(k_mask1); - const uint8x16_t mask2 = vld1q_u8(k_mask2); + static const int16_t k_shift[8] = {8, 7, 6, 5, 4, 3, 2, 1}; + + const uint8x16x2_t mask1 = vld1q_u8_x2(k_mask1); + const uint8x16_t mask2 = vld1q_u8(k_mask2); + const int16x8_t hshift = vld1q_s16(k_shift); + const uint16x8_t m256 = vdupq_n_u16(256); + const uint8x16_t m1 = vdupq_n_u8(1); uint8x16x2_t vs; ggml_int8x16x4_t q3s; ggml_int8x16x4_t q8b; + vec_index_t idx; + +#if QK_K == 256 + uint32_t scales32[2]; + const uint8_t * scales8 = (const uint8_t *)scales32; +#endif float sumf = 0; for (int i = 0; i < nb; ++i) { @@ -10109,47 +10125,63 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v const uint8_t * restrict qh = x[i].qh; const uint16_t * restrict signs = (const uint16_t *)x[i].signs; const int8_t * restrict q8 = y[i].qs; + +#if QK_K == 256 + memcpy(scales32, x[i].scales, 4); + scales32[1] = (((scales32[0] >> 4) & 0x0f0f0f0f) << 1) | 0x01010101; + scales32[0] = ((scales32[0] & 0x0f0f0f0f) << 1) | 0x01010101; +#endif + int sumi1 = 0, sumi2 = 0; for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { q8b = ggml_vld1q_s8_x4(q8); q8 += 64; - const uint32x4_t aux32x4_0 = {iq3xs_grid[qs[ 0] | ((qh[ib32+0] << 8) & 256)], iq3xs_grid[qs[ 1] | ((qh[ib32+0] << 7) & 256)], - iq3xs_grid[qs[ 2] | ((qh[ib32+0] << 6) & 256)], iq3xs_grid[qs[ 3] | ((qh[ib32+0] << 5) & 256)]}; - const uint32x4_t aux32x4_1 = {iq3xs_grid[qs[ 4] | ((qh[ib32+0] << 4) & 256)], iq3xs_grid[qs[ 5] | ((qh[ib32+0] << 3) & 256)], - iq3xs_grid[qs[ 6] | ((qh[ib32+0] << 2) & 256)], iq3xs_grid[qs[ 7] | ((qh[ib32+0] << 1) & 256)]}; - const uint32x4_t aux32x4_2 = {iq3xs_grid[qs[ 8] | ((qh[ib32+1] << 8) & 256)], iq3xs_grid[qs[ 9] | ((qh[ib32+1] << 7) & 256)], - iq3xs_grid[qs[10] | ((qh[ib32+1] << 6) & 256)], iq3xs_grid[qs[11] | ((qh[ib32+1] << 5) & 256)]}; - const uint32x4_t aux32x4_3 = {iq3xs_grid[qs[12] | ((qh[ib32+1] << 4) & 256)], iq3xs_grid[qs[13] | ((qh[ib32+1] << 3) & 256)], - iq3xs_grid[qs[14] | ((qh[ib32+1] << 2) & 256)], iq3xs_grid[qs[15] | ((qh[ib32+1] << 1) & 256)]}; - qs += 16; + + const uint8x16_t idx_l = vld1q_u8(qs); qs += 16; + idx.vec_index = vorrq_u16(vmovl_u8(vget_low_u8 (idx_l)), 
vandq_u16(vshlq_u16(vdupq_n_u16(qh[ib32+0]), hshift), m256)); + const uint32x4_t aux32x4_0 = {iq3s_grid[idx.index[0]], iq3s_grid[idx.index[1]], + iq3s_grid[idx.index[2]], iq3s_grid[idx.index[3]]}; + const uint32x4_t aux32x4_1 = {iq3s_grid[idx.index[4]], iq3s_grid[idx.index[5]], + iq3s_grid[idx.index[6]], iq3s_grid[idx.index[7]]}; + idx.vec_index = vorrq_u16(vmovl_u8(vget_high_u8(idx_l)), vandq_u16(vshlq_u16(vdupq_n_u16(qh[ib32+1]), hshift), m256)); + const uint32x4_t aux32x4_2 = {iq3s_grid[idx.index[0]], iq3s_grid[idx.index[1]], + iq3s_grid[idx.index[2]], iq3s_grid[idx.index[3]]}; + const uint32x4_t aux32x4_3 = {iq3s_grid[idx.index[4]], iq3s_grid[idx.index[5]], + iq3s_grid[idx.index[6]], iq3s_grid[idx.index[7]]}; + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); - vs.val[0] = vceqq_u8(vs.val[0], mask2); - vs.val[1] = vceqq_u8(vs.val[1], mask2); + vs.val[0] = vorrq_u8(vceqq_u8(vs.val[0], mask2), m1); + vs.val[1] = vorrq_u8(vceqq_u8(vs.val[1], mask2), m1); - q3s.val[0] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_0))), vreinterpretq_s8_u8(vs.val[0])); - q3s.val[1] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_1))), vreinterpretq_s8_u8(vs.val[1])); + q3s.val[0] = vmulq_s8(vreinterpretq_s8_u8(vs.val[0]), vreinterpretq_s8_u32(aux32x4_0)); + q3s.val[1] = vmulq_s8(vreinterpretq_s8_u8(vs.val[1]), vreinterpretq_s8_u32(aux32x4_1)); vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); - vs.val[0] = vceqq_u8(vs.val[0], mask2); - vs.val[1] = vceqq_u8(vs.val[1], mask2); + vs.val[0] = vorrq_u8(vceqq_u8(vs.val[0], mask2), m1); + vs.val[1] = vorrq_u8(vceqq_u8(vs.val[1], mask2), m1); signs += 4; - q3s.val[2] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_2))), vreinterpretq_s8_u8(vs.val[0])); - q3s.val[3] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_3))), vreinterpretq_s8_u8(vs.val[1])); + q3s.val[2] = vmulq_s8(vreinterpretq_s8_u8(vs.val[0]), vreinterpretq_s8_u32(aux32x4_2)); + q3s.val[3] = vmulq_s8(vreinterpretq_s8_u8(vs.val[1]), vreinterpretq_s8_u32(aux32x4_3)); const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[0], q8b.val[0]), q3s.val[1], q8b.val[1]); const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[2], q8b.val[2]), q3s.val[3], q8b.val[3]); +#if QK_K == 256 + sumi1 += vaddvq_s32(p1) * scales8[ib32/2+0]; + sumi2 += vaddvq_s32(p2) * scales8[ib32/2+4]; +#else sumi1 += vaddvq_s32(p1) * (1 + 2*(x[i].scales[ib32/2] & 0xf)); sumi2 += vaddvq_s32(p2) * (1 + 2*(x[i].scales[ib32/2] >> 4)); +#endif } sumf += d*(sumi1 + sumi2); } - *s = 0.25f * sumf; + *s = sumf; #elif defined(__AVX2__) @@ -10164,6 +10196,16 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1); const __m256i mask2 = _mm256_loadu_si256((const __m256i*)k_mask2); + const __m256i idx_shift = _mm256_set_epi32(1, 2, 3, 4, 5, 6, 7, 8); + const __m256i idx_mask = _mm256_set1_epi32(256); + + typedef union { + __m256i vec[2]; + uint32_t index[16]; + } index_t; + + index_t idx; + __m256 accumf = _mm256_setzero_ps(); for (int i = 0; i < nb; ++i) { const 
float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; @@ -10176,24 +10218,25 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; - const __m256i q2_1 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+0] << 1) & 256)], - iq3xs_grid[qs[6] | ((qh[ib32+0] << 2) & 256)], - iq3xs_grid[qs[5] | ((qh[ib32+0] << 3) & 256)], - iq3xs_grid[qs[4] | ((qh[ib32+0] << 4) & 256)], - iq3xs_grid[qs[3] | ((qh[ib32+0] << 5) & 256)], - iq3xs_grid[qs[2] | ((qh[ib32+0] << 6) & 256)], - iq3xs_grid[qs[1] | ((qh[ib32+0] << 7) & 256)], - iq3xs_grid[qs[0] | ((qh[ib32+0] << 8) & 256)]); - qs += 8; - const __m256i q2_2 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+1] << 1) & 256)], - iq3xs_grid[qs[6] | ((qh[ib32+1] << 2) & 256)], - iq3xs_grid[qs[5] | ((qh[ib32+1] << 3) & 256)], - iq3xs_grid[qs[4] | ((qh[ib32+1] << 4) & 256)], - iq3xs_grid[qs[3] | ((qh[ib32+1] << 5) & 256)], - iq3xs_grid[qs[2] | ((qh[ib32+1] << 6) & 256)], - iq3xs_grid[qs[1] | ((qh[ib32+1] << 7) & 256)], - iq3xs_grid[qs[0] | ((qh[ib32+1] << 8) & 256)]); - qs += 8; + const __m256i idx_l = _mm256_cvtepu8_epi16(_mm_loadu_si128((const __m128i *)qs)); qs += 16; + idx.vec[0] = _mm256_set1_epi32(qh[ib32+0]); + idx.vec[1] = _mm256_set1_epi32(qh[ib32+1]); + idx.vec[0] = _mm256_and_si256(_mm256_sllv_epi32(idx.vec[0], idx_shift), idx_mask); + idx.vec[1] = _mm256_and_si256(_mm256_sllv_epi32(idx.vec[1], idx_shift), idx_mask); + idx.vec[0] = _mm256_or_si256(idx.vec[0], _mm256_cvtepi16_epi32(_mm256_castsi256_si128(idx_l))); + idx.vec[1] = _mm256_or_si256(idx.vec[1], _mm256_cvtepi16_epi32(_mm256_extractf128_si256(idx_l, 1))); + + // At leat on my CPU (Ryzen 7950X), using _mm256_i32gather_epi32 is slower than _mm256_set_epi32. Strange. 
+ //const __m256i q2_1 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[0], 4); + //const __m256i q2_2 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[1], 4); + const __m256i q2_1 = _mm256_set_epi32( + iq3s_grid[idx.index[7]], iq3s_grid[idx.index[6]], iq3s_grid[idx.index[5]], iq3s_grid[idx.index[4]], + iq3s_grid[idx.index[3]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]] + ); + const __m256i q2_2 = _mm256_set_epi32( + iq3s_grid[idx.index[15]], iq3s_grid[idx.index[14]], iq3s_grid[idx.index[13]], iq3s_grid[idx.index[12]], + iq3s_grid[idx.index[11]], iq3s_grid[idx.index[10]], iq3s_grid[idx.index[ 9]], iq3s_grid[idx.index[ 8]] + ); __m256i aux256 = _mm256_set1_epi32(signs[0] | (signs[1] << 16)); aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); @@ -10221,7 +10264,7 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v } - *s = 0.25f * hsum_float_8(accumf); + *s = hsum_float_8(accumf); #else @@ -10238,8 +10281,8 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v const uint32_t ls2 = 2*(x[i].scales[ib32/2] >> 4) + 1; int32_t sumi = 0; for (int l = 0; l < 4; ++l) { - const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+0] << (8-2*l)) & 256))); - const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+0] << (7-2*l)) & 256))); + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[ib32+0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[ib32+0] << (7-2*l)) & 256))); for (int j = 0; j < 4; ++j) { sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? -1 : 1); @@ -10251,8 +10294,8 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v bsum += sumi * ls1; sumi = 0; for (int l = 0; l < 4; ++l) { - const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+1] << (8-2*l)) & 256))); - const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+1] << (7-2*l)) & 256))); + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[ib32+1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[ib32+1] << (7-2*l)) & 256))); for (int j = 0; j < 4; ++j) { sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? 
-1 : 1); @@ -10265,7 +10308,7 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v } sumf += d * bsum; } - *s = 0.25f * sumf; + *s = sumf; #endif } @@ -11912,7 +11955,8 @@ static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, vo } float best = 0; float scale = max/(2*kMaxQ-1); - for (int is = -15; is <= 15; ++is) { + for (int k = 0; k < bs4; ++k) is_on_grid[k] = false; + for (int is = -9; is <= 9; ++is) { float id = (2*kMaxQ-1+is*0.2f)/max; float this_scale = 1/id; for (int k = 0; k < bs4; ++k) { @@ -11948,7 +11992,7 @@ static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, vo if (n_not_ongrid > 0 && scale > 0) { float id = 1/scale; for (int k = 0; k < bs4; ++k) { - if (is_on_grid[k]) continue; + //if (is_on_grid[k]) continue; uint16_t u = 0; for (int i = 0; i < 4; ++i) { int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); @@ -12004,7 +12048,7 @@ static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, vo } float d = max_scale/31; - y[ibl].d = GGML_FP32_TO_FP16(d); + y[ibl].d = GGML_FP32_TO_FP16(d * 1.033f); float id = 1/d; for (int ib = 0; ib < QK_K/block_size; ib += 2) { int l1 = nearest_int(0.5f*(id*scales[ib+0]-1)); From c7a0ad8ec9ebb5ddb1c1c80c82f2ee041c525d47 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sat, 2 Mar 2024 12:21:47 -0500 Subject: [PATCH 762/811] convert-hf : make model class definitions self-contained (#5825) --- convert-hf-to-gguf.py | 204 ++++++++++++++++++------------------ gguf-py/gguf/gguf_writer.py | 2 +- 2 files changed, 101 insertions(+), 105 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 28b92ac38..fa9d4f22f 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -8,9 +8,10 @@ import json import os import re import sys +from abc import ABC, abstractmethod from enum import IntEnum from pathlib import Path -from typing import TYPE_CHECKING, Any, ContextManager, Iterator, Sequence, cast +from typing import TYPE_CHECKING, Any, Callable, ContextManager, Iterator, Sequence, TypeVar, cast import numpy as np import torch @@ -35,8 +36,11 @@ class SentencePieceTokenTypes(IntEnum): UNUSED = 5 BYTE = 6 +AnyModel = TypeVar("AnyModel", bound="type[Model]") + +class Model(ABC): + _model_classes: dict[str, type[Model]] = {} -class Model: def __init__(self, dir_model: Path, ftype: int, fname_out: Path, is_big_endian: bool): self.dir_model = dir_model self.ftype = ftype @@ -47,10 +51,14 @@ class Model: self.num_parts = Model.count_model_parts(self.dir_model, ".safetensors" if self.is_safetensors else ".bin") self.part_names = self._get_part_names() self.hparams = Model.load_hparams(self.dir_model) - self.model_arch = self._get_model_architecture() self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess, use_temp_file=False) self.block_count = self.find_hparam(["n_layers", "num_hidden_layers", "n_layer"]) + @property + @abstractmethod + def model_arch(self) -> gguf.MODEL_ARCH: + pass + def find_hparam(self, keys: Sequence[str], optional: bool = False) -> Any: key = next((k for k in keys if k in self.hparams), None) if key is not None: @@ -176,55 +184,21 @@ class Model: with open(dir_model / "config.json", "r", encoding="utf-8") as f: return json.load(f) - @staticmethod - def from_model_architecture(model_architecture): - if model_architecture == "GPTNeoXForCausalLM": - return GPTNeoXModel - if model_architecture == "BloomForCausalLM": - return BloomModel - if model_architecture == 
"MPTForCausalLM": - return MPTModel - if model_architecture in ("BaichuanForCausalLM", "BaiChuanForCausalLM"): - return BaichuanModel - if model_architecture in ("FalconForCausalLM", "RWForCausalLM"): - return FalconModel - if model_architecture == "GPTBigCodeForCausalLM": - return StarCoderModel - if model_architecture == "GPTRefactForCausalLM": - return RefactModel - if model_architecture == "PersimmonForCausalLM": - return PersimmonModel - if model_architecture in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): - return StableLMModel - if model_architecture == "QWenLMHeadModel": - return QwenModel - if model_architecture == "Qwen2ForCausalLM": - return Model - if model_architecture == "MixtralForCausalLM": - return MixtralModel - if model_architecture == "GPT2LMHeadModel": - return GPT2Model - if model_architecture == "PhiForCausalLM": - return Phi2Model - if model_architecture == "PlamoForCausalLM": - return PlamoModel - if model_architecture == "CodeShellForCausalLM": - return CodeShellModel - if model_architecture == "OrionForCausalLM": - return OrionModel - if model_architecture == "InternLM2ForCausalLM": - return InternLM2Model - if model_architecture == "MiniCPMForCausalLM": - return MiniCPMModel - if model_architecture == "BertModel": - return BertModel - if model_architecture == "NomicBertModel": - return NomicBertModel - if model_architecture == "GemmaForCausalLM": - return GemmaModel - if model_architecture == "Starcoder2ForCausalLM": - return Model - return Model + @classmethod + def register(cls, *names: str) -> Callable[[AnyModel], AnyModel]: + assert names + def func(modelcls: type[Model]): + for name in names: + cls._model_classes[name] = modelcls + return modelcls + return func + + @classmethod + def from_model_architecture(cls, arch): + try: + return cls._model_classes[arch] + except KeyError: + raise NotImplementedError(f'Architecture {arch!r} not supported!') from None def _is_model_safetensors(self) -> bool: return Model.count_model_parts(self.dir_model, ".safetensors") > 0 @@ -239,57 +213,6 @@ class Model: return ("pytorch_model.bin",) return (f"pytorch_model-{n:05}-of-{self.num_parts:05}.bin" for n in range(1, self.num_parts + 1)) - def _get_model_architecture(self) -> gguf.MODEL_ARCH: - arch = self.hparams["architectures"][0] - if arch == "GPTNeoXForCausalLM": - return gguf.MODEL_ARCH.GPTNEOX - if arch == "BloomForCausalLM": - return gguf.MODEL_ARCH.BLOOM - if arch == "MPTForCausalLM": - return gguf.MODEL_ARCH.MPT - if arch in ("BaichuanForCausalLM", "BaiChuanForCausalLM"): - return gguf.MODEL_ARCH.BAICHUAN - if arch in ("FalconForCausalLM", "RWForCausalLM"): - return gguf.MODEL_ARCH.FALCON - if arch == "GPTBigCodeForCausalLM": - return gguf.MODEL_ARCH.STARCODER - if arch == "GPTRefactForCausalLM": - return gguf.MODEL_ARCH.REFACT - if arch == "PersimmonForCausalLM": - return gguf.MODEL_ARCH.PERSIMMON - if arch in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): - return gguf.MODEL_ARCH.STABLELM - if arch == "QWenLMHeadModel": - return gguf.MODEL_ARCH.QWEN - if arch == "Qwen2ForCausalLM": - return gguf.MODEL_ARCH.QWEN2 - if arch == "MixtralForCausalLM": - return gguf.MODEL_ARCH.LLAMA - if arch == "GPT2LMHeadModel": - return gguf.MODEL_ARCH.GPT2 - if arch == "PhiForCausalLM": - return gguf.MODEL_ARCH.PHI2 - if arch == "PlamoForCausalLM": - return gguf.MODEL_ARCH.PLAMO - if arch == "CodeShellForCausalLM": - return gguf.MODEL_ARCH.CODESHELL - if arch == "OrionForCausalLM": - return 
gguf.MODEL_ARCH.ORION - if arch == "InternLM2ForCausalLM": - return gguf.MODEL_ARCH.INTERNLM2 - if arch == "MiniCPMForCausalLM": - return gguf.MODEL_ARCH.MINICPM - if arch == "BertModel": - return gguf.MODEL_ARCH.BERT - if arch == "NomicBertModel": - return gguf.MODEL_ARCH.NOMIC_BERT - if arch == "GemmaForCausalLM": - return gguf.MODEL_ARCH.GEMMA - if arch == "Starcoder2ForCausalLM": - return gguf.MODEL_ARCH.STARCODER2 - - raise NotImplementedError(f'Architecture "{arch}" not supported!') - def _set_vocab_gpt2(self): dir_model = self.dir_model hparams = self.hparams @@ -457,7 +380,10 @@ class Model: special_vocab.add_to_gguf(self.gguf_writer) +@Model.register("GPTNeoXForCausalLM") class GPTNeoXModel(Model): + model_arch = gguf.MODEL_ARCH.GPTNEOX + def set_gguf_parameters(self): block_count = self.hparams["num_hidden_layers"] @@ -474,7 +400,10 @@ class GPTNeoXModel(Model): self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) +@Model.register("BloomForCausalLM") class BloomModel(Model): + model_arch = gguf.MODEL_ARCH.BLOOM + def set_gguf_parameters(self): self.gguf_writer.add_name("Bloom") n_embed = self.hparams.get("hidden_size", self.hparams.get("n_embed")) @@ -566,7 +495,10 @@ class BloomModel(Model): print(name, f"=> output.weight, shape = {data.shape}, {old_dtype} --> {data.dtype}") +@Model.register("MPTForCausalLM") class MPTModel(Model): + model_arch = gguf.MODEL_ARCH.MPT + def set_gguf_parameters(self): block_count = self.hparams["n_layers"] self.gguf_writer.add_name(self.dir_model.name) @@ -629,7 +561,10 @@ class MPTModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("OrionForCausalLM") class OrionModel(Model): + model_arch = gguf.MODEL_ARCH.ORION + def set_vocab(self): self._set_vocab_sentencepiece() @@ -708,7 +643,10 @@ class OrionModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("BaichuanForCausalLM", "BaiChuanForCausalLM") class BaichuanModel(Model): + model_arch = gguf.MODEL_ARCH.BAICHUAN + def set_vocab(self): self._set_vocab_sentencepiece() @@ -823,7 +761,10 @@ class BaichuanModel(Model): return weights[r * n_part:r * n_part + r, ...] 
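For readers following the refactor above, here is a minimal self-contained sketch of the decorator-based registry that `@Model.register` puts in place of the long if/else chains; the base class and architecture names below are illustrative stand-ins, not code from the patch:

```python
# Sketch of a decorator-based class registry, analogous to Model.register above.
# BaseConverter and the demo architecture names are hypothetical, for illustration only.
from __future__ import annotations


class BaseConverter:
    _registry: dict[str, type[BaseConverter]] = {}

    @classmethod
    def register(cls, *names: str):
        assert names, "at least one architecture name is required"

        def wrapper(subcls: type[BaseConverter]) -> type[BaseConverter]:
            for name in names:
                cls._registry[name] = subcls  # map each HF architecture string to the subclass
            return subcls

        return wrapper

    @classmethod
    def from_architecture(cls, arch: str) -> type[BaseConverter]:
        try:
            return cls._registry[arch]
        except KeyError:
            raise NotImplementedError(f"Architecture {arch!r} not supported!") from None


@BaseConverter.register("DemoForCausalLM", "DemoLMHeadModel")
class DemoConverter(BaseConverter):
    pass


if __name__ == "__main__":
    assert BaseConverter.from_architecture("DemoForCausalLM") is DemoConverter
```

The point of the design, as the commit title says, is that each model class becomes self-contained: supporting a new architecture only touches the new subclass and its decorator, not a central dispatch table.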
+@Model.register("FalconForCausalLM", "RWForCausalLM") class FalconModel(Model): + model_arch = gguf.MODEL_ARCH.FALCON + def set_gguf_parameters(self): block_count = self.hparams.get("num_hidden_layers") if block_count is None: @@ -916,7 +857,10 @@ class FalconModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("GPTBigCodeForCausalLM") class StarCoderModel(Model): + model_arch = gguf.MODEL_ARCH.STARCODER + def set_gguf_parameters(self): block_count = self.hparams["n_layer"] @@ -931,7 +875,10 @@ class StarCoderModel(Model): self.gguf_writer.add_file_type(self.ftype) +@Model.register("GPTRefactForCausalLM") class RefactModel(Model): + model_arch = gguf.MODEL_ARCH.REFACT + def set_gguf_parameters(self): hidden_dim = self.hparams["n_embd"] inner_dim = 4 * hidden_dim @@ -1015,7 +962,10 @@ class RefactModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("PersimmonForCausalLM") class PersimmonModel(Model): + model_arch = gguf.MODEL_ARCH.PERSIMMON + def set_gguf_parameters(self): block_count = self.hparams.get("num_layers", self.hparams.get("num_hidden_layers")) head_count = self.hparams["num_attention_heads"] @@ -1063,7 +1013,10 @@ class PersimmonModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM") class StableLMModel(Model): + model_arch = gguf.MODEL_ARCH.STABLELM + def set_vocab(self): if (self.dir_model / "tokenizer.json").is_file(): self._set_vocab_gpt2() @@ -1087,12 +1040,18 @@ class StableLMModel(Model): self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_eps", "norm_eps"])) +@Model.register("MixtralForCausalLM") class MixtralModel(Model): + model_arch = gguf.MODEL_ARCH.LLAMA + def set_vocab(self): self._set_vocab_sentencepiece() +@Model.register("MiniCPMForCausalLM") class MiniCPMModel(Model): + model_arch = gguf.MODEL_ARCH.MINICPM + def set_gguf_parameters(self): block_count = self.hparams["num_hidden_layers"] self.gguf_writer.add_name("MiniCPM") @@ -1169,7 +1128,10 @@ class MiniCPMModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("QWenLMHeadModel") class QwenModel(Model): + model_arch = gguf.MODEL_ARCH.QWEN + @staticmethod def token_bytes_to_string(b): from transformers.models.gpt2.tokenization_gpt2 import bytes_to_unicode @@ -1249,7 +1211,15 @@ class QwenModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("Qwen2ForCausalLM") +class Qwen2Model(Model): + model_arch = gguf.MODEL_ARCH.QWEN2 + + +@Model.register("GPT2LMHeadModel") class GPT2Model(Model): + model_arch = gguf.MODEL_ARCH.GPT2 + def set_gguf_parameters(self): self.gguf_writer.add_name(self.dir_model.name) self.gguf_writer.add_block_count(self.hparams["n_layer"]) @@ -1311,7 +1281,10 @@ class GPT2Model(Model): self.gguf_writer.add_tensor("output.weight", data) +@Model.register("PhiForCausalLM") class Phi2Model(Model): + model_arch = gguf.MODEL_ARCH.PHI2 + def set_gguf_parameters(self): block_count = self.find_hparam(["num_hidden_layers", "n_layer"]) @@ -1333,7 +1306,10 @@ class Phi2Model(Model): self.gguf_writer.add_add_bos_token(False) +@Model.register("PlamoForCausalLM") class PlamoModel(Model): + model_arch = gguf.MODEL_ARCH.PLAMO + def set_vocab(self): self._set_vocab_sentencepiece() @@ -1412,7 +1388,10 @@ class PlamoModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("CodeShellForCausalLM") class CodeShellModel(Model): + model_arch = gguf.MODEL_ARCH.CODESHELL + def set_gguf_parameters(self): 
block_count = self.hparams["n_layer"] @@ -1477,7 +1456,10 @@ class CodeShellModel(Model): print(name, f"=> output.weight, shape = {data.shape}, {old_dtype} --> {data.dtype}") +@Model.register("InternLM2ForCausalLM") class InternLM2Model(Model): + model_arch = gguf.MODEL_ARCH.INTERNLM2 + def set_vocab(self): # (TODO): Is there a better way? # Copy from _set_vocab_sentencepiece, The only difference is that we will treat the character @@ -1649,7 +1631,10 @@ in chat mode so that the conversation can end normally.") self.post_write_tensors(tensor_map, name, data_torch) +@Model.register("BertModel") class BertModel(Model): + model_arch = gguf.MODEL_ARCH.BERT + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.vocab_size = None @@ -1679,7 +1664,7 @@ class BertModel(Model): else: raise NotImplementedError("Only MEAN and CLS pooling types supported") - self.gguf_writer.add_pooling_type(pooling_type.value) + self.gguf_writer.add_pooling_type(pooling_type) def set_vocab(self): path = self.dir_model @@ -1755,7 +1740,10 @@ class BertModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("NomicBertModel") class NomicBertModel(BertModel): + model_arch = gguf.MODEL_ARCH.NOMIC_BERT + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1792,7 +1780,10 @@ class NomicBertModel(BertModel): yield name, data +@Model.register("GemmaForCausalLM") class GemmaModel(Model): + model_arch = gguf.MODEL_ARCH.GEMMA + def set_vocab(self): self._set_vocab_sentencepiece() @@ -1848,6 +1839,11 @@ class GemmaModel(Model): self.gguf_writer.add_tensor(new_name, data) +@Model.register("Starcoder2ForCausalLM") +class StarCoder2Model(Model): + model_arch = gguf.MODEL_ARCH.STARCODER2 + + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index e4681475c..801160832 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -362,7 +362,7 @@ class GGUFWriter: self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) def add_pooling_type(self, value: PoolingType) -> None: - self.add_uint32(Keys.LLM.POOLING_TYPE.format(arch=self.arch), value) + self.add_uint32(Keys.LLM.POOLING_TYPE.format(arch=self.arch), value.value) def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) From 4d4d2366fc9c54d4a275065cfe9299c6cf7c5b78 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sat, 2 Mar 2024 12:27:26 -0500 Subject: [PATCH 763/811] convert : automatically fall back to HfVocab if tokenizer.model doesn't exist (#5821) --- README.md | 4 +- convert-llama-ggml-to-gguf.py | 6 +-- convert.py | 72 +++++++++++++++++------------------ examples/infill/infill.cpp | 4 +- 4 files changed, 41 insertions(+), 45 deletions(-) diff --git a/README.md b/README.md index 67717c1e3..939646753 100644 --- a/README.md +++ b/README.md @@ -786,7 +786,7 @@ And after 4.45 hours, you will have the final perplexity. ### Interactive mode If you want a more ChatGPT-like experience, you can run in interactive mode by passing `-i` as a parameter. -In this mode, you can always interrupt generation by pressing Ctrl+C and entering one or more lines of text, which will be converted into tokens and appended to the current context. You can also specify a *reverse prompt* with the parameter `-r "reverse prompt string"`. This will result in user input being prompted whenever the exact tokens of the reverse prompt string are encountered in the generation. 
A typical use is to use a prompt that makes LLaMa emulate a chat between multiple users, say Alice and Bob, and pass `-r "Alice:"`. +In this mode, you can always interrupt generation by pressing Ctrl+C and entering one or more lines of text, which will be converted into tokens and appended to the current context. You can also specify a *reverse prompt* with the parameter `-r "reverse prompt string"`. This will result in user input being prompted whenever the exact tokens of the reverse prompt string are encountered in the generation. A typical use is to use a prompt that makes LLaMA emulate a chat between multiple users, say Alice and Bob, and pass `-r "Alice:"`. Here is an example of a few-shot interaction, invoked with the command @@ -850,7 +850,7 @@ Sample run: ``` == Running in interactive mode. == - Press Ctrl+C to interject at any time. - - Press Return to return control to LLaMa. + - Press Return to return control to LLaMA. - If you want to submit another line, end your input in '\'. Below is an instruction that describes a task. Write a response that appropriately completes the request. diff --git a/convert-llama-ggml-to-gguf.py b/convert-llama-ggml-to-gguf.py index b33108062..cd9644fcb 100755 --- a/convert-llama-ggml-to-gguf.py +++ b/convert-llama-ggml-to-gguf.py @@ -373,7 +373,7 @@ def handle_metadata(cfg, hp): raise ValueError('Unable to load metadata') vocab_path = Path(cfg.vocab_dir if cfg.vocab_dir is not None else cfg.model_metadata_dir) vocab_factory = convert.VocabFactory(vocab_path) - vocab, special_vocab = vocab_factory.load_vocab(cfg.vocabtype, cfg.model_metadata_dir) + vocab, special_vocab = vocab_factory.load_vocab(cfg.vocabtype.split(","), cfg.model_metadata_dir) convert.check_vocab_size(params, vocab) return params, vocab, special_vocab @@ -398,8 +398,8 @@ def handle_args(): help ='Load HuggingFace/.pth vocab and metadata from the specified directory') parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file - only meaningful with --model-metadata-dir") - parser.add_argument("--vocabtype", choices=["spm", "bpe"], default="spm", - help="vocab format - only meaningful with --model-metadata-dir and/or --vocab-dir (default: spm)") + parser.add_argument("--vocabtype", default="spm,hfft", + help="vocab format - only meaningful with --model-metadata-dir and/or --vocab-dir (default: spm,hfft)") return parser.parse_args() diff --git a/convert.py b/convert.py index 63a0a5d78..6e3a0319b 100755 --- a/convert.py +++ b/convert.py @@ -1282,35 +1282,32 @@ def load_some_model(path: Path) -> ModelPlus: class VocabFactory: + _FILES = {"spm": "tokenizer.model", "bpe": "vocab.json", "hfft": "tokenizer.json"} + def __init__(self, path: Path): self.path = path - self.files: dict[str, Path | None] = { - "tokenizer.model": None, - "vocab.json": None, - "tokenizer.json": None, - } - self._detect_files() + self.file_paths = self._detect_files() + print(f"Found vocab files: {self.file_paths}") - def _detect_files(self): - for file in self.files.keys(): - file_path = self.path / file - parent_file_path = self.path.parent / file - if file_path.exists(): - self.files[file] = file_path - elif parent_file_path.exists(): - self.files[file] = parent_file_path - print(f"Found vocab files: {self.files}") + def _detect_files(self) -> dict[str, Path | None]: + def locate(file: str) -> Path | None: + if (path := self.path / file).exists(): + return path + if (path := self.path.parent / file).exists(): + return path + return None - def 
_select_file(self, vocabtype: str | None) -> Path: - if vocabtype in ["spm", "bpe"]: - for file_key in self.files.keys(): - if (file := self.files[file_key]) is not None: - return file - raise FileNotFoundError(f"{vocabtype} vocab not found.") - if vocabtype == "hfft": - # For Hugging Face Fast Tokenizer, return the directory path instead of a specific file - return self.path - raise ValueError(f"Unsupported vocabulary type {vocabtype}") + return {vt: locate(f) for vt, f in self._FILES.items()} + + def _select_file(self, vocab_types: list[str]) -> tuple[str, Path]: + for vtype in vocab_types: + try: + path = self.file_paths[vtype] + except KeyError: + raise ValueError(f"Unsupported vocabulary type {vtype}") from None + if path is not None: + return vtype, path + raise FileNotFoundError(f"Could not find any of {[self._FILES[vt] for vt in vocab_types]}") def _create_special_vocab(self, vocab: Vocab, vocabtype: str, model_parent_path: Path) -> gguf.SpecialVocab: load_merges = vocabtype == "bpe" @@ -1322,30 +1319,30 @@ class VocabFactory: n_vocab=n_vocab, ) - def load_vocab(self, vocabtype: str, model_parent_path: Path) -> tuple[Vocab, gguf.SpecialVocab]: - path = self._select_file(vocabtype) - print(f"Loading vocab file '{path}', type '{vocabtype}'") + def load_vocab(self, vocab_types: list[str], model_parent_path: Path) -> tuple[Vocab, gguf.SpecialVocab]: + vocab_type, path = self._select_file(vocab_types) + print(f"Loading vocab file {path!r}, type {vocab_type!r}") added_tokens_path = path.parent / "added_tokens.json" vocab: Vocab - if vocabtype == "bpe": + if vocab_type == "bpe": vocab = BpeVocab( path, added_tokens_path if added_tokens_path.exists() else None ) - elif vocabtype == "spm": + elif vocab_type == "spm": vocab = SentencePieceVocab( path, added_tokens_path if added_tokens_path.exists() else None ) - elif vocabtype == "hfft": + elif vocab_type == "hfft": vocab = HfVocab( - path, added_tokens_path if added_tokens_path.exists() else None + path.parent, added_tokens_path if added_tokens_path.exists() else None ) else: - raise ValueError(f"Unsupported vocabulary type {vocabtype}") + raise ValueError(vocab_type) # FIXME: Respect --vocab-dir? special_vocab = self._create_special_vocab( vocab, - vocabtype, + vocab_type, model_parent_path, ) return vocab, special_vocab @@ -1379,15 +1376,14 @@ def main(args_in: list[str] | None = None) -> None: if np.uint32(1) == np.uint32(1).newbyteorder("<"): # We currently only support Q8_0 output on little endian systems. 
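To illustrate the fallback order the new `VocabFactory` implements, here is a condensed sketch that tries each requested vocab type in turn and picks the first matching file in the model directory or its parent; the file-name map mirrors the patch, while the directory in the usage comment is hypothetical:

```python
# Condensed sketch of the ordered vocab-type fallback introduced above (e.g. "spm,hfft").
from __future__ import annotations

from pathlib import Path

VOCAB_FILES = {"spm": "tokenizer.model", "bpe": "vocab.json", "hfft": "tokenizer.json"}


def select_vocab(model_dir: Path, vocab_types: list[str]) -> tuple[str, Path]:
    for vtype in vocab_types:
        try:
            fname = VOCAB_FILES[vtype]
        except KeyError:
            raise ValueError(f"Unsupported vocabulary type {vtype}") from None
        # check the model directory first, then its parent, as the factory above does
        for candidate in (model_dir / fname, model_dir.parent / fname):
            if candidate.exists():
                return vtype, candidate
    raise FileNotFoundError(f"Could not find any of {[VOCAB_FILES[vt] for vt in vocab_types]}")


# e.g. select_vocab(Path("models/my-model"), "spm,hfft".split(","))  # hypothetical path
```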
output_choices.append("q8_0") - vocab_types = ["spm", "bpe", "hfft"] - parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") + parser = argparse.ArgumentParser(description="Convert a LLaMA model to a GGML compatible file") parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--vocab-type", choices=vocab_types, help="The vocabulary format used to define the tokenizer model (default: spm)", default="spm") + parser.add_argument("--vocab-type", help="vocab types to try in order, choose from 'spm', 'bpe', 'hfft' (default: spm,hfft)", default="spm,hfft") parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") @@ -1448,7 +1444,7 @@ def main(args_in: list[str] | None = None) -> None: model_parent_path = model_plus.paths[0].parent vocab_path = Path(args.vocab_dir or args.model or model_parent_path) vocab_factory = VocabFactory(vocab_path) - vocab, special_vocab = vocab_factory.load_vocab(args.vocab_type, model_parent_path) + vocab, special_vocab = vocab_factory.load_vocab(args.vocab_type.split(","), model_parent_path) if args.vocab_only: if not args.outfile: diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index d4b8729dd..91c39c5ae 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -378,10 +378,10 @@ int main(int argc, char ** argv) { if (params.interactive) { const char *control_message; if (params.multiline_input) { - control_message = " - To return control to LLaMa, end your input with '\\'.\n" + control_message = " - To return control to LLaMA, end your input with '\\'.\n" " - To return control without starting a new line, end your input with '/'.\n"; } else { - control_message = " - Press Return to return control to LLaMa.\n" + control_message = " - Press Return to return control to LLaMA.\n" " - To return control without starting a new line, end your input with '/'.\n" " - If you want to submit another line, end your input with '\\'.\n"; } From 494c87032613e31c0be99b2735e732871f2c4e4d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 2 Mar 2024 20:00:49 +0200 Subject: [PATCH 764/811] ggml : fix IQ3_S AVX implementation (#5834) ggml-ci --- ggml-quants.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-quants.c b/ggml-quants.c index 492a1b9a6..2a8881d73 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -10231,7 +10231,7 @@ void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const v //const __m256i q2_2 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[1], 4); const __m256i q2_1 = _mm256_set_epi32( iq3s_grid[idx.index[7]], iq3s_grid[idx.index[6]], 
iq3s_grid[idx.index[5]], iq3s_grid[idx.index[4]], - iq3s_grid[idx.index[3]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]] + iq3s_grid[idx.index[3]], iq3s_grid[idx.index[2]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]] ); const __m256i q2_2 = _mm256_set_epi32( iq3s_grid[idx.index[15]], iq3s_grid[idx.index[14]], iq3s_grid[idx.index[13]], iq3s_grid[idx.index[12]], From 4a6e2d6142ab815c964924896891e9ab3e050632 Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Sat, 2 Mar 2024 20:52:25 +0100 Subject: [PATCH 765/811] llama : add abort_callback to interrupt computation (#5409) * using abort_callback from ggml to stop llama computation * format fix * a brief explaining comment --------- Co-authored-by: Georgi Gerganov --- llama.cpp | 18 ++++++++++++++++-- llama.h | 13 +++++++++++-- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/llama.cpp b/llama.cpp index 697e85e89..d4c7a965b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1987,6 +1987,9 @@ struct llama_context { std::vector buf_compute_meta; ggml_backend_sched_t sched = nullptr; + ggml_abort_callback abort_callback = nullptr; + void * abort_callback_data = nullptr; + // input tensors ggml_backend_buffer_t buf_input = nullptr; ggml_context * ctx_input = nullptr; @@ -8071,6 +8074,7 @@ static void llama_graph_compute( if (lctx.backend_cpu != nullptr) { ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); + ggml_backend_cpu_set_abort_callback(lctx.backend_cpu, lctx.abort_callback, lctx.abort_callback_data); } ggml_backend_sched_graph_compute(lctx.sched, gf); @@ -11856,6 +11860,8 @@ struct llama_context_params llama_context_default_params() { /*.embedding =*/ false, /*.offload_kqv =*/ true, /*.do_pooling =*/ true, + /*.abort_callback =*/ nullptr, + /*.abort_callback_data =*/ nullptr, }; return result; @@ -12038,8 +12044,11 @@ struct llama_context * llama_new_context_with_model( LLAMA_LOG_INFO("%s: freq_base = %.1f\n", __func__, cparams.rope_freq_base); LLAMA_LOG_INFO("%s: freq_scale = %g\n", __func__, cparams.rope_freq_scale); - ctx->rng = std::mt19937(params.seed); - ctx->logits_all = params.logits_all; + ctx->abort_callback = params.abort_callback; + ctx->abort_callback_data = params.abort_callback_data; + + ctx->rng = std::mt19937(params.seed); + ctx->logits_all = params.logits_all; const ggml_type type_k = params.type_k; const ggml_type type_v = params.type_v; @@ -12989,6 +12998,11 @@ void llama_set_n_threads(struct llama_context * ctx, uint32_t n_threads, uint32_ ctx->cparams.n_threads_batch = n_threads_batch; } +void llama_set_abort_callback(struct llama_context * ctx, bool (*abort_callback)(void * data), void * abort_callback_data) { + ctx->abort_callback = abort_callback; + ctx->abort_callback_data = abort_callback_data; +} + struct llama_batch llama_batch_get_one( llama_token * tokens, int32_t n_tokens, diff --git a/llama.h b/llama.h index ed51f478a..6406b5270 100644 --- a/llama.h +++ b/llama.h @@ -255,10 +255,16 @@ extern "C" { enum ggml_type type_v; // data type for V cache // Keep the booleans together to avoid misalignment during copy-by-value. 
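To make the abort-callback contract of this patch concrete, here is a small language-agnostic sketch, written in Python purely for illustration and not the actual llama.cpp C API: the compute loop polls the callback between steps and returns early once it reports true. One plausible use is enforcing a wall-clock budget on a decode call.

```python
# Conceptual sketch of an abort callback polled by a compute loop (illustration only).
from __future__ import annotations

import time
from typing import Callable


def decode_steps(n_steps: int, abort_callback: Callable[[], bool] | None = None) -> int:
    done = 0
    for _ in range(n_steps):
        if abort_callback is not None and abort_callback():
            break  # computation interrupted, mirroring an early return from the decode call
        time.sleep(0.001)  # stand-in for one chunk of graph computation
        done += 1
    return done


# Abort after a wall-clock budget:
deadline = time.monotonic() + 0.02
print(decode_steps(1000, abort_callback=lambda: time.monotonic() > deadline))
```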
- bool logits_all; // the llama_eval() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) + bool logits_all; // the llama_decode() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU bool do_pooling; // whether to pool (sum) embedding results by sequence id (ignored if no pooling layer) + + // Abort callback + // if it returns true, execution of llama_decode() will be aborted + // currently works only with CPU execution + ggml_abort_callback abort_callback; + void * abort_callback_data; }; // model quantization parameters @@ -632,7 +638,10 @@ extern "C" { // n_threads_batch is the number of threads used for prompt and batch processing (multiple tokens) LLAMA_API void llama_set_n_threads(struct llama_context * ctx, uint32_t n_threads, uint32_t n_threads_batch); - // Token logits obtained from the last call to llama_eval() + // Set abort callback + LLAMA_API void llama_set_abort_callback(struct llama_context * ctx, ggml_abort_callback abort_callback, void * abort_callback_data); + + // Token logits obtained from the last call to llama_decode() // The logits for the last token are stored in the last row // Logits for which llama_batch.logits[i] == 0 are undefined // Rows: n_tokens provided with llama_batch From 9731134296af3a6839cd682e51d9c2109a871de5 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 2 Mar 2024 22:00:14 +0100 Subject: [PATCH 766/811] server: tests: passkey challenge / self-extend with context shift demo (#5832) * server: tests: add models endpoint scenario * server: /v1/models add some metadata * server: tests: add debug field in context before scenario * server: tests: download model from HF, add batch size * server: tests: add passkey test * server: tests: add group attention params * server: do not truncate prompt tokens if self-extend through group attention is enabled * server: logs: do not truncate log values * server: tests - passkey - first good working value of nga * server: tests: fix server timeout * server: tests: fix passkey, add doc, fix regex content matching, fix timeout * server: tests: fix regex content matching * server: tests: schedule slow tests on master * server: metrics: fix when no prompt processed * server: tests: self-extend add llama-2-7B and Mixtral-8x7B-v0.1 * server: tests: increase timeout for completion * server: tests: keep only the PHI-2 test * server: tests: passkey add a negative test --- .github/workflows/server.yml | 17 +- examples/server/server.cpp | 46 +++- examples/server/tests/README.md | 50 +++- examples/server/tests/features/environment.py | 5 +- examples/server/tests/features/issues.feature | 1 + .../server/tests/features/parallel.feature | 5 +- .../server/tests/features/passkey.feature | 55 ++++ .../server/tests/features/security.feature | 3 +- examples/server/tests/features/server.feature | 23 +- examples/server/tests/features/steps/steps.py | 259 ++++++++++++++---- .../tests/features/wrong_usages.feature | 5 +- examples/server/tests/requirements.txt | 1 + examples/server/tests/tests.sh | 2 +- examples/server/utils.hpp | 3 +- 14 files changed, 363 insertions(+), 112 deletions(-) create mode 100644 examples/server/tests/features/passkey.feature diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 0b6f6669b..8c6312508 100644 --- a/.github/workflows/server.yml +++ 
b/.github/workflows/server.yml @@ -10,6 +10,8 @@ on: pull_request: types: [opened, synchronize, reopened] paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/tests/**.*'] + schedule: + - cron: '00 0 * * *' jobs: server: @@ -70,14 +72,15 @@ jobs: run: | pip install -r examples/server/tests/requirements.txt - - name: Download models - id: download_models - run: | - cd examples/server/tests - ../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf - - name: Tests - id: server_integration_test + id: server_integration_tests run: | cd examples/server/tests PORT=8888 ./tests.sh + + - name: Slow tests + id: server_integration_tests_slow + if: github.event.schedule != '' + run: | + cd examples/server/tests + PORT=8888 ./tests.sh --stop --no-skipped --no-capture --tags slow diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 2b2f4a0f4..52daf9e7a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -441,8 +441,8 @@ struct llama_server_context const int ga_w = params.grp_attn_w; if (ga_n != 1) { - GGML_ASSERT(ga_n > 0 && "ga_n must be positive"); // NOLINT - GGML_ASSERT(ga_w % ga_n == 0 && "ga_w must be a multiple of ga_n"); // NOLINT + GGML_ASSERT(ga_n > 0 && "ga_n must be positive"); // NOLINT + GGML_ASSERT(ga_w % ga_n == 0 && "ga_w must be a multiple of ga_n"); // NOLINT //GGML_ASSERT(n_ctx_train % ga_w == 0 && "n_ctx_train must be a multiple of ga_w"); // NOLINT //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * ga_n"); // NOLINT @@ -1709,8 +1709,8 @@ struct llama_server_context } slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep); - // if input prompt is too big, truncate it - if (slot.n_prompt_tokens >= slot.n_ctx) + // if input prompt is too big, truncate it, if group attention self-extend is disabled + if (slot.ga_n == 1 && slot.n_prompt_tokens >= slot.n_ctx) { const int n_left = slot.n_ctx - slot.params.n_keep; const int n_block_size = n_left / 2; @@ -1785,9 +1785,11 @@ struct llama_server_context } LOG_INFO("slot progression", { - { "slot_id", slot.id }, - { "task_id", slot.task_id }, - { "n_past", slot.n_past }, + { "slot_id", slot.id }, + { "task_id", slot.task_id }, + { "n_past", slot.n_past }, + { "n_past_se", slot.n_past_se }, + { "ga_i", slot.ga_i }, { "n_prompt_tokens_processed", slot.n_prompt_tokens_processed } }); } @@ -2001,6 +2003,17 @@ struct llama_server_context LOG_VERBOSE("slots updated", {}); return true; } + + json model_meta() { + return json{ + {"vocab_type", llama_vocab_type(model)}, + {"n_vocab", llama_n_vocab(model)}, + {"n_ctx_train", llama_n_ctx_train(model)}, + {"n_embd", llama_n_embd(model)}, + {"n_params", llama_model_n_params(model)}, + {"size", llama_model_size(model)}, + }; + } }; static void server_print_usage(const char *argv0, const gpt_params ¶ms, @@ -2911,9 +2924,10 @@ int main(int argc, char **argv) for (const auto& metric_def : metrics_def) { std::string name = metric_def["name"]; std::string help = metric_def["help"]; - prometheus << "# HELP llamacpp:" << name << " " << help << "\n" - << "# TYPE llamacpp:" << name << " " << type << "\n" - << "llamacpp:" << name << " " << metric_def["value"] << "\n"; + auto value = json_value(metric_def, "value", 0); + prometheus << "# HELP llamacpp:" << name << " " << help << "\n" + << "# TYPE llamacpp:" << name << " " << type << "\n" + << "llamacpp:" << name << " " << value << "\n"; } 
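The metrics block above emits the standard Prometheus text exposition format, which is what the test suite later in this patch parses with `prometheus_client`. A small client-side sketch of the same idea follows; the URL and port match the test scenarios' defaults and are assumed here:

```python
# Sketch: scrape the server's /metrics endpoint and read one gauge from it.
from urllib.request import urlopen

from prometheus_client import parser  # same parser used by the server test steps

metrics_raw = urlopen("http://localhost:8080/metrics").read().decode()
for family in parser.text_string_to_metric_families(metrics_raw):
    if family.name == "llamacpp:kv_cache_usage_ratio":
        for sample in family.samples:
            print(sample.name, sample.value)
```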
} @@ -2994,6 +3008,7 @@ int main(int argc, char **argv) state.store(SERVER_STATE_READY); LOG_INFO("model loaded", {}); } + const auto model_meta = llama.model_meta(); if (sparams.chat_template.empty()) { // custom chat template is not supplied // check if the template comes with the model is supported by us @@ -3143,7 +3158,7 @@ int main(int argc, char **argv) } }); - svr.Get("/v1/models", [¶ms](const httplib::Request& req, httplib::Response& res) + svr.Get("/v1/models", [¶ms, &model_meta](const httplib::Request& req, httplib::Response& res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); std::time_t t = std::time(0); @@ -3152,10 +3167,11 @@ int main(int argc, char **argv) {"object", "list"}, {"data", { { - {"id", params.model_alias}, - {"object", "model"}, - {"created", t}, - {"owned_by", "llamacpp"} + {"id", params.model_alias}, + {"object", "model"}, + {"created", t}, + {"owned_by", "llamacpp"}, + {"meta", model_meta} }, }} }; diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index 0b9fdc4e7..95a0353b6 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -1,22 +1,30 @@ # Server tests -Python based server tests scenario using [BDD](https://en.wikipedia.org/wiki/Behavior-driven_development) and [behave](https://behave.readthedocs.io/en/latest/): - * [issues.feature](./features/issues.feature) Pending issues scenario - * [parallel.feature](./features/parallel.feature) Scenario involving multi slots and concurrent requests - * [security.feature](./features/security.feature) Security, CORS and API Key - * [server.feature](./features/server.feature) Server base scenario: completion, embedding, tokenization, etc... +Python based server tests scenario using [BDD](https://en.wikipedia.org/wiki/Behavior-driven_development) +and [behave](https://behave.readthedocs.io/en/latest/): + +* [issues.feature](./features/issues.feature) Pending issues scenario +* [parallel.feature](./features/parallel.feature) Scenario involving multi slots and concurrent requests +* [security.feature](./features/security.feature) Security, CORS and API Key +* [server.feature](./features/server.feature) Server base scenario: completion, embedding, tokenization, etc... Tests target GitHub workflows job runners with 4 vCPU. -Requests are using [aiohttp](https://docs.aiohttp.org/en/stable/client_reference.html), [asyncio](https://docs.python.org/fr/3/library/asyncio.html) based http client. +Requests are +using [aiohttp](https://docs.aiohttp.org/en/stable/client_reference.html), [asyncio](https://docs.python.org/fr/3/library/asyncio.html) +based http client. -Note: If the host architecture inference speed is faster than GitHub runners one, parallel scenario may randomly fail. To mitigate it, you can increase values in `n_predict`, `kv_size`. +Note: If the host architecture inference speed is faster than GitHub runners one, parallel scenario may randomly fail. +To mitigate it, you can increase values in `n_predict`, `kv_size`. ### Install dependencies + `pip install -r requirements.txt` ### Run tests + 1. Build the server + ```shell cd ../../.. mkdir build @@ -24,24 +32,36 @@ cd build cmake ../ cmake --build . --target server ``` -2. download required models: - 1. `../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf` -3. Start the test: `./tests.sh` + +2. 
Start the test: `./tests.sh` It's possible to override some scenario steps values with environment variables: - - `PORT` -> `context.server_port` to set the listening port of the server during scenario, default: `8080` - - `LLAMA_SERVER_BIN_PATH` -> to change the server binary path, default: `../../../build/bin/server` - - `DEBUG` -> "ON" to enable steps and server verbose mode `--verbose` - - `SERVER_LOG_FORMAT_JSON` -> if set switch server logs to json format + +| variable | description | +|--------------------------|------------------------------------------------------------------------------------------------| +| `PORT` | `context.server_port` to set the listening port of the server during scenario, default: `8080` | +| `LLAMA_SERVER_BIN_PATH` | to change the server binary path, default: `../../../build/bin/server` | +| `DEBUG` | "ON" to enable steps and server verbose mode `--verbose` | +| `SERVER_LOG_FORMAT_JSON` | if set switch server logs to json format | +| `N_GPU_LAYERS` | number of model layers to offload to VRAM `-ngl --n-gpu-layers` | ### Run @bug, @wip or @wrong_usage annotated scenario Feature or Scenario must be annotated with `@llama.cpp` to be included in the default scope. + - `@bug` annotation aims to link a scenario with a GitHub issue. - `@wrong_usage` are meant to show user issue that are actually an expected behavior - `@wip` to focus on a scenario working in progress +- `@slow` heavy test, disabled by default To run a scenario annotated with `@bug`, start: -`DEBUG=ON ./tests.sh --no-skipped --tags bug` + +```shell +DEBUG=ON ./tests.sh --no-skipped --tags bug +``` After changing logic in `steps.py`, ensure that `@bug` and `@wrong_usage` scenario are updated. + +```shell +./tests.sh --no-skipped --tags bug,wrong_usage || echo "should failed but compile" +``` diff --git a/examples/server/tests/features/environment.py b/examples/server/tests/features/environment.py index 09e826747..9fd330db6 100644 --- a/examples/server/tests/features/environment.py +++ b/examples/server/tests/features/environment.py @@ -7,7 +7,10 @@ from signal import SIGKILL def before_scenario(context, scenario): - print(f"\x1b[33;42mStarting new scenario: {scenario.name}!\x1b[0m") + context.debug = 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON' + if context.debug: + print("DEBUG=ON\n") + print(f"\x1b[33;42mStarting new scenario: {scenario.name}!\x1b[0m\n") port = 8080 if 'PORT' in os.environ: port = int(os.environ['PORT']) diff --git a/examples/server/tests/features/issues.feature b/examples/server/tests/features/issues.feature index bf5a175a3..7b13e44ca 100644 --- a/examples/server/tests/features/issues.feature +++ b/examples/server/tests/features/issues.feature @@ -1,4 +1,5 @@ # List of ongoing issues +# run with: DEBUG=ON ./tests.sh --no-skipped --tags bug @bug Feature: Issues # No confirmed issue at the moment diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature index 5f895cf90..86cdf7282 100644 --- a/examples/server/tests/features/parallel.feature +++ b/examples/server/tests/features/parallel.feature @@ -1,11 +1,12 @@ @llama.cpp +@parallel Feature: Parallel Background: Server startup Given a server listening on localhost:8080 - And a model file stories260K.gguf - And a model alias tinyllama-2 + And a model file tinyllamas/stories260K.gguf from HF repo ggml-org/models And 42 as server seed + And 512 as batch size And 64 KV cache size And 2 slots And embeddings extraction diff --git a/examples/server/tests/features/passkey.feature 
b/examples/server/tests/features/passkey.feature new file mode 100644 index 000000000..1bde7aab8 --- /dev/null +++ b/examples/server/tests/features/passkey.feature @@ -0,0 +1,55 @@ +# run with: ./tests.sh --no-skipped --tags passkey +@passkey +@slow +Feature: Passkey / Self-extend with context shift + + Background: Server startup + Given a server listening on localhost:8080 + + # Generates a long text of junk and inserts a secret passkey number inside it. + # Then we query the LLM for the secret passkey. + # see #3856 and #4810 + Scenario Outline: Passkey + Given a model file from HF repo + And as batch size + And as number of junk + And server max tokens to predict + And 42 as seed + And KV cache size + And 1 slots + And group attention factor to extend context size through self-extend + And group attention width to extend context size through self-extend + # Can be override with N_GPU_LAYERS + And GPU offloaded layers + Then the server is starting + Then the server is healthy + Given available models + Then model 0 is trained on tokens context + Given a prefix prompt: + """ + here is an important info hidden inside a lot of irrelevant text. Find it and memorize them. I will quiz you about the important information there. + """ + And a passkey prompt template: + """ + The pass key is Remember it. is the pass key. + """ + And a junk suffix prompt: + """ + The grass is green. The sky is blue. The sun is yellow. Here we go. There and back again. + """ + And a suffix prompt: + """ + What is the pass key? The pass key is + """ + Given a "" passkey challenge prompt with the passkey inserted every junk + And a completion request with no api error + Then tokens are predicted matching + + Examples: + | hf_repo | hf_file | n_ctx_train | ngl | n_ctx | n_batch | n_ga | n_ga_w | n_junk | i_pos | passkey | n_predicted | re_content | + | TheBloke/phi-2-GGUF | phi-2.Q4_K_M.gguf | 2048 | 5 | 8192 | 512 | 4 | 512 | 250 | 50 | 42 | 1 | 42 | + | TheBloke/phi-2-GGUF | phi-2.Q4_K_M.gguf | 2048 | 5 | 8192 | 512 | 2 | 512 | 250 | 50 | 42 | 1 | \b((?!42)\w)+\b | + #| TheBloke/Llama-2-7B-GGUF | llama-2-7b.Q2_K.gguf | 4096 | 3 | 16384 | 512 | 4 | 512 | 500 | 300 | 1234 | 5 | 1234 | + #| TheBloke/Mixtral-8x7B-v0.1-GGUF | mixtral-8x7b-v0.1.Q2_K.gguf | 32768 | 2 | 16384 | 512 | 4 | 512 | 500 | 100 | 0987 | 5 | 0 + # 987 | + diff --git a/examples/server/tests/features/security.feature b/examples/server/tests/features/security.feature index db06d3977..42a6709a5 100644 --- a/examples/server/tests/features/security.feature +++ b/examples/server/tests/features/security.feature @@ -1,9 +1,10 @@ @llama.cpp +@security Feature: Security Background: Server startup with an api key defined Given a server listening on localhost:8080 - And a model file stories260K.gguf + And a model file tinyllamas/stories260K.gguf from HF repo ggml-org/models And a server api key llama.cpp Then the server is starting Then the server is healthy diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index b571582a7..7c977bcce 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -1,15 +1,17 @@ @llama.cpp +@server Feature: llama.cpp server Background: Server startup Given a server listening on localhost:8080 - And a model file stories260K.gguf + And a model file tinyllamas/stories260K.gguf from HF repo ggml-org/models And a model alias tinyllama-2 And 42 as server seed # KV Cache corresponds to the total amount of tokens # that can be 
stored across all independent sequences: #4130 # see --ctx-size and #5568 And 32 KV cache size + And 512 as batch size And 1 slots And embeddings extraction And 32 server max tokens to predict @@ -29,9 +31,9 @@ Feature: llama.cpp server And prometheus metrics are exposed Examples: Prompts - | prompt | n_predict | re_content | n_predicted | - | I believe the meaning of life is | 8 | (readgoing)+ | 8 | - | Write a joke about AI | 64 | (parkfriendsscaredalways)+ | 32 | + | prompt | n_predict | re_content | n_predicted | + | I believe the meaning of life is | 8 | (read\|going)+ | 8 | + | Write a joke about AI | 64 | (park\|friends\|scared\|always)+ | 32 | Scenario Outline: OAI Compatibility Given a model @@ -43,9 +45,9 @@ Feature: llama.cpp server Then tokens are predicted matching Examples: Prompts - | model | system_prompt | user_prompt | max_tokens | re_content | n_predicted | enable_streaming | - | llama-2 | Book | What is the best book | 8 | (Momwhat)+ | 8 | disabled | - | codellama70b | You are a coding assistant. | Write the fibonacci function in c++. | 64 | (thankshappybird)+ | 32 | enabled | + | model | system_prompt | user_prompt | max_tokens | re_content | n_predicted | enable_streaming | + | llama-2 | Book | What is the best book | 8 | (Mom\|what)+ | 8 | disabled | + | codellama70b | You are a coding assistant. | Write the fibonacci function in c++. | 64 | (thanks\|happy\|bird)+ | 32 | enabled | Scenario: Embedding When embeddings are computed for: @@ -75,10 +77,15 @@ Feature: llama.cpp server When an OAI compatible embeddings computation request for multiple inputs Then embeddings are generated - Scenario: Tokenize / Detokenize When tokenizing: """ What is the capital of France ? """ Then tokens can be detokenize + + Scenario: Models available + Given available models + Then 1 models are supported + Then model 0 is identified by tinyllama-2 + Then model 0 is trained on 128 tokens context diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 381da105e..319527802 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -13,6 +13,7 @@ import aiohttp import openai from behave import step from behave.api.async_step import async_run_until_complete +from huggingface_hub import hf_hub_download from prometheus_client import parser @@ -26,17 +27,23 @@ def step_server_config(context, server_fqdn, server_port): context.base_url = f'http://{context.server_fqdn}:{context.server_port}' - context.debug = 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON' context.model_alias = None + context.n_batch = None context.n_ctx = None + context.n_ga = None + context.n_ga_w = None + context.n_gpu_layer = None context.n_predict = None context.n_server_predict = None context.n_slots = None + context.prompt_prefix = None + context.prompt_suffix = None context.server_api_key = None context.server_continuous_batching = False context.server_embeddings = False context.server_metrics = False context.server_process = None + context.seed = None context.server_seed = None context.user_api_key = None @@ -45,9 +52,11 @@ def step_server_config(context, server_fqdn, server_port): context.prompts = [] -@step(u'a model file {model_file}') -def step_model_file(context, model_file): - context.model_file = model_file +@step(u'a model file {hf_file} from HF repo {hf_repo}') +def step_download_hf_model(context, hf_file, hf_repo): + context.model_file = hf_hub_download(repo_id=hf_repo, filename=hf_file) + if 
context.debug: + print(f"model file: {context.model_file}\n") @step(u'a model alias {model_alias}') @@ -55,24 +64,34 @@ def step_model_alias(context, model_alias): context.model_alias = model_alias -@step(u'{seed} as server seed') +@step(u'{seed:d} as server seed') def step_seed(context, seed): - context.server_seed = int(seed) + context.server_seed = seed -@step(u'{n_ctx} KV cache size') +@step(u'{ngl:d} GPU offloaded layers') +def step_n_gpu_layer(context, ngl): + if 'N_GPU_LAYERS' in os.environ: + new_ngl = int(os.environ['N_GPU_LAYERS']) + if context.debug: + print(f"-ngl upgraded from {ngl} to {new_ngl}") + ngl = new_ngl + context.n_gpu_layer = ngl + + +@step(u'{n_ctx:d} KV cache size') def step_n_ctx(context, n_ctx): - context.n_ctx = int(n_ctx) + context.n_ctx = n_ctx -@step(u'{n_slots} slots') +@step(u'{n_slots:d} slots') def step_n_slots(context, n_slots): - context.n_slots = int(n_slots) + context.n_slots = n_slots -@step(u'{n_predict} server max tokens to predict') +@step(u'{n_predict:d} server max tokens to predict') def step_server_n_predict(context, n_predict): - context.n_server_predict = int(n_predict) + context.n_server_predict = n_predict @step(u'continuous batching') @@ -116,11 +135,13 @@ async def step_wait_for_the_server_to_be_started(context, expecting_status): case 'ready' | 'idle': await wait_for_health_status(context, context.base_url, 200, 'ok', + timeout=10, params={'fail_on_no_slot': 0, 'include_slots': 0}, slots_idle=context.n_slots, slots_processing=0, expected_slots=[{'id': slot_id, 'state': 0} - for slot_id in range(context.n_slots)]) + for slot_id in + range(context.n_slots if context.n_slots else 1)]) case 'busy': await wait_for_health_status(context, context.base_url, 503, 'no slot available', @@ -128,7 +149,8 @@ async def step_wait_for_the_server_to_be_started(context, expecting_status): slots_idle=0, slots_processing=context.n_slots, expected_slots=[{'id': slot_id, 'state': 1} - for slot_id in range(context.n_slots)]) + for slot_id in + range(context.n_slots if context.n_slots else 1)]) case _: assert False, "unknown status" @@ -157,24 +179,24 @@ async def step_request_completion(context, api_error): context.base_url, debug=context.debug, n_predict=context.n_predict, - server_seed=context.server_seed, + seed=await completions_seed(context), expect_api_error=expect_api_error, user_api_key=context.user_api_key) context.tasks_result.append(completion) if context.debug: - print(f"Completion response: {completion}") + print(f"Completion response: {completion}\n") if expect_api_error: assert completion == 401, f"completion must be an 401 status code: {completion}" -@step(u'{predicted_n} tokens are predicted matching {re_content}') +@step(u'{predicted_n:d} tokens are predicted matching {re_content}') def step_n_tokens_predicted_with_content(context, predicted_n, re_content): - assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n), re_content) + assert_n_tokens_predicted(context.tasks_result.pop(), predicted_n, re_content) -@step(u'{predicted_n} tokens are predicted') +@step(u'{predicted_n:d} tokens are predicted') def step_n_tokens_predicted(context, predicted_n): - assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n)) + assert_n_tokens_predicted(context.tasks_result.pop(), predicted_n) @step(u'a user prompt {user_prompt}') @@ -192,9 +214,9 @@ def step_model(context, model): context.model = model -@step(u'{max_tokens} max tokens to predict') +@step(u'{max_tokens:d} max tokens to predict') def step_max_tokens(context, 
max_tokens): - context.n_predict = int(max_tokens) + context.n_predict = max_tokens @step(u'streaming is {enable_streaming}') @@ -222,11 +244,70 @@ def step_server_api_key(context, server_api_key): context.server_api_key = server_api_key +@step(u'{n_junk:d} as number of junk') +def step_n_junk(context, n_junk): + context.n_junk = n_junk + + +@step(u'{n_batch:d} as batch size') +def step_n_batch(context, n_batch): + context.n_batch = n_batch + + +@step(u'{seed:d} as seed') +def step_seed(context, seed): + context.seed = seed + + +@step(u'a prefix prompt') +def step_prompt_prefix(context): + context.prompt_prefix = context.text + + +@step(u'a junk suffix prompt') +def step_prompt_junk_suffix(context): + context.prompt_junk_suffix = context.text + + +@step(u'a suffix prompt') +def step_prompt_suffix(context): + context.prompt_suffix = context.text + + +@step(u'{n_ga:d} group attention factor' + u' to extend context size through self-extend') +def step_impl(context, n_ga): + context.n_ga = n_ga + + +@step(u'{n_ga_w:d} group attention width to extend context size through self-extend') +def step_impl(context, n_ga_w): + context.n_ga_w = n_ga_w + + +@step(u'a passkey prompt template') +def step_prompt_passkey(context): + context.prompt_passkey = context.text + + +@step(u'a "{passkey}" passkey challenge prompt with the passkey inserted every {i_pos:d} junk') +def step_prompt_passkey(context, passkey, i_pos): + prompt = "" + for i in range(context.n_junk): + if i % context.n_junk == i_pos: + prompt += context.prompt_passkey # the passkey is already substituted + prompt += context.prompt_junk_suffix + if context.debug: + passkey_highlight = "\x1b[33m" + passkey + "\x1b[0m" + print(f"Passkey challenge:\n```{prompt.replace(passkey, passkey_highlight)}```\n") + context.prompts.append(context.prompt_prefix + prompt + context.prompt_suffix) + + @step(u'an OAI compatible chat completions request with {api_error} api error') @async_run_until_complete async def step_oai_chat_completions(context, api_error): if context.debug: - print(f"Submitting OAI compatible completions request...") + print(f"Submitting OAI compatible completions request...\n") expect_api_error = api_error == 'raised' completion = await oai_chat_completions(context.prompts.pop(), context.system_prompt, @@ -241,8 +322,7 @@ async def step_oai_chat_completions(context, api_error): enable_streaming=context.enable_streaming if hasattr(context, 'enable_streaming') else None, - server_seed=context.server_seed - if hasattr(context, 'server_seed') else None, + seed=await completions_seed(context), user_api_key=context.user_api_key if hasattr(context, 'user_api_key') else None, @@ -276,8 +356,10 @@ async def step_concurrent_completion_requests(context): # prompt is inserted automatically context.base_url, debug=context.debug, + prompt_prefix=context.prompt_prefix, + prompt_suffix=context.prompt_suffix, n_predict=context.n_predict if hasattr(context, 'n_predict') else None, - server_seed=context.server_seed if hasattr(context, 'server_seed') else None, + seed=await completions_seed(context), user_api_key=context.user_api_key if hasattr(context, 'user_api_key') else None) @@ -297,8 +379,7 @@ async def step_oai_chat_completions(context): if hasattr(context, 'n_predict') else None, enable_streaming=context.enable_streaming if hasattr(context, 'enable_streaming') else None, - server_seed=context.server_seed - if hasattr(context, 'server_seed') else None, + seed=await completions_seed(context), user_api_key=context.user_api_key if hasattr(context, 
'user_api_key') else None) @@ -318,7 +399,9 @@ async def step_oai_chat_completions(context): if hasattr(context, 'n_predict') else None, enable_streaming=context.enable_streaming if hasattr(context, 'enable_streaming') else None, - server_seed=context.server_seed + seed=context.seed + if hasattr(context, 'seed') else + context.server_seed if hasattr(context, 'server_seed') else None, user_api_key=context.user_api_key if hasattr(context, 'user_api_key') else None) @@ -330,11 +413,10 @@ async def step_all_prompts_are_predicted(context): await all_prompts_are_predicted(context) -@step(u'all prompts are predicted with {n_predict} tokens') +@step(u'all prompts are predicted with {n_expected_predicted:d} tokens') @async_run_until_complete -async def step_all_prompts_are_predicted_with_n_tokens(context, n_predict): - expected_predicted_n = int(n_predict) - await all_prompts_are_predicted(context, expected_predicted_n) +async def step_all_prompts_are_predicted_with_n_tokens(context, n_expected_predicted): + await all_prompts_are_predicted(context, n_expected_predicted) async def all_prompts_are_predicted(context, expected_predicted_n=None): @@ -464,6 +546,8 @@ async def step_prometheus_metrics_exported(context): assert metrics_response.headers['Content-Type'] == "text/plain; version=0.0.4" metrics_raw = await metrics_response.text() metric_exported = False + if context.debug: + print(f"/metrics answer:\n{metrics_raw}\n") for metric in parser.text_string_to_metric_families(metrics_raw): match metric.name: case "llamacpp:kv_cache_usage_ratio": @@ -472,6 +556,37 @@ async def step_prometheus_metrics_exported(context): assert metric_exported, "No metrics exported" +@step(u'available models') +def step_available_models(context): + # openai client always expects an api_key + openai.api_key = context.user_api_key if context.user_api_key is not None else 'nope' + openai.api_base = f'{context.base_url}/v1' + context.models = openai.Model.list().data + + +@step(u'{n_model:d} models are supported') +def step_supported_models(context, n_model): + if context.debug: + print("server models available:", context.models) + assert len(context.models) == n_model + + +@step(u'model {i_model:d} is {param} {preposition} {param_value}') +def step_supported_models(context, i_model, param, preposition, param_value): + assert i_model < len(context.models) + model = context.models[i_model] + + param_value = param_value.split(' ', 1)[0] + match param: + case 'identified': + value = model.id + case 'trained': + value = str(model.meta.n_ctx_train) + case _: + assert False, "param {param} not supported" + assert param_value == value, f"model param {param} {value} != {param_value}" + + async def concurrent_requests(context, f_completion, *args, **kwargs): n_prompts = len(context.prompts) if context.debug: @@ -486,8 +601,10 @@ async def concurrent_requests(context, f_completion, *args, **kwargs): async def request_completion(prompt, base_url, debug=False, + prompt_prefix=None, + prompt_suffix=None, n_predict=None, - server_seed=None, + seed=None, expect_api_error=None, user_api_key=None): if debug: @@ -504,11 +621,14 @@ async def request_completion(prompt, async with aiohttp.ClientSession() as session: async with session.post(f'{base_url}/completion', json={ + "input_prefix": prompt_prefix, "prompt": prompt, - "n_predict": int(n_predict) if n_predict is not None else -1, - "seed": server_seed if server_seed is not None else 42 + "input_suffix": prompt_suffix, + "n_predict": n_predict if n_predict is not None else -1, + "seed": seed 
if seed is not None else 42 }, - headers=headers) as response: + headers=headers, + timeout=3600) as response: if expect_api_error is None or not expect_api_error: assert response.status == 200 assert response.headers['Access-Control-Allow-Origin'] == origin @@ -526,14 +646,14 @@ async def oai_chat_completions(user_prompt, model=None, n_predict=None, enable_streaming=None, - server_seed=None, + seed=None, user_api_key=None, expect_api_error=None): if debug: print(f"Sending OAI Chat completions request: {user_prompt}") # openai client always expects an api key user_api_key = user_api_key if user_api_key is not None else 'nope' - seed = server_seed if server_seed is not None else 42 + seed = seed if seed is not None else 42 enable_streaming = enable_streaming if enable_streaming is not None else False payload = { "messages": [ @@ -692,20 +812,32 @@ def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re content = completion_response['content'] n_predicted = completion_response['timings']['predicted_n'] assert len(content) > 0, "no token predicted" - if expected_predicted_n is not None: + if re_content is not None: + p = re.compile(re_content, flags=RegexFlag.IGNORECASE | RegexFlag.MULTILINE | RegexFlag.DOTALL) + matches = p.finditer(content) + last_match = 0 + highlighted = '' + for match in matches: + start, end = match.span() + highlighted += content[last_match: start] + highlighted += '\x1b[33m' + highlighted += content[start: end] + highlighted += '\x1b[0m' + last_match = end + highlighted += content[last_match:] + if 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON': + print(f"Checking completion response: {highlighted}\n") + assert last_match > 0, f'/{re_content}/ must match ```{highlighted}```' + if expected_predicted_n and expected_predicted_n > 0: assert n_predicted == expected_predicted_n, (f'invalid number of tokens predicted:' f' {n_predicted} <> {expected_predicted_n}') - if re_content is not None: - re_content = '^.*' + re_content.replace('<or>', '|') + '.*$' - assert re.match(re_content, content, flags=RegexFlag.IGNORECASE | RegexFlag.MULTILINE | RegexFlag.DOTALL), ( - f'invalid tokens predicted:' - f' ```\n{content}\n``` do not match /{re_content}/') + async def gather_tasks_results(context): n_tasks = len(context.concurrent_tasks) if context.debug: - print(f"Waiting for all {n_tasks} tasks results...") + print(f"Waiting for all {n_tasks} tasks results...\n") for task_no in range(n_tasks): context.tasks_result.append(await context.concurrent_tasks.pop()) n_completions = len(context.tasks_result) @@ -716,15 +848,13 @@ async def wait_for_health_status(context, base_url, expected_http_status_code, expected_health_status, + timeout=3, params=None, slots_idle=None, slots_processing=None, expected_slots=None): if context.debug: - print(f"Starting checking for health for expected_health_status={expected_health_status}") - timeout = 3 # seconds - if expected_health_status == 'ok': - timeout = 10 # CI slow inference + print(f"Starting checking for health for expected_health_status={expected_health_status}\n") interval = 0.5 counter = 0 async with aiohttp.ClientSession() as session: while True: async with await session.get(f'{base_url}/health', params=params) as health_response: status_code = health_response.status health = await health_response.json() if context.debug: print(f"HEALTH - response for expected health status='{expected_health_status}' on " - f"'{base_url}/health'?{params} is {health}") + f"'{base_url}/health'?{params} is {health}\n") if (status_code == expected_http_status_code and health['status'] ==
expected_health_status and (slots_idle is None or health['slots_idle'] == slots_idle) @@ -757,7 +887,7 @@ async def wait_for_health_status(context, if expected_http_status_code == 503: if len(context.tasks_result) == 0: print("\x1b[5;37;43mWARNING: forcing concurrent tasks," - " busy health check missed, probably too fast inference\x1b[0m") + " busy health check missed, probably too fast inference\x1b[0m\n") n_completions = await gather_tasks_results(context) if n_completions > 0: return @@ -791,6 +921,11 @@ def assert_slots_status(slots, expected_slots): f" = {expected[key]} != {slot[key]}") +async def completions_seed(context): + return context.seed if hasattr(context, 'seed') and context.seed is not None \ + else context.server_seed if hasattr(context, 'server_seed') else None + + def start_server_background(context): context.server_path = '../../../build/bin/server' if 'LLAMA_SERVER_BIN_PATH' in os.environ: @@ -800,27 +935,35 @@ def start_server_background(context): '--port', context.server_port, '--model', context.model_file ] + if context.n_batch: + server_args.extend(['--batch-size', context.n_batch]) + if context.n_gpu_layer: + server_args.extend(['--n-gpu-layers', context.n_gpu_layer]) if context.server_continuous_batching: server_args.append('--cont-batching') if context.server_embeddings: server_args.append('--embedding') if context.server_metrics: server_args.append('--metrics') - if context.model_alias is not None: + if context.model_alias: server_args.extend(['--alias', context.model_alias]) - if context.n_ctx is not None: + if context.n_ctx: server_args.extend(['--ctx-size', context.n_ctx]) - if context.n_slots is not None: + if context.n_slots: server_args.extend(['--parallel', context.n_slots]) - if context.n_server_predict is not None: + if context.n_server_predict: server_args.extend(['--n-predict', context.n_server_predict]) - if context.server_api_key is not None: + if context.server_api_key: server_args.extend(['--api-key', context.server_api_key]) + if context.n_ga: + server_args.extend(['--grp-attn-n', context.n_ga]) + if context.n_ga_w: + server_args.extend(['--grp-attn-w', context.n_ga_w]) if context.debug: server_args.append('--verbose') if 'SERVER_LOG_FORMAT_JSON' not in os.environ: server_args.extend(['--log-format', "text"]) - print(f"starting server with: {context.server_path}", *server_args) + print(f"starting server with: {context.server_path} {server_args}\n") context.server_process = subprocess.Popen( [str(arg) for arg in [context.server_path, *server_args]], close_fds=True) diff --git a/examples/server/tests/features/wrong_usages.feature b/examples/server/tests/features/wrong_usages.feature index e228b2371..cf14b3b44 100644 --- a/examples/server/tests/features/wrong_usages.feature +++ b/examples/server/tests/features/wrong_usages.feature @@ -1,4 +1,4 @@ -# run with ./test.sh --tags wrong_usage +# run with: ./tests.sh --no-skipped --tags wrong_usage @wrong_usage Feature: Wrong usage of llama.cpp server @@ -7,7 +7,7 @@ Feature: Wrong usage of llama.cpp server # or pass n_predict/max_tokens in the request. 
Scenario: Infinite loop Given a server listening on localhost:8080 - And a model file stories260K.gguf + And a model file tinyllamas/stories260K.gguf from HF repo ggml-org/models # Uncomment below to fix the issue #And 64 server max tokens to predict Then the server is starting @@ -18,4 +18,5 @@ Feature: Wrong usage of llama.cpp server # Uncomment below to fix the issue #And 128 max tokens to predict Given concurrent completion requests + Then the server is idle Then all prompts are predicted diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt index 334fa4a70..5d4210164 100644 --- a/examples/server/tests/requirements.txt +++ b/examples/server/tests/requirements.txt @@ -1,4 +1,5 @@ aiohttp~=3.9.3 behave~=1.2.6 +huggingface_hub~=0.20.3 openai~=0.25.0 prometheus-client~=0.20.0 diff --git a/examples/server/tests/tests.sh b/examples/server/tests/tests.sh index 17a4e6fc6..1c6c5695f 100755 --- a/examples/server/tests/tests.sh +++ b/examples/server/tests/tests.sh @@ -5,7 +5,7 @@ set -eu if [ $# -lt 1 ] then # Start @llama.cpp scenario - behave --summary --stop --no-capture --exclude 'issues|wrong_usages' --tags llama.cpp + behave --summary --stop --no-capture --exclude 'issues|wrong_usages|passkey' --tags llama.cpp else behave "$@" fi diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index d98541f26..b6e49d8b9 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -126,8 +126,7 @@ static inline void server_log(const char *level, const char *function, int line, for (const auto& el : log.items()) { const std::string value = el.value().dump(-1, ' ', false, json::error_handler_t::replace); - snprintf(buf, 1024, " %s=%s", el.key().c_str(), value.c_str()); - ss << buf; + ss << " " << el.key() << "=" << value; } const std::string str = ss.str(); From fa974646e1a2024fc7dc9e6f27cf1f2f5d4a3763 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 3 Mar 2024 06:11:31 +0200 Subject: [PATCH 767/811] flake.lock: Update (#5842) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'flake-parts': 'github:hercules-ci/flake-parts/b253292d9c0a5ead9bc98c4e9a26c6312e27d69f' (2024-02-01) → 'github:hercules-ci/flake-parts/f7b3c975cf067e56e7cda6cb098ebe3fb4d74ca2' (2024-03-01) • Updated input 'flake-parts/nixpkgs-lib': 'github:NixOS/nixpkgs/97b17f32362e475016f942bbdfda4a4a72a8a652?dir=lib' (2024-01-29) → 'github:NixOS/nixpkgs/1536926ef5621b09bba54035ae2bb6d806d72ac8?dir=lib' (2024-02-29) • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/cbc4211f0afffe6dfd2478a62615dd5175a13f9a' (2024-02-23) → 'github:NixOS/nixpkgs/1536926ef5621b09bba54035ae2bb6d806d72ac8' (2024-02-29) Co-authored-by: github-actions[bot] --- flake.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/flake.lock b/flake.lock index 9f659ba8f..b1b091656 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1706830856, - "narHash": "sha256-a0NYyp+h9hlb7ddVz4LUn1vT/PLwqfrWYcHMvFB1xYg=", + "lastModified": 1709336216, + "narHash": "sha256-Dt/wOWeW6Sqm11Yh+2+t0dfEWxoMxGBvv3JpIocFl9E=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "b253292d9c0a5ead9bc98c4e9a26c6312e27d69f", + "rev": "f7b3c975cf067e56e7cda6cb098ebe3fb4d74ca2", "type": "github" }, "original": { @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1708655239, - "narHash": 
"sha256-ZrP/yACUvDB+zbqYJsln4iwotbH6CTZiTkANJ0AgDv4=", + "lastModified": 1709237383, + "narHash": "sha256-cy6ArO4k5qTx+l5o+0mL9f5fa86tYUX3ozE1S+Txlds=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "cbc4211f0afffe6dfd2478a62615dd5175a13f9a", + "rev": "1536926ef5621b09bba54035ae2bb6d806d72ac8", "type": "github" }, "original": { @@ -37,11 +37,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - "lastModified": 1706550542, - "narHash": "sha256-UcsnCG6wx++23yeER4Hg18CXWbgNpqNXcHIo5/1Y+hc=", + "lastModified": 1709237383, + "narHash": "sha256-cy6ArO4k5qTx+l5o+0mL9f5fa86tYUX3ozE1S+Txlds=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "97b17f32362e475016f942bbdfda4a4a72a8a652", + "rev": "1536926ef5621b09bba54035ae2bb6d806d72ac8", "type": "github" }, "original": { From 8ef969afcec1645d2d9c3ab1fc82263bba968989 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 3 Mar 2024 08:48:36 +0100 Subject: [PATCH 768/811] server : init http requests thread pool with --parallel if set (#5836) --- examples/server/README.md | 2 +- examples/server/server.cpp | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 397ee8252..21da7a0a0 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -18,7 +18,7 @@ The project is under active development, and we are [looking for feedback and co - `--threads N`, `-t N`: Set the number of threads to use during generation. - `-tb N, --threads-batch N`: Set the number of threads to use during batch and prompt processing. If not specified, the number of threads will be set to the number of threads used for generation. -- `--threads-http N`: number of threads in the http server pool to process requests (default: `std::thread::hardware_concurrency()`) +- `--threads-http N`: number of threads in the http server pool to process requests (default: `max(std::thread::hardware_concurrency() - 1, --parallel N + 2)`) - `-m FNAME`, `--model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`). - `-a ALIAS`, `--alias ALIAS`: Set an alias for the model. The alias will be returned in API responses. - `-c N`, `--ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. The size may differ in other models, for example, baichuan models were build with a context of 4096. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 52daf9e7a..0ca388f47 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2026,7 +2026,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? 
"enabled" : "disabled"); printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); printf(" -tb N, --threads-batch N number of threads to use during batch and prompt processing (default: same as --threads)\n"); - printf(" --threads-http N number of threads in the http server pool to process requests (default: hardware concurrency)\n"); + printf(" --threads-http N number of threads in the http server pool to process requests (default: max(hardware concurrency - 1, --parallel N + 2))\n"); printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); printf(" --rope-scaling {none,linear,yarn}\n"); printf(" RoPE frequency scaling method, defaults to linear unless specified by the model\n"); @@ -3468,10 +3468,12 @@ int main(int argc, char **argv) }*/ //); - if (sparams.n_threads_http > 0) { - log_data["n_threads_http"] = std::to_string(sparams.n_threads_http); - svr.new_task_queue = [&sparams] { return new httplib::ThreadPool(sparams.n_threads_http); }; + if (sparams.n_threads_http < 1) { + // +2 threads for monitoring endpoints + sparams.n_threads_http = std::max(params.n_parallel + 2, (int32_t) std::thread::hardware_concurrency() - 1); } + log_data["n_threads_http"] = std::to_string(sparams.n_threads_http); + svr.new_task_queue = [&sparams] { return new httplib::ThreadPool(sparams.n_threads_http); }; LOG_INFO("HTTP server listening", log_data); // run the HTTP server in a thread - see comment below From e6029348e86c3810d4435faee54ba822cb43e2ef Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 3 Mar 2024 09:35:23 +0100 Subject: [PATCH 769/811] ci : schedule slow server tests only on Release or on demand (#5839) --- .github/workflows/server.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 8c6312508..04e3fc0c1 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -3,6 +3,11 @@ name: Server on: workflow_dispatch: # allows manual triggering + inputs: + slow_tests: + description: 'Run slow tests' + required: true + type: boolean push: branches: - master @@ -11,7 +16,7 @@ on: types: [opened, synchronize, reopened] paths: ['.github/workflows/server.yml', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/tests/**.*'] schedule: - - cron: '00 0 * * *' + - cron: '0 0 * * *' jobs: server: @@ -80,7 +85,7 @@ jobs: - name: Slow tests id: server_integration_tests_slow - if: github.event.schedule != '' + if: ${{ github.event.schedule != '' && matrix.build_type == 'Release' || github.event.inputs.slow_tests == 'true' }} run: | cd examples/server/tests PORT=8888 ./tests.sh --stop --no-skipped --no-capture --tags slow From de9692a7d2db66e29e5cb373c6551acc49145ccd Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Sun, 3 Mar 2024 03:41:55 -0500 Subject: [PATCH 770/811] llama : fix llama_copy_state_data with fragmented KV cache (#5840) The row size of the saved states was based on kv_self.head while it should be based on llama_kv_cache_cell_max. Existing session files should still work. * llama : fix llama_kv_cache_cell_max inability to return 1 I've also changed its return type to uint32_t, because this function is always used to set the value of uint32_t variables, and because the index already has this type. 
* llama : fix state size calculation Some bytes in the state were unaccounted for in llama_get_state_size. Since the logits reserve so much space, it did not cause problems. --- llama.cpp | 47 ++++++++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/llama.cpp b/llama.cpp index d4c7a965b..41d0000da 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2156,10 +2156,12 @@ static bool llama_kv_cache_find_slot( } // find how many cells are currently in use -static int32_t llama_kv_cache_cell_max(const struct llama_kv_cache & cache) { - for (uint32_t i = cache.size - 1; i > 0; --i) { - if (cache.cells[i].pos >= 0 && !cache.cells[i].is_empty()) { - return i + 1; +static uint32_t llama_kv_cache_cell_max(const struct llama_kv_cache & cache) { + for (uint32_t i = cache.size; i > 0; --i) { + const llama_kv_cell & cell = cache.cells[i - 1]; + + if (cell.pos >= 0 && !cell.is_empty()) { + return i; } } @@ -8178,7 +8180,7 @@ static int llama_decode_internal( // a heuristic, to avoid attending the full cache if it is not yet utilized // after enough generations, the benefit from this heuristic disappears // if we start defragmenting the cache, the benefit from this will be more important - kv_self.n = std::min((int32_t) cparams.n_ctx, std::max(32, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); + kv_self.n = std::min(cparams.n_ctx, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); //kv_self.n = llama_kv_cache_cell_max(kv_self); //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); @@ -12615,9 +12617,14 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_logits = ctx->logits.capacity() * sizeof(float); const size_t s_embedding_size = sizeof(size_t); const size_t s_embedding = ctx->embedding.size() * sizeof(float); - const size_t s_kv_size = sizeof(size_t); - const size_t s_kv_ntok = sizeof(int); + const size_t s_kv_buf_size = sizeof(size_t); + const size_t s_kv_head = sizeof(uint32_t); + const size_t s_kv_size = sizeof(uint32_t); + const size_t s_kv_used = sizeof(uint32_t); const size_t s_kv = ctx->kv_self.total_size(); + // TODO: assume the max is more than 1 seq_id per KV cell + const size_t s_kv_cell = sizeof(llama_pos) + sizeof(size_t) + sizeof(llama_seq_id); + const size_t s_kv_cells = ctx->kv_self.size * s_kv_cell; const size_t s_total = ( + s_rng_size @@ -12626,9 +12633,12 @@ size_t llama_get_state_size(const struct llama_context * ctx) { + s_logits + s_embedding_size + s_embedding + + s_kv_buf_size + + s_kv_head + s_kv_size - + s_kv_ntok + + s_kv_used + s_kv + + s_kv_cells ); return s_total; @@ -12728,15 +12738,13 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat { const auto & kv_self = ctx->kv_self; const auto & hparams = ctx->model.hparams; - const auto & cparams = ctx->cparams; const uint32_t n_layer = hparams.n_layer; const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); - const uint32_t n_ctx = cparams.n_ctx; const size_t kv_buf_size = kv_self.total_size(); - const uint32_t kv_head = kv_self.head; + const uint32_t kv_head = llama_kv_cache_cell_max(kv_self); const uint32_t kv_size = kv_self.size; const uint32_t kv_used = kv_self.used; @@ -12756,7 +12764,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat // v is not contiguous, copy row by row const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); - const size_t 
v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, kv_size); tmp_buf.resize(v_row_size); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { @@ -12766,7 +12774,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat } } - for (uint32_t i = 0; i < kv_size; ++i) { + for (uint32_t i = 0; i < kv_head; ++i) { const auto & cell = kv_self.cells[i]; const llama_pos pos = cell.pos; @@ -12842,12 +12850,10 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { { const auto & kv_self = ctx->kv_self; const auto & hparams = ctx->model.hparams; - const auto & cparams = ctx->cparams; const uint32_t n_layer = hparams.n_layer; const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); - const uint32_t n_ctx = cparams.n_ctx; size_t kv_buf_size; uint32_t kv_head; @@ -12870,7 +12876,7 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { // v is not contiguous, copy row by row const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); - const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, kv_size); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*v_row_stride, v_row_size); @@ -12879,13 +12885,15 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { } } + GGML_ASSERT(kv_self.size == kv_size); + ctx->kv_self.head = kv_head; ctx->kv_self.size = kv_size; ctx->kv_self.used = kv_used; ctx->kv_self.cells.resize(kv_size); - for (uint32_t i = 0; i < kv_size; ++i) { + for (uint32_t i = 0; i < kv_head; ++i) { llama_pos pos; size_t seq_id_size; @@ -12901,6 +12909,11 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { ctx->kv_self.cells[i].seq_id.insert(seq_id); } } + + for (uint32_t i = kv_head; i < kv_size; ++i) { + ctx->kv_self.cells[i].pos = -1; + ctx->kv_self.cells[i].seq_id.clear(); + } } const size_t nread = inp - src; From 87c2e8b2797860a06af3d6c06b8488a8ff1a09ab Mon Sep 17 00:00:00 2001 From: Nindaleth Date: Sun, 3 Mar 2024 09:43:42 +0100 Subject: [PATCH 771/811] gguf-dump : support i-quants (#5841) Co-authored-by: Black_Fox --- gguf-py/gguf/constants.py | 72 ++++++++++++++++++++++++--------------- 1 file changed, 44 insertions(+), 28 deletions(-) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 5db760cb1..a62139811 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -604,20 +604,28 @@ class PoolingType(IntEnum): class GGMLQuantizationType(IntEnum): - F32 = 0 - F16 = 1 - Q4_0 = 2 - Q4_1 = 3 - Q5_0 = 6 - Q5_1 = 7 - Q8_0 = 8 - Q8_1 = 9 - Q2_K = 10 - Q3_K = 11 - Q4_K = 12 - Q5_K = 13 - Q6_K = 14 - Q8_K = 15 + F32 = 0 + F16 = 1 + Q4_0 = 2 + Q4_1 = 3 + Q5_0 = 6 + Q5_1 = 7 + Q8_0 = 8 + Q8_1 = 9 + Q2_K = 10 + Q3_K = 11 + Q4_K = 12 + Q5_K = 13 + Q6_K = 14 + Q8_K = 15 + IQ2_XXS = 16 + IQ2_XS = 17 + IQ3_XXS = 18 + IQ1_S = 19 + IQ4_NL = 20 + IQ3_S = 21 + IQ2_S = 22 + IQ4_XS = 23 class GGUFEndian(IntEnum): @@ -662,20 +670,28 @@ class GGUFValueType(IntEnum): QK_K = 256 # Items here are (block size, type size) GGML_QUANT_SIZES = { - GGMLQuantizationType.F32: (1, 4), - GGMLQuantizationType.F16: (1, 2), - GGMLQuantizationType.Q4_0: (32, 2 + 16), - GGMLQuantizationType.Q4_1: (32, 2 + 2 + 16), - GGMLQuantizationType.Q5_0: (32, 2 + 4 + 16), - GGMLQuantizationType.Q5_1: (32, 2 + 
2 + 4 + 16), - GGMLQuantizationType.Q8_0: (32, 2 + 32), - GGMLQuantizationType.Q8_1: (32, 4 + 4 + 32), - GGMLQuantizationType.Q2_K: (256, 2 + 2 + QK_K // 16 + QK_K // 4), - GGMLQuantizationType.Q3_K: (256, 2 + QK_K // 4 + QK_K // 8 + 12), - GGMLQuantizationType.Q4_K: (256, 2 + 2 + QK_K // 2 + 12), - GGMLQuantizationType.Q5_K: (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12), - GGMLQuantizationType.Q6_K: (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16), - GGMLQuantizationType.Q8_K: (256, 4 + QK_K + QK_K // 8), + GGMLQuantizationType.F32: (1, 4), + GGMLQuantizationType.F16: (1, 2), + GGMLQuantizationType.Q4_0: (32, 2 + 16), + GGMLQuantizationType.Q4_1: (32, 2 + 2 + 16), + GGMLQuantizationType.Q5_0: (32, 2 + 4 + 16), + GGMLQuantizationType.Q5_1: (32, 2 + 2 + 4 + 16), + GGMLQuantizationType.Q8_0: (32, 2 + 32), + GGMLQuantizationType.Q8_1: (32, 4 + 4 + 32), + GGMLQuantizationType.Q2_K: (256, 2 + 2 + QK_K // 16 + QK_K // 4), + GGMLQuantizationType.Q3_K: (256, 2 + QK_K // 4 + QK_K // 8 + 12), + GGMLQuantizationType.Q4_K: (256, 2 + 2 + QK_K // 2 + 12), + GGMLQuantizationType.Q5_K: (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12), + GGMLQuantizationType.Q6_K: (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16), + GGMLQuantizationType.Q8_K: (256, 4 + QK_K + QK_K // 8), + GGMLQuantizationType.IQ2_XXS: (256, 2 + QK_K // 4), + GGMLQuantizationType.IQ2_XS: (256, 2 + QK_K // 4 + QK_K // 32), + GGMLQuantizationType.IQ3_XXS: (256, 2 + QK_K // 4 + QK_K // 8), + GGMLQuantizationType.IQ1_S: (256, 2 + QK_K // 8 + QK_K // 16), + GGMLQuantizationType.IQ4_NL: (32, 2 + 16), + GGMLQuantizationType.IQ3_S: (256, 2 + QK_K // 4 + QK_K // 8 + QK_K // 32 + 4), + GGMLQuantizationType.IQ2_S: (256, 2 + QK_K // 4 + QK_K // 16), + GGMLQuantizationType.IQ4_XS: (256, 2 + 2 + QK_K // 2 + QK_K // 64), } From 475df1d6cf817060028d3ff763cb8097d4ec40d6 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Sun, 3 Mar 2024 04:40:27 -0600 Subject: [PATCH 772/811] llama : allow for user specified embedding pooling type (#5849) * allow for user specified pooling type * llama : use enum types over int --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 13 +++++++++++++ common/common.h | 7 +++++-- convert-hf-to-gguf.py | 18 +++++++++--------- llama.cpp | 44 +++++++++++++++++++++++++++---------------- llama.h | 7 +++++-- 5 files changed, 60 insertions(+), 29 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 1c0b7c403..dbe7e9229 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -335,6 +335,16 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.yarn_beta_slow = std::stof(argv[i]); + } else if (arg == "--pooling") { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "none") { params.pooling_type = LLAMA_POOLING_TYPE_NONE; } + else if (value == "mean") { params.pooling_type = LLAMA_POOLING_TYPE_MEAN; } + else if (value == "cls") { params.pooling_type = LLAMA_POOLING_TYPE_CLS; } + else { invalid_param = true; break; } } else if (arg == "--defrag-thold" || arg == "-dt") { if (++i >= argc) { invalid_param = true; @@ -1014,6 +1024,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); + printf(" --pooling 
{none,mean,cls}\n"); + printf(" pooling type for embeddings, use model default if unspecified\n"); printf(" -dt N, --defrag-thold N\n"); printf(" KV cache defragmentation threshold (default: %.1f, < 0 - disabled)\n", params.defrag_thold); printf(" --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); @@ -1296,6 +1308,7 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.yarn_orig_ctx = params.yarn_orig_ctx; + cparams.pooling_type = params.pooling_type; cparams.defrag_thold = params.defrag_thold; cparams.offload_kqv = !params.no_kv_offload; diff --git a/common/common.h b/common/common.h index ab62bdb82..d3682b7ad 100644 --- a/common/common.h +++ b/common/common.h @@ -76,8 +76,11 @@ struct gpt_params { float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length float defrag_thold = -1.0f; // KV cache defragmentation threshold - int32_t rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; - ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; + + ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; + + llama_rope_scaling_type rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; + llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_UNSPECIFIED; // pooling type for embeddings // // sampling parameters struct llama_sampling_params sparams; diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index fa9d4f22f..ffdba7444 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1644,16 +1644,17 @@ class BertModel(Model): self.gguf_writer.add_causal_attention(False) # get pooling path - with open(self.dir_model / "modules.json", encoding="utf-8") as f: - modules = json.load(f) pooling_path = None - for mod in modules: - if mod["type"] == "sentence_transformers.models.Pooling": - pooling_path = mod["path"] - break + module_path = self.dir_model / "modules.json" + if module_path.is_file(): + with open(module_path, encoding="utf-8") as f: + modules = json.load(f) + for mod in modules: + if mod["type"] == "sentence_transformers.models.Pooling": + pooling_path = mod["path"] + break # get pooling type - pooling_type = gguf.PoolingType.NONE if pooling_path is not None: with open(self.dir_model / pooling_path / "config.json", encoding="utf-8") as f: pooling = json.load(f) @@ -1663,8 +1664,7 @@ class BertModel(Model): pooling_type = gguf.PoolingType.CLS else: raise NotImplementedError("Only MEAN and CLS pooling types supported") - - self.gguf_writer.add_pooling_type(pooling_type) + self.gguf_writer.add_pooling_type(pooling_type) def set_vocab(self): path = self.dir_model diff --git a/llama.cpp b/llama.cpp index 41d0000da..c1f015791 100644 --- a/llama.cpp +++ b/llama.cpp @@ -873,16 +873,16 @@ struct LLM_TN { // gguf helpers // -static const std::map LLAMA_ROPE_SCALING_TYPES = { +static const std::map LLAMA_ROPE_SCALING_TYPES = { { LLAMA_ROPE_SCALING_TYPE_NONE, "none" }, { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" }, { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" }, }; -static int32_t llama_rope_scaling_type_from_string(const std::string & name) { +static llama_rope_scaling_type llama_rope_scaling_type_from_string(const std::string & name) { for (const auto & kv : LLAMA_ROPE_SCALING_TYPES) { if (kv.second == name) { - return kv.first; + return (llama_rope_scaling_type) kv.first; } } @@ -1612,7 +1612,6 @@ struct llama_hparams { float rope_freq_base_train; float 
rope_freq_scale_train; uint32_t n_yarn_orig_ctx; - int32_t rope_scaling_type_train; float f_clamp_kqv = 0.0f; float f_max_alibi_bias = 0.0f; @@ -1620,8 +1619,9 @@ struct llama_hparams { bool causal_attn = true; bool need_kq_pos = false; - enum llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_NONE; - enum llama_rope_type rope_type = LLAMA_ROPE_TYPE_NONE; + enum llama_pooling_type pooling_type = LLAMA_POOLING_TYPE_NONE; + enum llama_rope_type rope_type = LLAMA_ROPE_TYPE_NONE; + enum llama_rope_scaling_type rope_scaling_type_train = LLAMA_ROPE_SCALING_TYPE_NONE; bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1670,8 +1670,8 @@ struct llama_cparams { uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing - float rope_freq_base; - float rope_freq_scale; + float rope_freq_base; + float rope_freq_scale; uint32_t n_yarn_orig_ctx; // These hyperparameters are not exposed in GGUF, because all @@ -1683,7 +1683,7 @@ struct llama_cparams { float defrag_thold; bool offload_kqv; - bool do_pooling; + enum llama_pooling_type pooling_type; ggml_backend_sched_eval_callback cb_eval; void * cb_eval_user_data; @@ -2933,7 +2933,11 @@ template<> bool llama_model_loader::get_key(const enum llm_kv kid, enum llama_pooling_type & result, const bool required) { uint32_t tmp; const bool found = get_key(kid, tmp, required); - result = (enum llama_pooling_type) tmp; + if (found) { + result = (enum llama_pooling_type) tmp; + } else { + result = LLAMA_POOLING_TYPE_UNSPECIFIED; + } return found; } @@ -3210,7 +3214,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type, false); switch (hparams.n_layer) { case 3: @@ -5175,7 +5179,7 @@ struct llm_build_context { n_kv (worst_case ? n_ctx : kv_self.n), kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), - pooling_type (cparams.do_pooling ? 
hparams.pooling_type : LLAMA_POOLING_TYPE_NONE), + pooling_type (cparams.pooling_type), rope_type (hparams.rope_type), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { @@ -8015,7 +8019,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_MEAN) { + if (cparams.pooling_type == LLAMA_POOLING_TYPE_MEAN) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); @@ -8043,7 +8047,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_CLS) { + if (cparams.pooling_type == LLAMA_POOLING_TYPE_CLS) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); @@ -11846,6 +11850,7 @@ struct llama_context_params llama_context_default_params() { /*.n_threads =*/ GGML_DEFAULT_N_THREADS, // TODO: better default /*.n_threads_batch =*/ GGML_DEFAULT_N_THREADS, /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED, + /*.pooling_type =*/ LLAMA_POOLING_TYPE_UNSPECIFIED, /*.rope_freq_base =*/ 0.0f, /*.rope_freq_scale =*/ 0.0f, /*.yarn_ext_factor =*/ -1.0f, @@ -11861,7 +11866,6 @@ struct llama_context_params llama_context_default_params() { /*.logits_all =*/ false, /*.embedding =*/ false, /*.offload_kqv =*/ true, - /*.do_pooling =*/ true, /*.abort_callback =*/ nullptr, /*.abort_callback_data =*/ nullptr, }; @@ -12012,7 +12016,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.defrag_thold = params.defrag_thold; cparams.offload_kqv = params.offload_kqv; - cparams.do_pooling = params.do_pooling; + cparams.pooling_type = params.pooling_type; cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; cparams.rope_freq_base = params.rope_freq_base == 0.0f ? hparams.rope_freq_base_train : params.rope_freq_base; @@ -12038,6 +12042,14 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_YARN ? 
1.0f : 0.0f; } + if (cparams.pooling_type == LLAMA_POOLING_TYPE_UNSPECIFIED) { + if (hparams.pooling_type == LLAMA_POOLING_TYPE_UNSPECIFIED) { + cparams.pooling_type = LLAMA_POOLING_TYPE_NONE; + } else { + cparams.pooling_type = hparams.pooling_type; + } + } + if (params.seed == LLAMA_DEFAULT_SEED) { params.seed = time(NULL); } diff --git a/llama.h b/llama.h index 6406b5270..70da4cb3f 100644 --- a/llama.h +++ b/llama.h @@ -129,6 +129,7 @@ extern "C" { }; enum llama_pooling_type { + LLAMA_POOLING_TYPE_UNSPECIFIED = -1, LLAMA_POOLING_TYPE_NONE = 0, LLAMA_POOLING_TYPE_MEAN = 1, LLAMA_POOLING_TYPE_CLS = 2, @@ -236,7 +237,10 @@ extern "C" { uint32_t n_batch; // prompt processing maximum batch size uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing - int32_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` + + enum llama_rope_scaling_type rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` + enum llama_pooling_type pooling_type; // whether to pool (sum) embedding results by sequence id + // (ignored if no pooling layer) // ref: https://github.com/ggerganov/llama.cpp/pull/2054 float rope_freq_base; // RoPE base frequency, 0 = from model @@ -258,7 +262,6 @@ extern "C" { bool logits_all; // the llama_decode() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU - bool do_pooling; // whether to pool (sum) embedding results by sequence id (ignored if no pooling layer) // Abort callback // if it returns true, execution of llama_decode() will be aborted From 231ae28f078c3148d097b301f2145f1e3e816cc1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 3 Mar 2024 12:44:03 +0200 Subject: [PATCH 773/811] readme : add API changes section --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 939646753..45c5d06f3 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,10 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) in pure C/C++ +### Recent API changes + +- [2024 Mar 3] `struct llama_context_params` https://github.com/ggerganov/llama.cpp/pull/5849 + ### Hot topics - The `api_like_OAI.py` script has been removed - use `server` instead ([#5766](https://github.com/ggerganov/llama.cpp/issues/5766#issuecomment-1969037761)) From 67be2ce1015d070b3b2cd488bcb041eefb61de72 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 3 Mar 2024 14:26:18 +0100 Subject: [PATCH 774/811] cuda : fix data race in soft max (#5853) --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 7ed97430f..04c6cb1b8 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6904,6 +6904,7 @@ static __global__ void soft_max_f32(const float * x, const float * mask, const f // find the sum of exps in the block tmp = warp_reduce_sum(tmp); if (block_size > WARP_SIZE) { + __syncthreads(); if (warp_id == 0) { buf_iw[lane_id] = 0.0f; } From 5a51cc1bb4592f0d71f9af89cd08b11a066ba447 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?DAN=E2=84=A2?= Date: Mon, 4 Mar 2024 02:57:20 -0500 Subject: [PATCH 775/811] main : support special tokens as reverse/anti prompt (#5847) * Support special tokens as reverse/anti prompt. * Tokenize antiprompts only once. 
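Condensed into a hedged, self-contained sketch (token_id stands in for llama_token; the real check is in the examples/main/main.cpp hunk below): each reverse prompt is tokenized once up front, and a reverse prompt that maps to exactly one token, typically a special token, is matched directly against the last sampled token id, so no detokenized-string comparison is needed.

```cpp
// Sketch only, not the llama.cpp API: match the last sampled token against
// antiprompts that were pre-tokenized to a single token id.
#include <cstdint>
#include <vector>

using token_id = int32_t;

static bool hits_single_token_antiprompt(token_id last_token,
                                         const std::vector<std::vector<token_id>> & antiprompt_ids) {
    for (const std::vector<token_id> & ids : antiprompt_ids) {
        if (ids.size() == 1 && ids[0] == last_token) {
            return true;   // single-token antiprompt matched
        }
    }
    return false;
}

int main() {
    // {2} plays the role of a special token; {5, 7} is a multi-token antiprompt and is ignored here.
    std::vector<std::vector<token_id>> antiprompt_ids = {{2}, {5, 7}};
    return hits_single_token_antiprompt(2, antiprompt_ids) ? 0 : 1;
}
```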
* main : minor --------- Co-authored-by: Georgi Gerganov --- examples/main/main.cpp | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 34e84d0d4..47059e582 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -511,6 +511,14 @@ int main(int argc, char ** argv) { std::vector<llama_token> embd; std::vector<llama_token> embd_guidance; + // tokenized antiprompts + std::vector<std::vector<llama_token>> antiprompt_ids; + + antiprompt_ids.reserve(params.antiprompt.size()); + for (const std::string & antiprompt : params.antiprompt) { + antiprompt_ids.emplace_back(::llama_tokenize(ctx, antiprompt, false, true)); + } + struct llama_sampling_context * ctx_sampling = llama_sampling_init(sparams); while ((n_remain != 0 && !is_antiprompt) || params.interactive) { @@ -769,6 +777,18 @@ int main(int argc, char ** argv) { } } + // check for reverse prompt using special tokens + llama_token last_token = llama_sampling_last(ctx_sampling); + for (std::vector<llama_token> ids : antiprompt_ids) { + if (ids.size() == 1 && last_token == ids[0]) { + if (params.interactive) { + is_interacting = true; + } + is_antiprompt = true; + break; + } + } + if (is_antiprompt) { LOG("found antiprompt: %s\n", last_output.c_str()); } From 82f3e668adafba647de703f835991e91a96b5ac4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?DAN=E2=84=A2?= Date: Mon, 4 Mar 2024 03:08:19 -0500 Subject: [PATCH 776/811] common : use LLAMA_DEFAULT_SEED (#5855) --- common/common.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.h b/common/common.h index d3682b7ad..b2868833b 100644 --- a/common/common.h +++ b/common/common.h @@ -43,7 +43,7 @@ extern char const *LLAMA_BUILD_TARGET; int32_t get_num_physical_cores(); struct gpt_params { - uint32_t seed = -1; // RNG seed + uint32_t seed = LLAMA_DEFAULT_SEED; // RNG seed int32_t n_threads = get_num_physical_cores(); int32_t n_threads_draft = -1; From 7d43c585dc174bb586775c22c15e5db9242b5b4b Mon Sep 17 00:00:00 2001 From: leejet Date: Sun, 3 Mar 2024 20:23:52 +0800 Subject: [PATCH 777/811] add some new ops, fix some operators and add batch operations to certain operators.
(ggml/747) * cuda: fix group_norm * cuda: add batch inference support for ggml_pad/ggml_upscale * add ggml_arrange * add ggml_timestep_embedding * update ggml_arange/ggml_timestep_embedding tests * cuda: fix im2col * add ggml_arange/ggml_timestep_embbeding support for metal backend * fix some bugs * fix some bugs * Update ggml.h Co-authored-by: Georgi Gerganov * Update ggml-cuda.cu Co-authored-by: Georgi Gerganov * Update ggml-metal.m Co-authored-by: Georgi Gerganov * Update ggml-metal.m Co-authored-by: Georgi Gerganov * Update ggml-metal.metal Co-authored-by: Georgi Gerganov * modify according to the review comments * ggml : fix compile warnings + code style * ggml : normalize compute_forward calls + fix seg fault in debug * minor --------- Co-authored-by: Georgi Gerganov Co-authored-by: slaren --- ggml-cuda.cu | 227 ++++++++++++++++++++++++++++++------- ggml-metal.m | 62 +++++++++- ggml-metal.metal | 43 +++++++ ggml.c | 207 +++++++++++++++++++++++++++++++-- ggml.h | 17 +++ tests/test-backend-ops.cpp | 46 ++++++++ 6 files changed, 550 insertions(+), 52 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 04c6cb1b8..7d027a30a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -616,6 +616,8 @@ static_assert(sizeof(block_iq4_xs) == sizeof(ggml_fp16_t) + sizeof(uint16_t) + Q #define CUDA_UPSCALE_BLOCK_SIZE 256 #define CUDA_CONCAT_BLOCK_SIZE 256 #define CUDA_PAD_BLOCK_SIZE 256 +#define CUDA_ARANGE_BLOCK_SIZE 256 +#define CUDA_TIMESTEP_EMBEDDING_BLOCK_SIZE 256 #define CUDA_ACC_BLOCK_SIZE 256 #define CUDA_IM2COL_BLOCK_SIZE 256 #define CUDA_POOL2D_BLOCK_SIZE 256 @@ -990,17 +992,21 @@ static __global__ void concat_f32(const float * x,const float * y, float * dst, nidx + blockIdx.y * ne0 + blockIdx.z * ne0 * gridDim.y; - dst[offset_dst] = x[offset_src]; + dst[offset_dst] = x[offset_src]; } else { int offset_src = nidx + blockIdx.y * ne0 + (blockIdx.z - ne02) * ne0 * gridDim.y; - dst[offset_dst] = y[offset_src]; + dst[offset_dst] = y[offset_src]; } } -static __global__ void upscale_f32(const float * x, float * dst, const int ne00, const int nb02, const int scale_factor) { +static __global__ void upscale_f32(const float * x, float * dst, const int ne00, const int ne00xne01, const int scale_factor) { + // blockIdx.z: idx of ne02*ne03 + // blockIdx.y: idx of ne01*scale_factor, aka ne1 + // blockIDx.x: idx of ne00*scale_factor / BLOCK_SIZE + // ne00xne01: ne00 * ne01 int ne0 = ne00 * scale_factor; int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { @@ -1012,7 +1018,7 @@ static __global__ void upscale_f32(const float * x, float * dst, const int ne00, int offset_src = i00 + i01 * ne00 + - blockIdx.z * nb02; + blockIdx.z * ne00xne01; int offset_dst = nidx + blockIdx.y * ne0 + @@ -1020,7 +1026,10 @@ static __global__ void upscale_f32(const float * x, float * dst, const int ne00, dst[offset_dst] = x[offset_src]; } -static __global__ void pad_f32(const float * x, float * dst, const int ne0, const int ne00, const int ne01, const int ne02) { +static __global__ void pad_f32(const float * x, float * dst, const int ne0, const int ne00, const int ne01, const int ne02, const int ne03) { + // blockIdx.z: idx of ne2*ne3, aka ne02*ne03 + // blockIdx.y: idx of ne1 + // blockIDx.x: idx of ne0 / BLOCK_SIZE int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { return; @@ -1031,19 +1040,53 @@ static __global__ void pad_f32(const float * x, float * dst, const int ne0, cons nidx + blockIdx.y * ne0 + blockIdx.z * ne0 * gridDim.y; - if (nidx < ne00 && blockIdx.y < ne01 && blockIdx.z < ne02) { 
+ if (nidx < ne00 && blockIdx.y < ne01 && blockIdx.z < ne02*ne03) { int offset_src = nidx + blockIdx.y * ne00 + blockIdx.z * ne00 * ne01; - dst[offset_dst] = x[offset_src]; + dst[offset_dst] = x[offset_src]; } else { dst[offset_dst] = 0.0f; } } +static __global__ void arange_f32(float * dst, const int ne0, const float start, const float step) { + // blockIDx.x: idx of ne0 / BLOCK_SIZE + int nidx = threadIdx.x + blockIdx.x * blockDim.x; + if (nidx >= ne0) { + return; + } + dst[nidx] = start + step * nidx; +} + +static __global__ void timestep_embedding_f32(const float * timesteps, float * dst, const int nb1, const int dim, const int max_period) { + // blockIDx.y: idx of timesteps->ne[0] + // blockIDx.x: idx of ((dim + 1) / 2) / BLOCK_SIZE + int i = blockIdx.y; + int j = threadIdx.x + blockIdx.x * blockDim.x; + float * embed_data = (float *)((char *)dst + i*nb1); + + if (dim % 2 != 0 && j == ((dim + 1) / 2)) { + embed_data[dim] = 0.f; + } + + int half = dim / 2; + if (j >= half) { + return; + } + + float timestep = timesteps[i]; + float freq = (float)expf(-logf(max_period) * j / half); + float arg = timestep * freq; + embed_data[j] = cosf(arg); + embed_data[j + half] = sinf(arg); +} + template static __global__ void group_norm_f32(const float * x, float * dst, const int group_size, const int ne_elements, const float eps) { + // blockIdx.x: num_groups idx + // threadIdx.x: block_size idx int start = blockIdx.x * group_size; int end = start + group_size; @@ -6448,7 +6491,7 @@ static __global__ void cpy_f32_f16(const char * cx, char * cdst, const int ne, const int ne00, const int ne01, const int ne02, const int nb00, const int nb01, const int nb02, const int nb03, const int ne10, const int ne11, const int ne12, const int nb10, const int nb11, const int nb12, const int nb13) { - const int i = blockDim.x*blockIdx.x + threadIdx.x; + const int64_t i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= ne) { return; @@ -6456,17 +6499,17 @@ static __global__ void cpy_f32_f16(const char * cx, char * cdst, const int ne, // determine indices i03/i13, i02/i12, i01/i11, i00/i10 as a function of index i of flattened tensor // then combine those indices with the corresponding byte offsets to get the total offsets - const int i03 = i/(ne00 * ne01 * ne02); - const int i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); - const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; - const int i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; - const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; + const int64_t i03 = i/(ne00 * ne01 * ne02); + const int64_t i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); + const int64_t i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; + const int64_t i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; + const int64_t x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; - const int i13 = i/(ne10 * ne11 * ne12); - const int i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); - const int i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; - const int i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; - const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12 + i13 * nb13; + const int64_t i13 = i/(ne10 * ne11 * ne12); + const int64_t i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); + const int64_t i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; + const int64_t i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; + const int64_t dst_offset = i10*nb10 + i11*nb11 + i12*nb12 + i13 * nb13; cpy_1(cx + x_offset, cdst + dst_offset); } @@ -6956,23 +6999,23 
@@ static __global__ void clamp_f32(const float * x, float * dst, const float min, template static __global__ void im2col_kernel( - const float * x, T * dst, int batch_offset, - int offset_delta, int IC, int IW, int IH, int OH, int OW, int KW, int KH, int pelements, int CHW, + const float * x, T * dst, int64_t batch_offset, + int64_t offset_delta, int64_t IC, int64_t IW, int64_t IH, int64_t OH, int64_t OW, int64_t KW, int64_t KH, int64_t pelements, int64_t CHW, int s0, int s1, int p0, int p1, int d0, int d1) { - const int i = threadIdx.x + blockIdx.x * blockDim.x; + const int64_t i = threadIdx.x + blockIdx.x * blockDim.x; if (i >= pelements) { return; } - const int ksize = OW * (KH > 1 ? KW : 1); - const int kx = i / ksize; - const int kd = kx * ksize; - const int ky = (i - kd) / OW; - const int ix = i % OW; + const int64_t ksize = OW * (KH > 1 ? KW : 1); + const int64_t kx = i / ksize; + const int64_t kd = kx * ksize; + const int64_t ky = (i - kd) / OW; + const int64_t ix = i % OW; - const int oh = blockIdx.y; - const int batch = blockIdx.z / IC; - const int ic = blockIdx.z % IC; + const int64_t oh = blockIdx.y; + const int64_t batch = blockIdx.z / IC; + const int64_t ic = blockIdx.z % IC; const int64_t iiw = ix * s0 + kx * d0 - p0; const int64_t iih = oh * s1 + ky * d1 - p1; @@ -7298,19 +7341,33 @@ static void concat_f32_cuda(const float * x, const float * y, float * dst, const concat_f32<<>>(x, y, dst, ne0, ne02); } -static void upscale_f32_cuda(const float * x, float * dst, const int ne00, const int ne01, const int ne02, const int scale_factor, cudaStream_t stream) { +static void upscale_f32_cuda(const float * x, float * dst, const int ne00, const int ne01, const int ne02, const int ne03, + const int scale_factor, cudaStream_t stream) { int ne0 = (ne00 * scale_factor); int num_blocks = (ne0 + CUDA_UPSCALE_BLOCK_SIZE - 1) / CUDA_UPSCALE_BLOCK_SIZE; - dim3 gridDim(num_blocks, (ne01 * scale_factor), ne02); + dim3 gridDim(num_blocks, (ne01 * scale_factor), ne02*ne03); upscale_f32<<>>(x, dst, ne00, ne00 * ne01, scale_factor); } static void pad_f32_cuda(const float * x, float * dst, - const int ne00, const int ne01, const int ne02, - const int ne0, const int ne1, const int ne2, cudaStream_t stream) { + const int ne00, const int ne01, const int ne02, const int ne03, + const int ne0, const int ne1, const int ne2, const int ne3, cudaStream_t stream) { int num_blocks = (ne0 + CUDA_PAD_BLOCK_SIZE - 1) / CUDA_PAD_BLOCK_SIZE; - dim3 gridDim(num_blocks, ne1, ne2); - pad_f32<<>>(x, dst, ne0, ne00, ne01, ne02); + dim3 gridDim(num_blocks, ne1, ne2*ne3); + pad_f32<<>>(x, dst, ne0, ne00, ne01, ne02, ne03); +} + +static void arange_f32_cuda(float * dst, const int ne0, const float start, const float step, cudaStream_t stream) { + int num_blocks = (ne0 + CUDA_ARANGE_BLOCK_SIZE - 1) / CUDA_ARANGE_BLOCK_SIZE; + arange_f32<<>>(dst, ne0, start, step); +} + +static void timestep_embedding_f32_cuda(const float * x, float * dst, const int ne00, const int nb1, + const int dim, const int max_period, cudaStream_t stream) { + int half_ceil = (dim + 1) / 2; + int num_blocks = (half_ceil + CUDA_TIMESTEP_EMBEDDING_BLOCK_SIZE - 1) / CUDA_TIMESTEP_EMBEDDING_BLOCK_SIZE; + dim3 gridDim(num_blocks, ne00, 1); + timestep_embedding_f32<<>>(x, dst, nb1, dim, max_period); } static void rms_norm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, const float eps, cudaStream_t stream) { @@ -8443,8 +8500,8 @@ static void soft_max_f32_cuda(const float * x, const float * mask, const float * template static void 
im2col_cuda(const float* x, T* dst, - int IW, int IH, int OW, int OH, int KW, int KH, int IC, - int batch, int batch_offset, int offset_delta, + int64_t IW, int64_t IH, int64_t OW, int64_t OH, int64_t KW, int64_t KH, int64_t IC, + int64_t batch, int64_t batch_offset, int64_t offset_delta, int s0,int s1,int p0,int p1,int d0,int d1, cudaStream_t stream) { const int parallel_elements = OW * KW * KH; const int num_blocks = (parallel_elements + CUDA_IM2COL_BLOCK_SIZE - 1) / CUDA_IM2COL_BLOCK_SIZE; @@ -9123,7 +9180,7 @@ static void ggml_cuda_op_group_norm( int num_groups = dst->op_params[0]; int group_size = src0->ne[0] * src0->ne[1] * ((src0->ne[2] + num_groups - 1) / num_groups); - group_norm_f32_cuda(src0_dd, dst_dd, num_groups, group_size, src0->ne[0] * src0->ne[1] * src0->ne[2], main_stream); + group_norm_f32_cuda(src0_dd, dst_dd, num_groups * src0->ne[3], group_size, ggml_nelements(src0), main_stream); (void) src1; (void) dst; @@ -9156,7 +9213,7 @@ static void ggml_cuda_op_upscale( const int scale_factor = dst->op_params[0]; - upscale_f32_cuda(src0_dd, dst_dd, src0->ne[0], src0->ne[1], src0->ne[2], scale_factor, main_stream); + upscale_f32_cuda(src0_dd, dst_dd, src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3], scale_factor, main_stream); (void) src1; (void) dst; @@ -9172,8 +9229,49 @@ static void ggml_cuda_op_pad( GGML_ASSERT(src0->ne[3] == 1 && dst->ne[3] == 1); // just 3D tensors pad_f32_cuda(src0_dd, dst_dd, - src0->ne[0], src0->ne[1], src0->ne[2], - dst->ne[0], dst->ne[1], dst->ne[2], main_stream); + src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3], + dst->ne[0], dst->ne[1], dst->ne[2], dst->ne[3], main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +static void ggml_cuda_op_arange( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + GGML_ASSERT(dst->type == GGML_TYPE_F32); + + float start; + float stop; + float step; + memcpy(&start, (float *)dst->op_params + 0, sizeof(float)); + memcpy(&stop, (float *)dst->op_params + 1, sizeof(float)); + memcpy(&step, (float *)dst->op_params + 2, sizeof(float)); + + int64_t steps = (int64_t)ceil((stop - start) / step); + GGML_ASSERT(ggml_nelements(dst) == steps); + + arange_f32_cuda(dst_dd, dst->ne[0], start, step, main_stream); + + (void) src0; + (void) src1; + (void) src0_dd; + (void) src1_dd; +} + +static void ggml_cuda_op_timestep_embedding( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_F32); + + const int dim = dst->op_params[0]; + const int max_period = dst->op_params[1]; + + timestep_embedding_f32_cuda(src0_dd, dst_dd, src0->ne[0], dst->nb[1], dim, max_period, main_stream); (void) src1; (void) dst; @@ -10458,6 +10556,45 @@ static void ggml_cuda_pad(const ggml_tensor * src0, const ggml_tensor * src1, gg ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_pad); } +static void ggml_cuda_arange(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; + + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU; + + // dd = data device + float * src0_ddf = nullptr; + float * src1_ddf = nullptr; + float * dst_ddf = nullptr; + + cuda_pool_alloc dst_f; + + ggml_cuda_set_device(g_main_device); + cudaStream_t main_stream = 
g_cudaStreams[g_main_device][0]; + + if (dst_on_device) { + dst_ddf = (float *) dst_extra->data_device[g_main_device]; + } else { + dst_ddf = dst_f.alloc(ggml_nelements(dst)); + } + + // do the computation + ggml_cuda_op_arange(src0, src1, dst, src0_ddf, src1_ddf, dst_ddf, main_stream); + CUDA_CHECK(cudaGetLastError()); + + // copy dst to host if necessary + if (!dst_on_device) { + CUDA_CHECK(cudaMemcpyAsync(dst->data, dst_ddf, ggml_nbytes(dst), cudaMemcpyDeviceToHost, main_stream)); + } + + if (dst->backend == GGML_BACKEND_TYPE_CPU) { + CUDA_CHECK(cudaDeviceSynchronize()); + } +} + +static void ggml_cuda_timestep_embedding(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_timestep_embedding); +} + static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_rms_norm); } @@ -11358,6 +11495,12 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st case GGML_OP_PAD: func = ggml_cuda_pad; break; + case GGML_OP_ARANGE: + func = ggml_cuda_arange; + break; + case GGML_OP_TIMESTEP_EMBEDDING: + func = ggml_cuda_timestep_embedding; + break; case GGML_OP_LEAKY_RELU: func = ggml_cuda_leaky_relu; break; @@ -12253,6 +12396,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_OP_GROUP_NORM: case GGML_OP_UPSCALE: case GGML_OP_PAD: + case GGML_OP_ARANGE: + case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_LEAKY_RELU: return true; default: diff --git a/ggml-metal.m b/ggml-metal.m index 71fcca560..6b5a8fdf5 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -163,6 +163,8 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_IM2COL_F32, GGML_METAL_KERNEL_TYPE_UPSCALE_F32, GGML_METAL_KERNEL_TYPE_PAD_F32, + GGML_METAL_KERNEL_TYPE_ARANGE_F32, + GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, @@ -569,6 +571,8 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, timestep_embedding_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARANGE_F32, arange_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); @@ -697,6 +701,8 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const return false; case GGML_OP_UPSCALE: case GGML_OP_PAD: + case GGML_OP_ARANGE: + case GGML_OP_TIMESTEP_EMBEDDING: case GGML_OP_ARGSORT: case GGML_OP_LEAKY_RELU: return true; @@ -1091,7 +1097,8 @@ static bool ggml_metal_graph_compute( { GGML_ASSERT(ggml_is_contiguous(src0)); - const float scale = *(const float *) dst->op_params; + float scale; + memcpy(&scale, dst->op_params, sizeof(scale)); int64_t n = ggml_nelements(dst); @@ -1250,11 +1257,15 @@ static bool ggml_metal_graph_compute( pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; } - const float scale = ((float *) dst->op_params)[0]; - const float 
max_bias = ((float *) dst->op_params)[1]; + float scale; + float max_bias; + + memcpy(&scale, ((int32_t *) dst->op_params) + 0, sizeof(scale)); + memcpy(&max_bias, ((int32_t *) dst->op_params) + 1, sizeof(max_bias)); const int64_t nrows_x = ggml_nrows(src0); const int64_t nrows_y = src0->ne[1]; + const uint32_t n_head_kv = nrows_x/nrows_y; const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); @@ -2086,6 +2097,7 @@ static bool ggml_metal_graph_compute( //const int n_past = ((int32_t *) dst->op_params)[0]; const int n_head = ((int32_t *) dst->op_params)[1]; + float max_bias; memcpy(&max_bias, (int32_t *) dst->op_params + 2, sizeof(float)); @@ -2300,6 +2312,50 @@ static bool ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; + case GGML_OP_ARANGE: + { + GGML_ASSERT(dst->type == GGML_TYPE_F32); + + float start; + float step; + + memcpy(&start, ((int32_t *) dst->op_params) + 0, sizeof(float)); + memcpy(&step, ((int32_t *) dst->op_params) + 2, sizeof(float)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARANGE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:0]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:1]; + [encoder setBytes:&start length:sizeof(start) atIndex:2]; + [encoder setBytes:&step length:sizeof(step) atIndex:3]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(1, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int dim = dst->op_params[0]; + const int max_period = dst->op_params[1]; + + const int half = dim / 2; + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:2]; + [encoder setBytes:&dim length:sizeof(dim) atIndex:3]; + [encoder setBytes:&max_period length:sizeof(max_period) atIndex:4]; + + const int nth = MIN(1024, half); + + [encoder dispatchThreadgroups:MTLSizeMake(ne00, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; case GGML_OP_ARGSORT: { GGML_ASSERT(src0->type == GGML_TYPE_F32); diff --git a/ggml-metal.metal b/ggml-metal.metal index 8b9488437..a65d12641 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1959,6 +1959,49 @@ kernel void kernel_pad_f32( } } +kernel void kernel_arange_f32( + device char * dst, + constant int64_t & ne0, + constant float & start, + constant float & step, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + device float * dst_ptr = (device float *) dst; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + dst_ptr[i0] = start + step * i0; + } +} + +kernel void kernel_timestep_embedding_f32( + device const char * src0, + device char * dst, + constant uint64_t & nb1, + constant int & dim, + constant int & max_period, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + int i = tgpig.x; + device float * embed_data = (device float *)(dst + i*nb1); + + int half_ = dim / 2; + for (int j = tpitg.x; j < half_; j += ntg.x) { + float timestep = ((device float *)src0)[i]; + float freq 
= (float)exp(-log((float)max_period) * j / half_); + float arg = timestep * freq; + embed_data[j ] = cos(arg); + embed_data[j + half_] = sin(arg); + } + + if (dim % 2 != 0 && tpitg.x == 0) { + embed_data[dim] = 0.f; + } +} + // bitonic sort implementation following the CUDA kernels as reference typedef void (argsort_t)( device const float * x, diff --git a/ggml.c b/ggml.c index f29b9f13f..870e41614 100644 --- a/ggml.c +++ b/ggml.c @@ -1822,6 +1822,8 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "POOL_2D", "UPSCALE", "PAD", + "ARANGE", + "TIMESTEP_EMBEDDING", "ARGSORT", "LEAKY_RELU", @@ -1850,7 +1852,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 72, "GGML_OP_COUNT != 72"); +static_assert(GGML_OP_COUNT == 74, "GGML_OP_COUNT != 74"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1908,6 +1910,8 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "pool_2d(x)", "upscale(x)", "pad(x)", + "arange(start, stop, step)", + "timestep_embedding(timesteps, dim, max_period)", "argsort(x)", "leaky_relu(x)", @@ -1936,7 +1940,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 72, "GGML_OP_COUNT != 72"); +static_assert(GGML_OP_COUNT == 74, "GGML_OP_COUNT != 74"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -2895,11 +2899,21 @@ static int32_t ggml_get_op_params_i32(const struct ggml_tensor * tensor, uint32_ return ((const int32_t *)(tensor->op_params))[i]; } +static float ggml_get_op_params_f32(const struct ggml_tensor * tensor, uint32_t i) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(float)); + return ((const float *)(tensor->op_params))[i]; +} + static void ggml_set_op_params_i32(struct ggml_tensor * tensor, uint32_t i, int32_t value) { assert(i < GGML_MAX_OP_PARAMS / sizeof(int32_t)); ((int32_t *)(tensor->op_params))[i] = value; } +static void ggml_set_op_params_f32(struct ggml_tensor * tensor, uint32_t i, float value) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(float)); + ((float *)(tensor->op_params))[i] = value; +} + struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor) { memset(tensor->data, 0, ggml_nbytes(tensor)); return tensor; @@ -5898,6 +5912,55 @@ struct ggml_tensor * ggml_upscale( return ggml_upscale_impl(ctx, a, scale_factor); } +struct ggml_tensor * ggml_arange( + struct ggml_context * ctx, + float start, + float stop, + float step) { + + GGML_ASSERT(stop > start); + + const int64_t steps = (int64_t) ceilf((stop - start) / step); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, steps); + + result->op = GGML_OP_ARANGE; + ggml_set_op_params_f32(result, 0, start); + ggml_set_op_params_f32(result, 1, stop); + ggml_set_op_params_f32(result, 2, step); + + return result; +} + +struct ggml_tensor * ggml_timestep_embedding( + struct ggml_context * ctx, + struct ggml_tensor * timesteps, + int dim, + int max_period) { + bool is_node = false; + + if (timesteps->grad) { + GGML_ASSERT(false); // TODO: implement backward + is_node = true; + } + + int actual_dim = dim; + if (dim % 2 != 0) { + actual_dim = dim + 1; + } + + struct ggml_tensor * result = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, actual_dim, timesteps->ne[0]); + + result->op = GGML_OP_TIMESTEP_EMBEDDING; + ggml_set_op_params_i32(result, 0, dim); + ggml_set_op_params_i32(result, 1, max_period); + + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = timesteps; + + return result; +} + // ggml_argsort struct ggml_tensor * ggml_argsort( @@ -10231,7 +10294,7 @@ static void ggml_compute_forward_group_norm_f32( int n_channels = src0->ne[2]; int n_groups = dst->op_params[0]; int n_channels_per_group = (n_channels + n_groups - 1) / n_groups; - for (int i = ith; i < n_groups; i+=nth) { + for (int i = ith; i < n_groups; i += nth) { int start = i * n_channels_per_group; int end = start + n_channels_per_group; if (end > n_channels) { @@ -10245,28 +10308,32 @@ static void ggml_compute_forward_group_norm_f32( for (int64_t i01 = 0; i01 < ne01; i01++) { const float * x = (float *)((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03); + ggml_float sumr = 0.0; for (int64_t i00 = 0; i00 < ne00; i00++) { - sum += (ggml_float)x[i00]; + sumr += (ggml_float)x[i00]; } + sum += sumr; } } - float mean = sum / (ne00 * ne01 * step); - ggml_float sum2 = 0.0; + const float mean = sum / (ne00 * ne01 * step); + ggml_float sum2 = 0.0; for (int64_t i02 = start; i02 < end; i02++) { for (int64_t i01 = 0; i01 < ne01; i01++) { const float * x = (float *)((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03); float * y = (float *)((char *) dst->data + i01 * nb1 + i02 * nb2 + i03 * nb3); + ggml_float sumr = 0.0; for (int64_t i00 = 0; i00 < ne00; i00++) { float v = x[i00] - mean; y[i00] = v; - sum2 += (ggml_float)(v * v); + sumr += (ggml_float)(v * v); } + sum2 += sumr; } } - float variance = sum2 / (ne00 * ne01 * step); + const float variance = sum2 / (ne00 * ne01 * step); const float scale = 1.0f / sqrtf(variance + eps); for (int64_t i02 = start; i02 < end; i02++) { @@ -13547,6 +13614,106 @@ static void ggml_compute_forward_pad( } } + +// ggml_compute_forward_arange + +static void ggml_compute_forward_arange_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { + return; + } + + GGML_ASSERT(dst->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + const float start = ggml_get_op_params_f32(dst, 0); + const float stop = ggml_get_op_params_f32(dst, 1); + const float step = ggml_get_op_params_f32(dst, 2); + + const int64_t steps = (int64_t) ceilf((stop - start) / step); + + GGML_ASSERT(ggml_nelements(dst) == steps); + + for (int64_t i = ith; i < steps; i+= nth) { + float value = start + step * i; + ((float *)dst->data)[i] = value; + } +} + +static void ggml_compute_forward_arange( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_arange_f32(params, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + +static void ggml_compute_forward_timestep_embedding_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { + return; + } + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(src0->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + const int dim = ggml_get_op_params_i32(dst, 0); + const int max_period = ggml_get_op_params_i32(dst, 1); + + int half = dim / 2; + + for (int64_t i = 0; i < ne00; i++) { + float * embed_data = (float *)((char *) dst->data + i*nb1); + for (int64_t j = ith; j < half; j += nth) { + float timestep = ((float 
*)src0->data)[i]; + float freq = (float)expf(-logf(max_period) * j / half); + float arg = timestep * freq; + embed_data[j] = cosf(arg); + embed_data[j + half] = sinf(arg); + } + if (dim % 2 != 0 && ith == 0) { + embed_data[dim] = 0.f; + } + } +} + +static void ggml_compute_forward_timestep_embedding( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_timestep_embedding_f32(params, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_argsort static void ggml_compute_forward_argsort_f32( @@ -15615,6 +15782,14 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_pad(params, tensor); } break; + case GGML_OP_ARANGE: + { + ggml_compute_forward_arange(params, tensor); + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + ggml_compute_forward_timestep_embedding(params, tensor); + } break; case GGML_OP_ARGSORT: { ggml_compute_forward_argsort(params, tensor); @@ -16617,6 +16792,14 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // TODO: not implemented } break; + case GGML_OP_ARANGE: + { + GGML_ASSERT(false); // TODO: not implemented + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_ARGSORT: { GGML_ASSERT(false); // TODO: not implemented @@ -17368,6 +17551,14 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = n_threads; } break; + case GGML_OP_ARANGE: + { + n_tasks = n_threads; + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + n_tasks = n_threads; + } break; case GGML_OP_ARGSORT: { n_tasks = n_threads; diff --git a/ggml.h b/ggml.h index 0a6d3c051..98cfc7bf8 100644 --- a/ggml.h +++ b/ggml.h @@ -454,6 +454,8 @@ extern "C" { GGML_OP_POOL_2D, GGML_OP_UPSCALE, // nearest interpolate GGML_OP_PAD, + GGML_OP_ARANGE, + GGML_OP_TIMESTEP_EMBEDDING, GGML_OP_ARGSORT, GGML_OP_LEAKY_RELU, @@ -1661,6 +1663,15 @@ extern "C" { int p2, int p3); + // Ref: https://github.com/CompVis/stable-diffusion/blob/main/ldm/modules/diffusionmodules/util.py#L151 + // timesteps: [N,] + // return: [N, dim] + GGML_API struct ggml_tensor * ggml_timestep_embedding( + struct ggml_context * ctx, + struct ggml_tensor * timesteps, + int dim, + int max_period); + // sort rows enum ggml_sort_order { GGML_SORT_ORDER_ASC, @@ -1672,6 +1683,12 @@ extern "C" { struct ggml_tensor * a, enum ggml_sort_order order); + GGML_API struct ggml_tensor * ggml_arange( + struct ggml_context * ctx, + float start, + float stop, + float step); + // top k elements per row GGML_API struct ggml_tensor * ggml_top_k( struct ggml_context * ctx, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index d4cea805f..8a6999f21 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1412,6 +1412,50 @@ struct test_pad : public test_case { } }; +// GGML_OP_ARANGE +struct test_arange : public test_case { + const ggml_type type; + const float start; + const float stop; + const float step; + + std::string vars() override { + return VARS_TO_STR4(type, start, stop, step); + } + + test_arange(ggml_type type = GGML_TYPE_F32, + float start = 0.f, float stop = 10.f, float step = 1.f) + : type(type), start(start), stop(stop), step(step) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * out = ggml_arange(ctx, start, 
stop, step); + return out; + } +}; + +// GGML_OP_TIMESTEP_EMBEDDING +struct test_timestep_embedding : public test_case { + const ggml_type type; + const std::array ne_a; + const int dim; + const int max_period; + + std::string vars() override { + return VARS_TO_STR4(type, ne_a, dim, max_period); + } + + test_timestep_embedding(ggml_type type = GGML_TYPE_F32, + std::array ne_a = {2, 1, 1, 1}, + int dim = 320, int max_period=10000) + : type(type), ne_a(ne_a), dim(dim), max_period(max_period) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne_a.data()); + ggml_tensor * out = ggml_timestep_embedding(ctx, a, dim, max_period); + return out; + } +}; + // GGML_OP_LEAKY_RELU struct test_leaky_relu : public test_case { const ggml_type type; @@ -2126,6 +2170,8 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_group_norm()); test_cases.emplace_back(new test_acc()); test_cases.emplace_back(new test_pad()); + test_cases.emplace_back(new test_arange()); + test_cases.emplace_back(new test_timestep_embedding()); test_cases.emplace_back(new test_leaky_relu()); // these tests are disabled to save execution time, but they can be handy for debugging From a0fc62661f0fd2a9edd10ae5617345bbbf972f42 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 4 Mar 2024 10:40:04 +0200 Subject: [PATCH 778/811] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 389c0bdfe..4a1b0bab4 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -b458250b736a7473f7ff3560d47c93f1644f3290 +274680868e12427373bab4bec87554431b954704 From 4ffcdce2ff877ebb683cd217ea38faf20faa5ffe Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Mon, 4 Mar 2024 12:22:08 +0100 Subject: [PATCH 779/811] add alias for chat template (#5858) --- examples/server/server.cpp | 2 +- llama.cpp | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0ca388f47..208edd571 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -413,7 +413,7 @@ struct llama_server_context int res = llama_chat_apply_template(model, nullptr, chat, 1, true, buf.data(), buf.size()); if (res < 0) { LOG_ERROR("The chat template comes with this model is not yet supported, falling back to chatml. 
This may cause the model to output suboptimal responses", {}); - sparams.chat_template = "<|im_start|>"; // llama_chat_apply_template only checks if <|im_start|> exist in the template + sparams.chat_template = "chatml"; } } diff --git a/llama.cpp b/llama.cpp index c1f015791..de579d9e3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -13282,7 +13282,7 @@ static int32_t llama_chat_apply_template_internal( std::string & dest, bool add_ass) { // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527 std::stringstream ss; - if (tmpl.find("<|im_start|>") != std::string::npos) { + if (tmpl == "chatml" || tmpl.find("<|im_start|>") != std::string::npos) { // chatml template for (auto message : chat) { ss << "<|im_start|>" << message->role << "\n" << message->content << "<|im_end|>\n"; @@ -13290,7 +13290,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|im_start|>assistant\n"; } - } else if (tmpl.find("[INST]") != std::string::npos) { + } else if (tmpl == "llama2" || tmpl.find("[INST]") != std::string::npos) { // llama2 template and its variants // [variant] support system message bool support_system_message = tmpl.find("<>") != std::string::npos; @@ -13325,7 +13325,7 @@ static int32_t llama_chat_apply_template_internal( } } // llama2 templates seem to not care about "add_generation_prompt" - } else if (tmpl.find("<|user|>") != std::string::npos) { + } else if (tmpl == "zephyr" || tmpl.find("<|user|>") != std::string::npos) { // zephyr template for (auto message : chat) { ss << "<|" << message->role << "|>" << "\n" << message->content << "<|endoftext|>\n"; @@ -13333,7 +13333,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>\n"; } - } else if (tmpl.find("bos_token + message['role']") != std::string::npos) { + } else if (tmpl == "monarch" || tmpl.find("bos_token + message['role']") != std::string::npos) { // mlabonne/AlphaMonarch-7B template (the is included inside history) for (auto message : chat) { std::string bos = (message == chat.front()) ? 
"" : ""; // skip BOS for first message @@ -13342,7 +13342,7 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "assistant\n"; } - } else if (tmpl.find("") != std::string::npos) { + } else if (tmpl == "gemma" || tmpl.find("") != std::string::npos) { // google/gemma-7b-it std::string system_prompt = ""; for (auto message : chat) { @@ -13389,7 +13389,7 @@ LLAMA_API int32_t llama_chat_apply_template( int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), model_template.size()); if (res < 0) { // worst case: there is no information about template, we will use chatml by default - curr_tmpl = "<|im_start|>"; // see llama_chat_apply_template_internal + curr_tmpl = "chatml"; // see llama_chat_apply_template_internal } else { curr_tmpl = std::string(model_template.data(), model_template.size()); } From 6d341ab6c53cd51f2921d986d0090cc8b049b39a Mon Sep 17 00:00:00 2001 From: Minsoo Cheong <54794500+mscheong01@users.noreply.github.com> Date: Tue, 5 Mar 2024 03:24:00 +0900 Subject: [PATCH 780/811] speculative : implement stochastic speculative sampling (#5625) * (WIP) Implement stochastic speculative decoding * sample from residual distribution on draft accept failure * fix #5657: force greedy sampling with probs when temp is 0 * remove p_accept parameter * fix style * remove unused variables * add srand() in speculative.cpp * replace use of rand() with mt19937 sampling * fixes based on review (@JohannesGaessler) * fix r random generation * randomly select next sequence to verify + fix bug in memory freeing * fix bug in active_seqs sync * fix uniform int distribution initialization * remove warnings from comparison between int and size_t * check grammar in `llama_sample_probability_distribution_impl` * remove malloc code by utilizing vectors * add PR link to README --- common/common.cpp | 7 - common/common.h | 3 +- common/sampling.cpp | 79 ++++++++++ common/sampling.h | 7 + examples/speculative/README.md | 1 + examples/speculative/speculative.cpp | 224 ++++++++++++++++++++------- 6 files changed, 260 insertions(+), 61 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index dbe7e9229..036a98134 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -513,12 +513,6 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_sequences = std::stoi(argv[i]); - } else if (arg == "--p-accept" || arg == "-pa") { - if (++i >= argc) { - invalid_param = true; - break; - } - params.p_accept = std::stof(argv[i]); } else if (arg == "--p-split" || arg == "-ps") { if (++i >= argc) { invalid_param = true; @@ -1044,7 +1038,6 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --chunks N max number of chunks to process (default: %d, -1 = all)\n", params.n_chunks); printf(" -np N, --parallel N number of parallel sequences to decode (default: %d)\n", params.n_parallel); printf(" -ns N, --sequences N number of sequences to decode (default: %d)\n", params.n_sequences); - printf(" -pa N, --p-accept N speculative decoding accept probability (default: %.1f)\n", (double)params.p_accept); printf(" -ps N, --p-split N speculative decoding split probability (default: %.1f)\n", (double)params.p_split); printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA. 
see examples/llava/README.md\n"); diff --git a/common/common.h b/common/common.h index b2868833b..977ce419f 100644 --- a/common/common.h +++ b/common/common.h @@ -53,11 +53,10 @@ struct gpt_params { int32_t n_ctx = 512; // context size int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 8; // number of tokens to draft during speculative decoding + int32_t n_draft = 5; // number of tokens to draft during speculative decoding int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) int32_t n_parallel = 1; // number of parallel sequences to decode int32_t n_sequences = 1; // number of sequences to decode - float p_accept = 0.5f; // speculative decoding accept probability float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) diff --git a/common/sampling.cpp b/common/sampling.cpp index e67096bea..823031feb 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -295,6 +295,77 @@ static llama_token llama_sampling_sample_impl( return id; } +static llama_token_data_array llama_sample_probability_distribution_impl( + struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_main, + struct llama_context * ctx_cfg, + const int idx) { + const llama_sampling_params & params = ctx_sampling->params; + + const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); + + const int32_t penalty_last_n = params.penalty_last_n < 0 ? params.n_prev : params.penalty_last_n; + const float penalty_repeat = params.penalty_repeat; + const float penalty_freq = params.penalty_freq; + const float penalty_present = params.penalty_present; + const bool penalize_nl = params.penalize_nl; + + auto & prev = ctx_sampling->prev; + auto & cur = ctx_sampling->cur; + + // Get a pointer to the logits + float * logits = llama_get_logits_ith(ctx_main, idx); + + // Declare original_logits at the beginning of the function scope + std::vector original_logits; + + // apply params.logit_bias map + for (auto it = params.logit_bias.begin(); it != params.logit_bias.end(); it++) { + logits[it->first] += it->second; + } + + if (ctx_cfg) { + float * logits_guidance = llama_get_logits_ith(ctx_cfg, idx); + llama_sample_apply_guidance(ctx_main, logits, logits_guidance, params.cfg_scale); + } + + cur.clear(); + + for (llama_token token_id = 0; token_id < n_vocab; token_id++) { + cur.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f}); + } + + llama_token_data_array cur_p = { cur.data(), cur.size(), false }; + + // apply penalties + const auto& penalty_tokens = params.use_penalty_prompt_tokens ? 
params.penalty_prompt_tokens : prev; + const int penalty_tokens_used_size = std::min((int)penalty_tokens.size(), penalty_last_n); + if (penalty_tokens_used_size) { + const float nl_logit = logits[llama_token_nl(llama_get_model(ctx_main))]; + + llama_sample_repetition_penalties(ctx_main, &cur_p, + penalty_tokens.data() + penalty_tokens.size() - penalty_tokens_used_size, + penalty_tokens_used_size, penalty_repeat, penalty_freq, penalty_present); + + if (!penalize_nl) { + for (size_t idx = 0; idx < cur_p.size; idx++) { + if (cur_p.data[idx].id == llama_token_nl(llama_get_model(ctx_main))) { + cur_p.data[idx].logit = nl_logit; + break; + } + } + } + } + + // apply grammar checks + if (ctx_sampling->grammar != NULL) { + llama_sample_grammar(ctx_main, &cur_p, ctx_sampling->grammar); + } + + llama_sample_softmax(ctx_main, &cur_p); + return cur_p; +} + llama_token llama_sampling_sample( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, @@ -304,6 +375,14 @@ llama_token llama_sampling_sample( return llama_sampling_sample_impl(ctx_sampling, ctx_main, ctx_cfg, idx, false); } +llama_token_data_array llama_sampling_probability_distribution( + struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_main, + struct llama_context * ctx_cfg, + const int idx) { + return llama_sample_probability_distribution_impl(ctx_sampling,ctx_main, ctx_cfg, idx); +} + void llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, diff --git a/common/sampling.h b/common/sampling.h index 95d875394..48b2459d1 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -131,6 +131,13 @@ llama_token llama_sampling_sample( struct llama_context * ctx_cfg, int idx = 0); +// returns the probability that token of given id will be sampled +llama_token_data_array llama_sampling_probability_distribution( + struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_main, + struct llama_context * ctx_cfg, + int idx = 0); + void llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, diff --git a/examples/speculative/README.md b/examples/speculative/README.md index 814efa592..a6608c5fe 100644 --- a/examples/speculative/README.md +++ b/examples/speculative/README.md @@ -6,3 +6,4 @@ More info: - https://github.com/ggerganov/llama.cpp/pull/2926 - https://github.com/ggerganov/llama.cpp/pull/3624 +- https://github.com/ggerganov/llama.cpp/pull/5625 diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 3848791d4..85bc0a762 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -5,6 +5,7 @@ #include #include #include +#include #define SPEC_VOCAB_MAX_SIZE_DIFFERENCE 100 #define SPEC_VOCAB_CHECK_START_TOKEN_ID 5 @@ -18,6 +19,7 @@ struct seq_draft { std::vector i_batch_tgt; std::vector tokens; + std::vector> dists; struct llama_sampling_context * ctx_sampling; }; @@ -37,12 +39,15 @@ int main(int argc, char ** argv) { // max number of parallel drafting sequences (i.e. 
tree branches) const int n_seq_dft = params.n_parallel; - // probability threshold for accepting a token from the draft model - const float p_accept = params.p_accept; - // probability threshold for splitting a draft branch (only for n_seq_dft > 1) const float p_split = params.p_split; + if (params.seed == LLAMA_DEFAULT_SEED) { + params.seed = time(NULL); + } + std::default_random_engine rng(params.seed); + std::uniform_real_distribution<> u_dist; + #ifndef LOG_DISABLE_LOGS log_set_target(log_filename_generator("speculative", "log")); LOG_TEE("Log start\n"); @@ -166,7 +171,9 @@ int main(int argc, char ** argv) { std::vector drafts(n_seq_dft); params.sparams.grammar.clear(); // the draft samplers will copy the target sampler's grammar - params.sparams.temp = -1.0f; // force greedy sampling with probs for the draft model + if (params.sparams.temp == 0) { + params.sparams.temp = -1.0f; // force greedy sampling with probs for the draft model + } for (int s = 0; s < n_seq_dft; ++s) { drafts[s].ctx_sampling = llama_sampling_init(params.sparams); @@ -182,12 +189,15 @@ int main(int argc, char ** argv) { drafts[0].i_batch_tgt[0] = 0; while (true) { + std::set active_seqs = {}; + // print current draft sequences for (int s = 0; s < n_seq_dft; ++s) { if (!drafts[s].active) { continue; } + active_seqs.insert(s); const auto & tokens = drafts[s].tokens; LOG("draft %d: %s\n", s, LOG_TOKENS_TOSTR_PRETTY(ctx_dft, tokens).c_str()); @@ -196,48 +206,156 @@ int main(int argc, char ** argv) { int i_dft = 0; int s_keep = 0; + llama_token token_id; + std::string token_str; + + // loop until we fail to accept a drafted token or we run out of drafted tokens while (true) { - LOG("sampling target: s_keep = %3d, i_dft = %3d, i_batch_tgt = %3d\n", s_keep, i_dft, drafts[s_keep].i_batch_tgt[i_dft]); - - // sample from the target model - llama_token id = llama_sampling_sample(ctx_sampling, ctx_tgt, NULL, drafts[s_keep].i_batch_tgt[i_dft]); - - llama_sampling_accept(ctx_sampling, ctx_tgt, id, true); - - //LOG("last: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx_tgt, ctx_sampling->prev).c_str()); - - const std::string token_str = llama_token_to_piece(ctx_tgt, id); - - if (!params.use_color) { - printf("%s", token_str.c_str()); - } - - if (id == llama_token_eos(model_tgt)) { - has_eos = true; - } - - ++n_predict; // check if the target token matches any of the drafts + // for stochastic sampling, attempt to match the token with the drafted tokens { - bool matches = false; + bool accept = false; + if (params.sparams.temp > 0) { + // stochastic verification - for (int s = 0; s < n_seq_dft; ++s) { - if (!drafts[s].active) { - continue; + llama_token_data_array dist_tgt = llama_sampling_probability_distribution(ctx_sampling, ctx_tgt, NULL, drafts[s_keep].i_batch_tgt[i_dft]); + float p_tgt = 0, p_dft = 0; + + // GGML_ASSERT(dist_tgt.size() == dist_dft.size()); + + while (active_seqs.size() > 0) { + // randomly select a sequence to verify from active sequences + std::uniform_int_distribution u_int_dist(0, active_seqs.size() - 1); + int s = *std::next(active_seqs.begin(), u_int_dist(rng)); + if (i_dft >= (int) drafts[s].tokens.size()) { + drafts[s].active = false; + active_seqs.erase(s); + continue; + } + if (accept) { + // if we already accepted a token, we can skip the rest + if (drafts[s].tokens[i_dft] != drafts[s_keep].tokens[i_dft]) { + drafts[s].active = false; + active_seqs.erase(s); + } + continue; + } + LOG("verifying sequence #%d at pos #%d from %d active sequence(s)\n", s, i_dft, (int) active_seqs.size()); + float r = u_dist(rng); 
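+                        // stochastic acceptance test (speculative sampling): the drafted token is kept
+                        // with probability min(1, p_tgt / p_dft); on rejection the target distribution is
+                        // replaced by the renormalized residual max(0, p_tgt - p_dft) and, if no draft
+                        // sequence is accepted, the final token is sampled from that residual below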
+ llama_token_data_array dist_dft = { drafts[s].dists[i_dft].data() , drafts[s].dists[i_dft].size(), true }; + // acquire the token probabilities assigned by the draft and target models + for (size_t i = 0; i < dist_tgt.size; i++) { + if (dist_tgt.data[i].id == drafts[s].tokens[i_dft]) { + p_tgt = dist_tgt.data[i].p; + } + if (dist_dft.data[i].id == drafts[s].tokens[i_dft]) { + p_dft = dist_dft.data[i].p; + } + if (p_tgt && p_dft) { + break; + } + } + LOG("r = %f, p_dft = %f, p_tgt = %f\n", r, p_dft, p_tgt); + if (r <= p_tgt / p_dft) { + s_keep = s; + accept = true; + token_id = drafts[s].tokens[i_dft]; + token_str = llama_token_to_piece(ctx_tgt, token_id); + llama_sampling_accept(ctx_sampling, ctx_tgt, token_id, true); + + LOG("draft token %d of sequence %d (%d, '%s') accepted\n", i_dft, s, token_id, token_str.c_str()); + break; + } else { + LOG("draft token %d of sequence %d (%d, '%s') rejected\n", i_dft, s, drafts[s].tokens[i_dft], llama_token_to_piece(ctx_tgt, drafts[s].tokens[i_dft]).c_str()); + drafts[s].active = false; + + // calculate residual probability + GGML_ASSERT(dist_tgt.sorted); + GGML_ASSERT(dist_dft.sorted); + float sum_probs = 0.0f; + + // sort dist by id + std::sort(dist_tgt.data, dist_tgt.data + dist_tgt.size, [](const llama_token_data &a, const llama_token_data &b) { + return a.id < b.id; + }); + std::sort(dist_dft.data, dist_dft.data + dist_dft.size, [](const llama_token_data &a, const llama_token_data &b) { + return a.id < b.id; + }); + + for (size_t i = 0; i < dist_tgt.size; i++) { + dist_tgt.data[i].p = std::max(0.0f, dist_tgt.data[i].p - dist_dft.data[i].p); + sum_probs += dist_tgt.data[i].p; + } + for (size_t i = 0; i < dist_tgt.size; i++) { + dist_tgt.data[i].p /= sum_probs; + } + + // sort dist_tgt by p desc + std::sort(dist_tgt.data, dist_tgt.data + dist_tgt.size, [](const llama_token_data &a, const llama_token_data &b) { + return a.p > b.p; + }); + } + + active_seqs.erase(s); + for(int i = 0; i < n_seq_dft; i++) { + if (i == s) { + continue; + } + if (drafts[i].tokens[i_dft] == drafts[s].tokens[i_dft]) { + // synchronize active status for sequences with the same drafted token + drafts[i].active = drafts[i].active && accept; + if (!drafts[i].active) { + active_seqs.erase(s); + } + } + } } - if (i_dft < (int) drafts[s].tokens.size() && id == drafts[s].tokens[i_dft]) { - LOG("the sampled target token matches the %dth drafted token of sequence %d (%d, '%s') - accepted\n", i_dft, s, id, token_str.c_str()); + if (!accept) { + // all drafted tokens were rejected + // sample from the target model + LOG("all drafted tokens were rejected, sampling from residual distribution\n"); + token_id = llama_sample_token(ctx_tgt, &dist_tgt); + llama_sampling_accept(ctx_sampling, ctx_tgt, token_id, true); + token_str = llama_token_to_piece(ctx_tgt, token_id); + } - s_keep = s; - matches = true; - } else { - drafts[s].active = false; + } else { + // greedy verification + + // sample from the target model + LOG("sampling target: s_keep = %3d, i_dft = %3d, i_batch_tgt = %3d\n", s_keep, i_dft, drafts[s_keep].i_batch_tgt[i_dft]); + token_id = llama_sampling_sample(ctx_sampling, ctx_tgt, NULL, drafts[s_keep].i_batch_tgt[i_dft]); + + llama_sampling_accept(ctx_sampling, ctx_tgt, token_id, true); + + //LOG("last: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx_tgt, ctx_sampling->prev).c_str()); + + token_str = llama_token_to_piece(ctx_tgt, token_id); + + for (int s = 0; s < n_seq_dft; ++s) { + if (!drafts[s].active) { + continue; + } + + if (i_dft < (int) drafts[s].tokens.size() && token_id == 
drafts[s].tokens[i_dft]) { + LOG("the sampled target token matches the %dth drafted token of sequence %d (%d, '%s') - accepted\n", i_dft, s, token_id, token_str.c_str()); + + s_keep = s; + accept = true; + } else { + drafts[s].active = false; + } } } - if (matches) { + if (token_id == llama_token_eos(model_tgt)) { + has_eos = true; + } + ++n_predict; + + if (accept) { ++n_accept; ++n_past_tgt; ++n_past_dft; @@ -245,17 +363,21 @@ int main(int argc, char ** argv) { if (params.use_color) { // Color token according to its origin sequence printf("\u001b[%dm%s\u001b[37m", (36 - s_keep % 6), token_str.c_str()); - fflush(stdout); + } else { + printf("%s", token_str.c_str()); } + fflush(stdout); continue; + } else { + printf("%s", token_str.c_str()); + fflush(stdout); + break; } } - if (params.use_color) { - printf("%s", token_str.c_str()); - } - fflush(stdout); + } - LOG("the sampled target token (%d, '%s') did not match, or we ran out of drafted tokens\n", id, token_str.c_str()); + { + LOG("the sampled target token (%d, '%s') did not match, or we ran out of drafted tokens\n", token_id, token_str.c_str()); // TODO: simplify { @@ -275,21 +397,21 @@ int main(int argc, char ** argv) { drafts[s].active = false; drafts[s].tokens.clear(); drafts[s].i_batch_tgt.clear(); + drafts[s].dists.clear(); } // note: will be erased after the speculation phase - drafts[0].tokens.push_back(id); + drafts[0].tokens.push_back(token_id); + drafts[0].dists.push_back(std::vector()); drafts[0].i_batch_tgt.push_back(0); llama_batch_clear(batch_dft); - llama_batch_add (batch_dft, id, n_past_dft, { 0 }, true); + llama_batch_add (batch_dft, token_id, n_past_dft, { 0 }, true); llama_kv_cache_seq_rm(ctx_dft, 0, n_past_dft, -1); // LOG("dft batch: %s\n", LOG_BATCH_TOSTR_PRETTY(ctx_dft, batch_dft).c_str()); - llama_decode (ctx_dft, batch_dft); + llama_decode(ctx_dft, batch_dft); ++n_past_dft; - - break; } if (n_predict > params.n_predict || has_eos) { @@ -334,12 +456,6 @@ int main(int argc, char ** argv) { k, s, i, cur_p[k].id, cur_p[k].p, llama_token_to_piece(ctx_dft, cur_p[k].id).c_str()); } - if (cur_p[0].p < p_accept) { - LOG("stopping drafting for seq %3d, probability too low: %.3f < %.3f\n", s, cur_p[0].p, p_accept); - drafts[s].drafting = false; - continue; - } - std::vector sa(1, s); // attempt to split the branch if the probability is high enough @@ -367,6 +483,7 @@ int main(int argc, char ** argv) { drafts[n_seq_cur].skip = true; drafts[n_seq_cur].tokens = drafts[s].tokens; + drafts[n_seq_cur].dists = drafts[s].dists; drafts[n_seq_cur].i_batch_dft = drafts[s].i_batch_dft; drafts[n_seq_cur].i_batch_tgt = drafts[s].i_batch_tgt; @@ -389,6 +506,8 @@ int main(int argc, char ** argv) { llama_sampling_accept(drafts[s].ctx_sampling, ctx_dft, id, true); drafts[s].tokens.push_back(id); + // save cur_p.data into drafts[s].dists + drafts[s].dists.push_back(cur_p); // add unique drafted tokens to the target batch drafts[s].i_batch_tgt.push_back(batch_tgt.n_tokens); @@ -440,6 +559,7 @@ int main(int argc, char ** argv) { } drafts[s].tokens.erase(drafts[s].tokens.begin()); + drafts[s].dists.erase(drafts[s].dists.begin()); } } From fe52be11e35358d2fd249f19d7ef5b6f9c08b16b Mon Sep 17 00:00:00 2001 From: Dane Madsen Date: Tue, 5 Mar 2024 05:26:55 +1100 Subject: [PATCH 781/811] cmake : handle cases where git index is not found in .git (#5844) * Update CMakeLists.txt * Update CMakeLists.txt --- common/CMakeLists.txt | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index 
f79acfef1..350bbdf7f 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -19,7 +19,12 @@ if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../.git") endif() endif() - set(GIT_INDEX "${GIT_DIR}/index") + if(EXISTS "${GIT_DIR}/index") + set(GIT_INDEX "${GIT_DIR}/index") + else() + message(WARNING "Git index not found in git repository.") + set(GIT_INDEX "") + endif() else() message(WARNING "Git repository not found; to enable automatic generation of build info, make sure Git is installed and the project is a Git repository.") set(GIT_INDEX "") From 9fa262734733573fa629ffc97dfcb971fe3f4832 Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Mon, 4 Mar 2024 10:05:42 +0100 Subject: [PATCH 782/811] ggml : introduce ggml_status (ggml/750) * using enum as an exit code instead of macros * update return type from enum to unsigned int * indentation fix * compound update ggml_compute_exit_code -> ggml_status changed ggml_status from a bit-field type to simple codes ggml_status to string cast * ggml_status to string cast * GGML_CALL was removed Co-authored-by: slaren --------- Co-authored-by: slaren Co-authored-by: Georgi Gerganov --- ggml-backend-impl.h | 7 ++++--- ggml-backend.c | 39 ++++++++++++++++++--------------------- ggml-backend.h | 31 ++++++++++++++++--------------- ggml-cuda.cu | 4 ++-- ggml-kompute.cpp | 4 ++-- ggml-metal.m | 8 ++++---- ggml-opencl.cpp | 4 ++-- ggml-sycl.cpp | 4 ++-- ggml-vulkan.cpp | 4 ++-- ggml.c | 29 +++++++++++++++++++++++------ ggml.h | 17 +++++++++++++---- 11 files changed, 88 insertions(+), 63 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 0e5bf0ae1..2e9ba58a9 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -91,13 +91,14 @@ extern "C" { // (optional) complete all pending operations void (*GGML_CALL synchronize)(ggml_backend_t backend); - // compute graph with a plan + // create a plan for ggml_cgraph and free it ggml_backend_graph_plan_t (*GGML_CALL graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); void (*GGML_CALL graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); - void (*GGML_CALL graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); + // compute graph with a plan + enum ggml_status (*GGML_CALL graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); // compute graph without a plan (async) - bool (*GGML_CALL graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); + enum ggml_status (*GGML_CALL graph_compute) (ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation bool (*GGML_CALL supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); diff --git a/ggml-backend.c b/ggml-backend.c index c86673b04..d60d98414 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -262,11 +262,11 @@ void ggml_backend_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_pla backend->iface.graph_plan_free(backend, plan); } -void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - backend->iface.graph_plan_compute(backend, plan); +enum ggml_status ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { + return backend->iface.graph_plan_compute(backend, plan); } -bool ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +enum ggml_status ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { return backend->iface.graph_compute(backend, cgraph); } 
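A minimal caller-side sketch of the return-type change above (illustrative only, not part of this patch): it assumes an already initialized ggml_backend_t and a built ggml_cgraph, and the helper name compute_graph_checked is invented for the example. The symbols it uses (ggml_backend_graph_compute, GGML_STATUS_SUCCESS, ggml_status_to_string) are the ones introduced or retyped by this commit.

    #include <stdbool.h>
    #include <stdio.h>
    #include "ggml.h"
    #include "ggml-backend.h"

    // returns true on success, otherwise prints the human-readable status and returns false
    static bool compute_graph_checked(ggml_backend_t backend, struct ggml_cgraph * graph) {
        const enum ggml_status status = ggml_backend_graph_compute(backend, graph);
        if (status != GGML_STATUS_SUCCESS) {
            fprintf(stderr, "ggml_backend_graph_compute failed: %s\n", ggml_status_to_string(status));
            return false;
        }
        return true;
    }
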
@@ -732,15 +732,15 @@ GGML_CALL static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, g GGML_UNUSED(backend); } -GGML_CALL static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +GGML_CALL static enum ggml_status ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; - ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); + return ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); GGML_UNUSED(backend); } -GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static enum ggml_status ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); @@ -755,8 +755,7 @@ GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, str cplan.abort_callback = cpu_ctx->abort_callback; cplan.abort_callback_data = cpu_ctx->abort_callback_data; - ggml_graph_compute(cgraph, &cplan); - return true; + return ggml_graph_compute(cgraph, &cplan); } GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -1437,7 +1436,7 @@ static bool ggml_backend_sched_alloc_splits(ggml_backend_sched_t sched) { return true; } -static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { +static enum ggml_status ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { uint64_t copy_us[GGML_MAX_BACKENDS] = {0}; uint64_t compute_us[GGML_MAX_BACKENDS] = {0}; @@ -1472,8 +1471,9 @@ static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { uint64_t compute_start_us = ggml_time_us(); if (!sched->callback_eval) { - if (!ggml_backend_graph_compute(split_backend, &split->graph)) { - return false; + enum ggml_status ec = ggml_backend_graph_compute(split_backend, &split->graph); + if (ec != GGML_STATUS_SUCCESS) { + return ec; } //ggml_backend_synchronize(split_backend); // necessary to measure compute time } else { @@ -1494,8 +1494,9 @@ static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { struct ggml_cgraph gv = ggml_graph_view(&split->graph, j0, j1 + 1); - if (!ggml_backend_graph_compute(split_backend, &gv)) { - return false; + enum ggml_status ec = ggml_backend_graph_compute(split_backend, &gv); + if (ec != GGML_STATUS_SUCCESS) { + return ec; } if (need && !sched->callback_eval(t, false, sched->callback_eval_user_data)) { @@ -1519,7 +1520,7 @@ static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { } #endif - return true; + return GGML_STATUS_SUCCESS; } ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size) { @@ -1581,7 +1582,7 @@ bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * return true; } -bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +enum ggml_status ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); if (!sched->is_reset) { @@ -1590,14 +1591,10 @@ bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cg 
ggml_backend_sched_split_graph(sched, graph); if (!ggml_backend_sched_alloc_splits(sched)) { - return false; + return GGML_STATUS_ALLOC_FAILED; } - if (!ggml_backend_sched_compute_splits(sched)) { - return false; - } - - return true; + return ggml_backend_sched_compute_splits(sched); } void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data) { diff --git a/ggml-backend.h b/ggml-backend.h index 8fb54bd92..8bed22578 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -66,12 +66,13 @@ extern "C" { GGML_API void ggml_backend_synchronize(ggml_backend_t backend); - GGML_API ggml_backend_graph_plan_t ggml_backend_graph_plan_create (ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API ggml_backend_graph_plan_t ggml_backend_graph_plan_create(ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API void ggml_backend_graph_plan_free (ggml_backend_t backend, ggml_backend_graph_plan_t plan); - GGML_API void ggml_backend_graph_plan_free (ggml_backend_t backend, ggml_backend_graph_plan_t plan); - GGML_API void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan); - GGML_API bool ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); - GGML_API bool ggml_backend_supports_op (ggml_backend_t backend, const struct ggml_tensor * op); + GGML_API enum ggml_status ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan); + GGML_API enum ggml_status ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); + + GGML_API bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op); // tensor copy between different backends GGML_API void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst); @@ -157,26 +158,26 @@ extern "C" { typedef bool (*ggml_backend_sched_eval_callback)(struct ggml_tensor * t, bool ask, void * user_data); // Initialize a backend scheduler - GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); - GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); + GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); + GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); // Initialize backend buffers from a measure graph - GGML_API bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + GGML_API bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); // Get the number of splits of the last graph - GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); + GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); - GGML_API size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend); + GGML_API size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend); - GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); - GGML_API ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); + GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API ggml_backend_t 
ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); // Allocate and compute graph on the backend scheduler - GGML_API bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API enum ggml_status ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); // Reset all assignments and allocators - must be called before changing the node backends - GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); + GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); // Set a callback to be called for each resulting node during graph compute - GGML_API void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data); + GGML_API void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data); // // Utils diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 7d027a30a..72bcec8cd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -12241,7 +12241,7 @@ GGML_CALL static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { UNUSED(backend); } -GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static enum ggml_status ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -12277,7 +12277,7 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg GGML_ASSERT(ok); } - return true; + return GGML_STATUS_SUCCESS; } GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { diff --git a/ggml-kompute.cpp b/ggml-kompute.cpp index e740a76d1..83a7822fd 100644 --- a/ggml-kompute.cpp +++ b/ggml-kompute.cpp @@ -1927,10 +1927,10 @@ static ggml_backend_buffer_type_t ggml_backend_kompute_get_default_buffer_type(g return ggml_backend_kompute_buffer_type(ctx->device); } -static bool ggml_backend_kompute_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static ggml_status ggml_backend_kompute_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { auto * ctx = static_cast(backend->context); ggml_vk_graph_compute(ctx, cgraph); - return true; + return GGML_STATUS_SUCCESS; } static bool ggml_backend_kompute_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { diff --git a/ggml-metal.m b/ggml-metal.m index 6b5a8fdf5..00df22838 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -748,7 +748,7 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const } } -static bool ggml_metal_graph_compute( +static enum ggml_status ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @@ -2484,7 +2484,7 @@ static bool ggml_metal_graph_compute( MTLCommandBufferStatus status = [command_buffer status]; if (status != MTLCommandBufferStatusCompleted) { GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); - return false; + return GGML_STATUS_FAILED; } } @@ -2493,7 +2493,7 @@ static bool ggml_metal_graph_compute( } } - return true; + return GGML_STATUS_SUCCESS; } //////////////////////////////////////////////////////////////////////////////// @@ -2795,7 +2795,7 @@ GGML_CALL static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffe UNUSED(backend); } 
-GGML_CALL static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static enum ggml_status ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; return ggml_metal_graph_compute(metal_ctx, cgraph); diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index df619a884..aa73d67df 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -2231,7 +2231,7 @@ static ggml_backend_buffer_type_t ggml_backend_opencl_get_default_buffer_type(gg GGML_UNUSED(backend); } -static bool ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggml_cgraph * graph) { +static ggml_status ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggml_cgraph * graph) { for (int i = 0; i < graph->n_nodes; ++i) { ggml_tensor * node = graph->nodes[i]; switch (node->op) { @@ -2246,7 +2246,7 @@ static bool ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggml_cgrap } } - return true; + return GGML_STATUS_SUCCESS; GGML_UNUSED(backend); } diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index cad08d610..47a605b01 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -15581,7 +15581,7 @@ catch (sycl::exception const &exc) { std::exit(1); } -GGML_CALL static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static ggml_status ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; ggml_sycl_set_main_device(sycl_ctx->device); @@ -15613,7 +15613,7 @@ GGML_CALL static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, gg GGML_ASSERT(ok); } - return true; + return GGML_STATUS_SUCCESS; } GGML_CALL static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_tensor * op) { diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index ae9cb3c1c..bc316c3f3 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -5092,7 +5092,7 @@ GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { ctx->transfer_ctx = nullptr; } -GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static ggml_status ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; for (int i = 0; i < cgraph->n_nodes; i++) { @@ -5135,7 +5135,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml ggml_vk_graph_cleanup(ctx); - return true; + return GGML_STATUS_SUCCESS; UNUSED(backend); } diff --git a/ggml.c b/ggml.c index 870e41614..3c2e94c1b 100644 --- a/ggml.c +++ b/ggml.c @@ -320,6 +320,16 @@ static ggml_fp16_t ggml_table_exp_f16[1 << 16]; // precomputed f32 table for f16 (256 KB) (ggml-impl.h) float ggml_table_f32_f16[1 << 16]; +const char * ggml_status_to_string(enum ggml_status status) { + switch (status) { + case GGML_STATUS_ALLOC_FAILED: return "GGML status: error (failed to allocate memory)"; + case GGML_STATUS_FAILED: return "GGML status: error (operation failed)"; + case GGML_STATUS_SUCCESS: return "GGML status: success"; + case GGML_STATUS_ABORTED: return "GGML status: warning (operation aborted)"; + default: GGML_ASSERT(false); + } +} + // note: do not use these inside ggml.c // these are meant to be used via the ggml.h API float ggml_fp16_to_fp32(ggml_fp16_t x) { @@ -17400,6 +17410,7 @@ struct ggml_compute_state { 
ggml_thread_t thrd; int ith; struct ggml_compute_state_shared * shared; + enum ggml_status ec; }; static void ggml_graph_compute_perf_stats_node(struct ggml_tensor * node, const struct ggml_compute_state_shared * st) { @@ -17693,7 +17704,8 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { while (true) { if (cplan->abort_callback && cplan->abort_callback(cplan->abort_callback_data)) { state->shared->node_n += 1; - return (thread_ret_t) GGML_EXIT_ABORTED; + state->ec = GGML_STATUS_ABORTED; + return 0; } if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { @@ -17815,7 +17827,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } } - return GGML_EXIT_SUCCESS; + return 0; } struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threads) { @@ -18011,7 +18023,7 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa return cplan; } -int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { +enum ggml_status ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { { GGML_ASSERT(cplan); GGML_ASSERT(cplan->n_threads > 0); @@ -18055,6 +18067,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { .thrd = 0, .ith = j, .shared = &state_shared, + .ec = GGML_STATUS_SUCCESS, }; const int rc = ggml_thread_create(&workers[j].thrd, NULL, ggml_graph_compute_thread, &workers[j]); @@ -18065,12 +18078,14 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { workers[0].ith = 0; workers[0].shared = &state_shared; + workers[0].ec = GGML_STATUS_SUCCESS; const int64_t perf_start_cycles = ggml_perf_cycles(); const int64_t perf_start_time_us = ggml_perf_time_us(); // this is a work thread too - int compute_status = (size_t) ggml_graph_compute_thread(&workers[0]); + ggml_graph_compute_thread(&workers[0]); + enum ggml_status compute_status = workers[0].ec; // don't leave affinity set on the main thread clear_numa_thread_affinity(); @@ -18080,6 +18095,8 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { for (int j = 1; j < n_threads; j++) { const int rc = ggml_thread_join(workers[j].thrd, NULL); GGML_ASSERT(rc == 0); + if (workers[j].ec != GGML_STATUS_SUCCESS) + compute_status = workers[j].ec; } } @@ -18107,14 +18124,14 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { return compute_status; } -void ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { +enum ggml_status ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { struct ggml_cplan cplan = ggml_graph_plan(cgraph, n_threads); struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; - ggml_graph_compute(cgraph, &cplan); + return ggml_graph_compute(cgraph, &cplan); } struct ggml_tensor * ggml_graph_get_tensor(struct ggml_cgraph * cgraph, const char * name) { diff --git a/ggml.h b/ggml.h index 98cfc7bf8..0ea4f8847 100644 --- a/ggml.h +++ b/ggml.h @@ -315,6 +315,16 @@ extern "C" { #endif + enum ggml_status { + GGML_STATUS_ALLOC_FAILED = -2, + GGML_STATUS_FAILED = -1, + GGML_STATUS_SUCCESS = 0, + GGML_STATUS_ABORTED = 1, + }; + + // get ggml_status name string + GGML_API GGML_CALL const char * ggml_status_to_string(enum ggml_status status); + typedef uint16_t ggml_fp16_t; // convert FP16 <-> FP32 @@ -1940,12 +1950,11 @@ extern "C" { // 
ggml_graph_plan() has to be called before ggml_graph_compute() // when plan.work_size > 0, caller must allocate memory for plan.work_data - GGML_API struct ggml_cplan ggml_graph_plan (const struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); - GGML_API int ggml_graph_compute( struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); - + GGML_API struct ggml_cplan ggml_graph_plan (const struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); + GGML_API enum ggml_status ggml_graph_compute ( struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); // same as ggml_graph_compute() but the work data is allocated as a part of the context // note: the drawback of this API is that you must have ensured that the context has enough memory for the work data - GGML_API void ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads); + GGML_API enum ggml_status ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads); GGML_API struct ggml_tensor * ggml_graph_get_tensor(struct ggml_cgraph * cgraph, const char * name); From efd8533ef8d0752cef7119eb5dbee412c4dba270 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 4 Mar 2024 11:06:39 +0200 Subject: [PATCH 783/811] sync : ggml ggml-ci --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 4a1b0bab4..afe68077d 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -274680868e12427373bab4bec87554431b954704 +8695910a39102609073d0e099aa7c97d6bcb3bf9 From a1c6d96ed8f906aa1cda439f7386b1171a22bf9f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 4 Mar 2024 20:53:27 +0200 Subject: [PATCH 784/811] ggml : fix unknown status (#0) --- ggml.c | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/ggml.c b/ggml.c index 3c2e94c1b..6a10bbcb4 100644 --- a/ggml.c +++ b/ggml.c @@ -323,11 +323,12 @@ float ggml_table_f32_f16[1 << 16]; const char * ggml_status_to_string(enum ggml_status status) { switch (status) { case GGML_STATUS_ALLOC_FAILED: return "GGML status: error (failed to allocate memory)"; - case GGML_STATUS_FAILED: return "GGML status: error (operation failed)"; - case GGML_STATUS_SUCCESS: return "GGML status: success"; - case GGML_STATUS_ABORTED: return "GGML status: warning (operation aborted)"; - default: GGML_ASSERT(false); + case GGML_STATUS_FAILED: return "GGML status: error (operation failed)"; + case GGML_STATUS_SUCCESS: return "GGML status: success"; + case GGML_STATUS_ABORTED: return "GGML status: warning (operation aborted)"; } + + return "GGML status: unknown"; } // note: do not use these inside ggml.c From e0843afe1b37890b631bc7d3d2da2ed36c862b91 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 4 Mar 2024 21:50:50 +0200 Subject: [PATCH 785/811] flake : fix --- convert-hf-to-gguf.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index ffdba7444..f6369af38 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -36,8 +36,10 @@ class SentencePieceTokenTypes(IntEnum): UNUSED = 5 BYTE = 6 + AnyModel = TypeVar("AnyModel", bound="type[Model]") + class Model(ABC): _model_classes: dict[str, type[Model]] = {} @@ -187,6 +189,7 @@ class Model(ABC): @classmethod def register(cls, *names: str) -> Callable[[AnyModel], AnyModel]: assert names + def func(modelcls: type[Model]): for name in names: cls._model_classes[name] = modelcls 
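The ggml sync above changes the graph compute entry points, `ggml_graph_compute()`, `ggml_graph_compute_with_ctx()`, `ggml_backend_graph_compute()` and `ggml_backend_sched_graph_compute()`, to return `enum ggml_status` instead of `bool`/`int`, and adds `ggml_status_to_string()` for diagnostics. Below is a minimal illustrative sketch, not part of the patch series, of how a caller might consume the new status codes; it assumes a tiny F32 add graph computed on the CPU with the context-managed work buffer, and the thread count is arbitrary.

```c
// sketch: checking the enum ggml_status return value introduced by the sync above
#include "ggml.h"
#include <stdio.h>

int main(void) {
    // small context that also holds tensor data and the work buffer
    struct ggml_init_params params = {
        /*.mem_size   =*/ 16*1024*1024,
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ false,
    };
    struct ggml_context * ctx = ggml_init(params);

    // build a trivial graph: c = a + b
    struct ggml_tensor * a = ggml_set_f32(ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 4), 1.0f);
    struct ggml_tensor * b = ggml_set_f32(ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 4), 2.0f);
    struct ggml_tensor * c = ggml_add(ctx, a, b);

    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, c);

    // previously returned void; now reports success, failure, or abort
    const enum ggml_status status = ggml_graph_compute_with_ctx(ctx, gf, /*n_threads =*/ 4);
    if (status != GGML_STATUS_SUCCESS) {
        fprintf(stderr, "graph compute failed: %s\n", ggml_status_to_string(status));
    }

    ggml_free(ctx);
    return status == GGML_STATUS_SUCCESS ? 0 : 1;
}
```

The same check applies on the backend path: `ggml_backend_graph_compute()` and `ggml_backend_sched_graph_compute()` now surface `enum ggml_status` values such as `GGML_STATUS_ALLOC_FAILED` instead of a bare `false`.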
From 29ae62d2ae163e2b68aa0ad3bf2ab4636de0c957 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 4 Mar 2024 22:31:20 +0200 Subject: [PATCH 786/811] llama : fix embeddings (#5796) * llama : fix embeddings ggml-ci * llama : do not use KV cache for non-causal models ggml-ci * embeddings : fix llama_batch_init arg * llama : add pooling switch * llama : distinguish token vs sequence embeddings ggml-ci * llama : assert pooling tensor * llama : simplify causal mask condition ggml-ci * llama : assert input batch with pooling enabled * readme : update API changes list --- README.md | 1 + common/common.cpp | 2 +- examples/embedding/embedding.cpp | 28 ++- examples/server-embd.py | 34 +++ examples/server/server.cpp | 53 ++++- llama.cpp | 357 +++++++++++++++++++++---------- llama.h | 18 +- 7 files changed, 359 insertions(+), 134 deletions(-) create mode 100644 examples/server-embd.py diff --git a/README.md b/README.md index 45c5d06f3..f754022de 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Recent API changes +- [2024 Mar 4] Embeddings API updated https://github.com/ggerganov/llama.cpp/pull/5796 - [2024 Mar 3] `struct llama_context_params` https://github.com/ggerganov/llama.cpp/pull/5849 ### Hot topics diff --git a/common/common.cpp b/common/common.cpp index 036a98134..c244db644 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1292,7 +1292,7 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; cparams.seed = params.seed; cparams.logits_all = params.logits_all; - cparams.embedding = params.embedding; + cparams.embeddings = params.embedding; cparams.rope_scaling_type = params.rope_scaling_type; cparams.rope_freq_base = params.rope_freq_base; cparams.rope_freq_scale = params.rope_freq_scale; diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index acff715e9..ff5883da6 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -19,11 +19,11 @@ static std::vector split_lines(const std::string & s) { static void batch_add_seq(llama_batch & batch, const std::vector & tokens, int seq_id) { for (size_t i = 0; i < tokens.size(); i++) { - llama_batch_add(batch, tokens[i], i, { seq_id }, false); + llama_batch_add(batch, tokens[i], i, { seq_id }, i == tokens.size() - 1); } } -static void normalize(float * vec, float * out, int n) { +static void normalize(const float * vec, float * out, int n) { float norm = 0; for (int i = 0; i < n; i++) { norm += vec[i] * vec[i]; @@ -45,10 +45,23 @@ static void batch_decode(llama_context * ctx, llama_batch & batch, float * outpu } // normalize on copy - for (int k = 0; k < n_seq; k++) { - float * emb = llama_get_embeddings_ith(ctx, k); - float * out = output + k * n_embd; - normalize(emb, out, n_embd); + for (int i = 0; i < batch.n_tokens; i++) { + if (!batch.logits[i]) { + continue; + } + + // try to get sequence embeddings - supported only when pooling_type is not NONE + const float * embd = llama_get_embeddings_seq(ctx, batch.seq_id[i][0]); + if (embd == NULL) { + embd = llama_get_embeddings_ith(ctx, i); + if (embd == NULL) { + fprintf(stderr, "%s: failed to get embeddings for token %d\n", __func__, i); + continue; + } + } + + float * out = output + batch.seq_id[i][0] * n_embd; + normalize(embd, out, n_embd); } } @@ -132,7 +145,7 @@ int main(int argc, char ** argv) { // initialize 
batch const int n_prompts = prompts.size(); - struct llama_batch batch = llama_batch_init(n_batch, 0, n_prompts); + struct llama_batch batch = llama_batch_init(n_batch, 0, 1); // allocate output const int n_embd = llama_n_embd(model); @@ -145,6 +158,7 @@ int main(int argc, char ** argv) { for (int k = 0; k < n_prompts; k++) { // clamp to n_batch tokens auto & inp = inputs[k]; + const uint64_t n_toks = inp.size(); // encode if at capacity diff --git a/examples/server-embd.py b/examples/server-embd.py new file mode 100644 index 000000000..c5c4ea87b --- /dev/null +++ b/examples/server-embd.py @@ -0,0 +1,34 @@ +import asyncio +import requests +import numpy as np + +n = 8 + +result = [] + +async def requests_post_async(*args, **kwargs): + return await asyncio.to_thread(requests.post, *args, **kwargs) + +async def main(): + model_url = "http://127.0.0.1:6900" + responses: list[requests.Response] = await asyncio.gather(*[requests_post_async( + url= f"{model_url}/embedding", + json= {"content": str(i)*1024} + ) for i in range(n)]) + + for response in responses: + embedding = response.json()["embedding"] + print(embedding[-8:]) + result.append(embedding) + +asyncio.run(main()) + +# compute cosine similarity + +for i in range(n-1): + for j in range(i+1, n): + embedding1 = np.array(result[i]) + embedding2 = np.array(result[j]) + similarity = np.dot(embedding1, embedding2) / (np.linalg.norm(embedding1) * np.linalg.norm(embedding2)) + print(f"Similarity between {i} and {j}: {similarity:.2f}") + diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 208edd571..8fe5e0b19 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1210,7 +1210,7 @@ struct llama_server_context queue_results.send(res); } - void send_embedding(server_slot &slot) + void send_embedding(server_slot & slot, const llama_batch & batch) { task_result res; res.id = slot.task_id; @@ -1219,6 +1219,7 @@ struct llama_server_context res.stop = true; const int n_embd = llama_n_embd(model); + if (!params.embedding) { LOG_WARNING("embedding disabled", {{"params.embedding", params.embedding}}); @@ -1229,12 +1230,29 @@ struct llama_server_context } else { - const float *data = llama_get_embeddings(ctx); - std::vector embedding(data, data + n_embd); - res.result_json = json - { - {"embedding", embedding}, - }; + for (int i = 0; i < batch.n_tokens; ++i) { + if (!batch.logits[i] || batch.seq_id[i][0] != slot.id) { + continue; + } + + const float * embd = llama_get_embeddings_seq(ctx, batch.seq_id[i][0]); + if (embd == NULL) { + embd = llama_get_embeddings_ith(ctx, i); + if (embd == NULL) { + LOG_ERROR("failed to get embeddings for token", {{"token", batch.token[i]}, {"seq_id", batch.seq_id[i][0]}}); + res.result_json = json + { + {"embedding", std::vector(n_embd, 0.0f)}, + }; + continue; + } + } + + res.result_json = json + { + {"embedding", std::vector(embd, embd + n_embd)}, + }; + } } queue_results.send(res); } @@ -1845,7 +1863,7 @@ struct llama_server_context ga_i += ga_w/ga_n; } } - llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot_npast, {slot.id }, false); + llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot_npast, { slot.id }, false); slot_npast++; } @@ -1881,7 +1899,7 @@ struct llama_server_context for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) { - const int32_t n_tokens = std::min(n_batch, (int32_t) (batch.n_tokens - i)); + const int32_t n_tokens = std::min(n_batch, batch.n_tokens - i); for (auto & slot : slots) { @@ -1954,7 
+1972,7 @@ struct llama_server_context // prompt evaluated for embedding if (slot.embedding) { - send_embedding(slot); + send_embedding(slot, batch_view); slot.release(); slot.i_batch = -1; continue; @@ -2036,6 +2054,8 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); + printf(" --pooling {none,mean,cls}\n"); + printf(" pooling type for embeddings, use model default if unspecified\n"); printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); @@ -2276,6 +2296,18 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.yarn_beta_slow = std::stof(argv[i]); } + else if (arg == "--pooling") + { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "none") { params.pooling_type = LLAMA_POOLING_TYPE_NONE; } + else if (value == "mean") { params.pooling_type = LLAMA_POOLING_TYPE_MEAN; } + else if (value == "cls") { params.pooling_type = LLAMA_POOLING_TYPE_CLS; } + else { invalid_param = true; break; } + } else if (arg == "--threads" || arg == "-t") { if (++i >= argc) @@ -2330,7 +2362,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, break; } params.n_batch = std::stoi(argv[i]); - params.n_batch = std::min(512, params.n_batch); } else if (arg == "--gpu-layers" || arg == "-ngl" || arg == "--n-gpu-layers") { diff --git a/llama.cpp b/llama.cpp index de579d9e3..76afcbc13 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1665,7 +1665,7 @@ struct llama_hparams { }; struct llama_cparams { - uint32_t n_ctx; // context size used during inference + uint32_t n_ctx; // context size used during inference uint32_t n_batch; uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing @@ -1682,7 +1682,9 @@ struct llama_cparams { float yarn_beta_slow; float defrag_thold; + bool embeddings; bool offload_kqv; + enum llama_pooling_type pooling_type; ggml_backend_sched_eval_callback cb_eval; @@ -1972,7 +1974,7 @@ struct llama_context { int32_t n_p_eval = 0; // number of tokens in eval calls for the prompt (with batch size > 1) int32_t n_eval = 0; // number of eval calls - // decode output (2-dimensional array: [n_tokens][n_vocab]) + // logits output (2-dimensional array: [n_tokens][n_vocab]) std::vector logits; #ifndef NDEBUG // guard against access to unset logits @@ -1980,8 +1982,13 @@ struct llama_context { #endif bool logits_all = false; - // input embedding (1-dimensional array: [n_embd]) - std::vector embedding; + // embeddings output (2-dimensional array: [n_tokens][n_embd]) + // populated only when pooling_type == LLAMA_POOLING_TYPE_NONE + std::vector embd; + + // sequence embeddings output (map of [n_embd] vectors) + // populated only when pooling_type != LLAMA_POOLING_TYPE_NONE + std::map> embd_seq; // memory buffers used to evaluate the model std::vector buf_compute_meta; @@ -5092,6 +5099,7 @@ static struct ggml_tensor * llm_build_kv( llm_build_kv_store(ctx, hparams, 
kv, graph, k_cur, v_cur, n_ctx, n_tokens, kv_head, cb, il); struct ggml_tensor * cur; + cur = llm_build_kqv(ctx, model, hparams, kv, graph, wo, wo_b, q_cur, kq_mask, kq_pos, n_ctx, n_tokens, n_kv, kq_scale, cb, il); cb(cur, "kqv_out", il); @@ -6085,6 +6093,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); struct ggml_tensor * cur; @@ -6092,9 +6101,10 @@ struct llm_build_context { // get input vectors with right size const size_t stride1 = n_tokens * ggml_type_size(lctx.inp_tokens->type); - struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); struct ggml_tensor * inp_mean = ggml_view_2d(ctx0, lctx.inp_mean, n_tokens, n_tokens, stride1, 0); - struct ggml_tensor * inp_cls = ggml_view_1d(ctx0, lctx.inp_cls, n_tokens, 0); + struct ggml_tensor * inp_cls = ggml_view_1d(ctx0, lctx.inp_cls, n_tokens, 0); // construct input embeddings (token, type, position) inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); @@ -6112,39 +6122,38 @@ struct llm_build_context { cb(inpL, "inp_norm", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); - cb(KQ_mask, "KQ_mask", -1); // [n_kv, n_tokens] + struct ggml_tensor * KQ_mask = ggml_cont(ctx0, ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_tokens, n_tokens, n_tokens*ggml_type_size(lctx.inp_KQ_mask->type), 0)); + cb(KQ_mask, "KQ_mask", -1); // [n_tokens, n_tokens] // iterate layers for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * cur = inpL; + struct ggml_tensor * Qcur; + struct ggml_tensor * Kcur; + struct ggml_tensor * Vcur; + // self-attention if (model.arch == LLM_ARCH_BERT) { - struct ggml_tensor * Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); + Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); cb(Qcur, "Qcur", il); - struct ggml_tensor * Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, cur), model.layers[il].bk); + Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, cur), model.layers[il].bk); cb(Kcur, "Kcur", il); - struct ggml_tensor * Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, cur), model.layers[il].bv); + Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, cur), model.layers[il].bv); cb(Vcur, "Vcur", il); - // seems like we just need to do this for Q? 
- Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); - cb(cur, "kqv_out", il); + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); } else { // compute Q and K and RoPE them cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); cb(cur, "wqkv", il); - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); @@ -6163,13 +6172,41 @@ struct llm_build_context { ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); - - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); - cb(cur, "kqv_out", il); } + struct ggml_tensor * q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); + struct ggml_tensor * k = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 0, 2, 1, 3)); + + struct ggml_tensor * kq = ggml_mul_mat(ctx0, k, q); + cb(kq, "kq", il); + + kq = ggml_soft_max_ext(ctx0, kq, KQ_mask, nullptr, 1.0f/sqrtf(float(n_embd_head)), hparams.f_max_alibi_bias); + cb(kq, "kq_soft_max_ext", il); + + struct ggml_tensor * v = ggml_cont(ctx0, ggml_transpose(ctx0, ggml_reshape_2d(ctx0, Vcur, n_embd_gqa, n_tokens))); + cb(v, "v", il); + + struct ggml_tensor * kqv = ggml_mul_mat(ctx0, ggml_reshape_3d(ctx0, v, n_tokens, n_embd_head, n_head_kv), kq); + cb(kqv, "kqv", il); + + struct ggml_tensor * kqv_merged = ggml_permute(ctx0, kqv, 0, 2, 1, 3); + cb(kqv_merged, "kqv_merged", il); + + cur = ggml_cont_2d(ctx0, kqv_merged, n_embd_gqa, n_tokens); + cb(cur, "kqv_merged_cont", il); + + ggml_build_forward_expand(gf, cur); + + cur = ggml_mul_mat(ctx0, model.layers[il].wo, cur); + if (model.layers[il].bo) { + cb(cur, "kqv_wo", il); + } + + if (model.layers[il].bo) { + cur = ggml_add(ctx0, cur, model.layers[il].bo); + } + cb(cur, "kqv_out", il); + // re-add the layer input cur = ggml_add(ctx0, cur, inpL); @@ -6209,16 +6246,29 @@ struct llm_build_context { // final output cur = inpL; + cb(cur, "result_embd", -1); // pooling layer - if (pooling_type == LLAMA_POOLING_TYPE_MEAN) { - cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); - } else if (pooling_type == LLAMA_POOLING_TYPE_CLS) { - cur = ggml_get_rows(ctx0, cur, inp_cls); - } else { - GGML_ASSERT(pooling_type == LLAMA_POOLING_TYPE_NONE && "Invalid pooling type"); + switch (pooling_type) { + case LLAMA_POOLING_TYPE_NONE: + { + // nop + } break; + case LLAMA_POOLING_TYPE_MEAN: + { + cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); + 
cb(cur, "result_embd_pooled", -1); + } break; + case LLAMA_POOLING_TYPE_CLS: + { + cur = ggml_get_rows(ctx0, cur, inp_cls); + cb(cur, "result_embd_pooled", -1); + } break; + case LLAMA_POOLING_TYPE_UNSPECIFIED: + { + GGML_ASSERT(false && "Invalid pooling type"); + } break; } - cb(cur, "result_embd", -1); ggml_build_forward_expand(gf, cur); @@ -7980,7 +8030,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { ggml_backend_tensor_set(lctx.inp_pos, batch.pos, 0, n_tokens*ggml_element_size(lctx.inp_pos)); } - { + if (hparams.causal_attn) { const int64_t n_kv = kv_self.n; const int64_t n_tokens = batch.n_tokens; @@ -7995,16 +8045,40 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { for (int i = 0; i < n_kv; ++i) { float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || - (hparams.causal_attn && lctx.kv_self.cells[i].pos > pos)) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { f = -INFINITY; } else { - f = 0; + f = 0.0f; } data[h*(n_kv*n_tokens) + j*n_kv + i] = f; } } } + } else { + // non-causal attention attends only the tokens within the batch (i.e. the KV cache is not used) + const int64_t n_tokens = batch.n_tokens; + + assert(ggml_backend_buffer_is_host(lctx.inp_KQ_mask->buffer)); + + float * data = (float *) lctx.inp_KQ_mask->data; + + for (int h = 0; h < 1; ++h) { + for (int j = 0; j < n_tokens; ++j) { + const llama_seq_id seq_id = batch.seq_id[j][0]; + + for (int i = 0; i < n_tokens; ++i) { + float f = -INFINITY; + for (int s = 0; s < batch.n_seq_id[i]; ++s) { + if (batch.seq_id[i][s] == seq_id) { + f = 0.0f; + break; + } + } + + data[h*(n_tokens*n_tokens) + j*n_tokens + i] = f; + } + } + } } if (hparams.need_kq_pos) { @@ -8023,13 +8097,16 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); - float * data = (float *) lctx.inp_mean->data; + float * data = (float *) lctx.inp_mean->data; memset(lctx.inp_mean->data, 0, n_tokens * n_tokens * ggml_element_size(lctx.inp_mean)); std::vector sum(n_tokens, 0); for (int i = 0; i < n_tokens; ++i) { const llama_seq_id seq_id = batch.seq_id[i][0]; + + GGML_ASSERT(seq_id < n_tokens && "seq_id cannot be larger than n_tokens with pooling_type == MEAN"); + sum[seq_id] += 1; } @@ -8051,11 +8128,16 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); + uint32_t * data = (uint32_t *) lctx.inp_cls->data; + memset(lctx.inp_cls->data, 0, n_tokens * ggml_element_size(lctx.inp_cls)); for (int i = 0; i < n_tokens; ++i) { const llama_seq_id seq_id = batch.seq_id[i][0]; - const llama_pos pos = batch.pos[i]; + const llama_pos pos = batch.pos[i]; + + GGML_ASSERT(seq_id < n_tokens && "seq_id cannot be larger than n_tokens with pooling_type == CLS"); + if (pos == 0) { data[seq_id] = i; } @@ -8169,24 +8251,27 @@ static int llama_decode_internal( batch.seq_id = seq_id_arr.data(); } - llama_kv_cache_update(&lctx); + // non-causal masks do not use the KV cache + if (hparams.causal_attn) { + llama_kv_cache_update(&lctx); - // if we have enough unused cells before the current head -> - // better to start searching from the beginning of the cache, hoping to fill it - if (kv_self.head > kv_self.used + 2*n_tokens) { - kv_self.head = 0; + // if we have enough unused cells before the current head -> + // 
better to start searching from the beginning of the cache, hoping to fill it + if (kv_self.head > kv_self.used + 2*n_tokens) { + kv_self.head = 0; + } + + if (!llama_kv_cache_find_slot(kv_self, batch)) { + return 1; + } + + // a heuristic, to avoid attending the full cache if it is not yet utilized + // after enough generations, the benefit from this heuristic disappears + // if we start defragmenting the cache, the benefit from this will be more important + kv_self.n = std::min(cparams.n_ctx, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); + //kv_self.n = llama_kv_cache_cell_max(kv_self); } - if (!llama_kv_cache_find_slot(kv_self, batch)) { - return 1; - } - - // a heuristic, to avoid attending the full cache if it is not yet utilized - // after enough generations, the benefit from this heuristic disappears - // if we start defragmenting the cache, the benefit from this will be more important - kv_self.n = std::min(cparams.n_ctx, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); - //kv_self.n = llama_kv_cache_cell_max(kv_self); - //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); ggml_backend_sched_reset(lctx.sched); @@ -8195,20 +8280,26 @@ static int llama_decode_internal( ggml_cgraph * gf = llama_build_graph(lctx, batch, false); // the output is always the last tensor in the graph - struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; - struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; + struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + struct ggml_tensor * embd = gf->nodes[gf->n_nodes - 2]; - if (strcmp(res->name, "result_output") == 0) { - // the embeddings could be the second to last tensor, or the third to last tensor - if (strcmp(embeddings->name, "result_norm") != 0) { - embeddings = gf->nodes[gf->n_nodes - 3]; - GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); - } - } else if (strcmp(res->name, "result_embd") == 0) { - embeddings = res; - res = nullptr; + if (!hparams.causal_attn) { + res = nullptr; // do not extract logits for embedding models such as BERT + + // token or sequence embeddings + embd = gf->nodes[gf->n_nodes - 1]; + + GGML_ASSERT(strcmp(embd->name, "result_embd") == 0 || strcmp(embd->name, "result_embd_pooled") == 0); } else { - GGML_ASSERT(false); + if (strcmp(res->name, "result_output") == 0) { + // the token embeddings could be the second to last tensor, or the third to last tensor + if (strcmp(embd->name, "result_norm") != 0) { + embd = gf->nodes[gf->n_nodes - 3]; + GGML_ASSERT(strcmp(embd->name, "result_norm") == 0); + } + } else { + GGML_ASSERT(false && "missing result_output tensor"); + } } // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs); @@ -8275,46 +8366,82 @@ static int llama_decode_internal( logits_out.clear(); #endif - ggml_backend_t res_backend = ggml_backend_sched_get_node_backend(lctx.sched, res); - GGML_ASSERT(res_backend != nullptr); + ggml_backend_t backend_res = ggml_backend_sched_get_node_backend(lctx.sched, res); + GGML_ASSERT(backend_res != nullptr); + if (batch.logits) { logits_out.resize(n_vocab * n_tokens); for (uint32_t i = 0; i < n_tokens; i++) { if (batch.logits[i] == 0) { continue; } - ggml_backend_tensor_get_async(res_backend, res, logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(backend_res, res, logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), 
n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[i] = true; #endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); - ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); + ggml_backend_tensor_get_async(backend_res, res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); #ifndef NDEBUG std::fill(logits_valid.begin(), logits_valid.end(), true); #endif } else { logits_out.resize(n_vocab); - ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(backend_res, res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[0] = true; #endif } - ggml_backend_synchronize(res_backend); + ggml_backend_synchronize(backend_res); } // extract embeddings - if (!lctx.embedding.empty()) { - auto & embedding_out = lctx.embedding; + if (cparams.embeddings && embd) { + ggml_backend_t backend_embd = ggml_backend_sched_get_node_backend(lctx.sched, embd); + GGML_ASSERT(backend_embd != nullptr); - const int64_t embd_pos = res ? n_embd * (n_tokens-1) : 0; - const int64_t embd_size = res ? n_embd : n_embd * n_tokens; + switch (cparams.pooling_type) { + case LLAMA_POOLING_TYPE_NONE: + { + // extract token embeddings + auto & embd_out = lctx.embd; - embedding_out.resize(embd_size); - ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); - ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embd_pos*sizeof(float), embd_size*sizeof(float)); - ggml_backend_synchronize(embeddings_backend); + if (batch.logits) { + embd_out.resize(n_embd * n_tokens); + for (uint32_t i = 0; i < n_tokens; i++) { + if (batch.logits[i] == 0) { + continue; + } + + ggml_backend_tensor_get_async(backend_embd, embd, embd_out.data() + (n_embd*i), (n_embd*i)*sizeof(float), n_embd*sizeof(float)); + } + } + } break; + case LLAMA_POOLING_TYPE_CLS: + case LLAMA_POOLING_TYPE_MEAN: + { + GGML_ASSERT(strcmp(embd->name, "result_embd_pooled") == 0); + + // extract sequence embeddings + auto & embd_seq_out = lctx.embd_seq; + embd_seq_out.clear(); + + for (uint32_t i = 0; i < n_tokens; i++) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + if (embd_seq_out.find(seq_id) != embd_seq_out.end()) { + continue; + } + embd_seq_out[seq_id].resize(n_embd); + ggml_backend_tensor_get_async(backend_embd, embd, embd_seq_out[seq_id].data(), (n_embd*seq_id)*sizeof(float), n_embd*sizeof(float)); + } + } break; + case LLAMA_POOLING_TYPE_UNSPECIFIED: + { + GGML_ASSERT(false && "unknown pooling type"); + } break; + } + ggml_backend_synchronize(backend_embd); } // measure the performance only for the single-token evals @@ -8608,19 +8735,19 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { GGML_ASSERT(llama_is_byte_token(vocab, id)); const auto& token_data = vocab.id_to_token.at(id); switch (llama_vocab_get_type(vocab)) { - case LLAMA_VOCAB_TYPE_SPM: { - auto buf = token_data.text.substr(3, 2); - return strtol(buf.c_str(), NULL, 16); - } - case LLAMA_VOCAB_TYPE_BPE: { - GGML_ASSERT(false); - return unicode_to_bytes_bpe(token_data.text); - } - case LLAMA_VOCAB_TYPE_WPM: { - GGML_ASSERT(false); - } - default: - GGML_ASSERT(false); + case LLAMA_VOCAB_TYPE_SPM: { + auto buf = token_data.text.substr(3, 2); + return strtol(buf.c_str(), NULL, 16); + } + case LLAMA_VOCAB_TYPE_BPE: { + GGML_ASSERT(false); + return 
unicode_to_bytes_bpe(token_data.text); + } + case LLAMA_VOCAB_TYPE_WPM: { + GGML_ASSERT(false); + } + default: + GGML_ASSERT(false); } } @@ -11864,7 +11991,7 @@ struct llama_context_params llama_context_default_params() { /*.type_k =*/ GGML_TYPE_F16, /*.type_v =*/ GGML_TYPE_F16, /*.logits_all =*/ false, - /*.embedding =*/ false, + /*.embeddings =*/ false, /*.offload_kqv =*/ true, /*.abort_callback =*/ nullptr, /*.abort_callback_data =*/ nullptr, @@ -12015,6 +12142,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_fast = params.yarn_beta_fast; cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.defrag_thold = params.defrag_thold; + cparams.embeddings = params.embeddings; cparams.offload_kqv = params.offload_kqv; cparams.pooling_type = params.pooling_type; @@ -12192,8 +12320,8 @@ struct llama_context * llama_new_context_with_model( // resized during inference, reserve maximum ctx->logits.reserve(hparams.n_vocab*cparams.n_batch); - if (params.embedding) { - ctx->embedding.resize(hparams.n_embd); + if (params.embeddings) { + ctx->embd.reserve(hparams.n_embd*cparams.n_batch); } // graph inputs @@ -12628,7 +12756,7 @@ size_t llama_get_state_size(const struct llama_context * ctx) { // assume worst case for logits although only currently set ones are serialized const size_t s_logits = ctx->logits.capacity() * sizeof(float); const size_t s_embedding_size = sizeof(size_t); - const size_t s_embedding = ctx->embedding.size() * sizeof(float); + const size_t s_embedding = ctx->embd.capacity() * sizeof(float); const size_t s_kv_buf_size = sizeof(size_t); const size_t s_kv_head = sizeof(uint32_t); const size_t s_kv_size = sizeof(uint32_t); @@ -12737,12 +12865,12 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat // copy embeddings { - const size_t embedding_size = ctx->embedding.size(); + const size_t embeddings_size = ctx->embd.size(); - data_ctx->write(&embedding_size, sizeof(embedding_size)); + data_ctx->write(&embeddings_size, sizeof(embeddings_size)); - if (embedding_size) { - data_ctx->write(ctx->embedding.data(), embedding_size * sizeof(float)); + if (embeddings_size) { + data_ctx->write(ctx->embd.data(), embeddings_size * sizeof(float)); } } @@ -12846,15 +12974,17 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { // set embeddings { - size_t embedding_size; + size_t embeddings_size; - memcpy(&embedding_size, inp, sizeof(embedding_size)); inp += sizeof(embedding_size); + memcpy(&embeddings_size, inp, sizeof(embeddings_size)); inp += sizeof(embeddings_size); - GGML_ASSERT(ctx->embedding.capacity() == embedding_size); + GGML_ASSERT(ctx->embd.capacity() == embeddings_size); - if (embedding_size) { - memcpy(ctx->embedding.data(), inp, embedding_size * sizeof(float)); - inp += embedding_size * sizeof(float); + if (embeddings_size) { + ctx->embd.resize(embeddings_size); + + memcpy(ctx->embd.data(), inp, embeddings_size * sizeof(float)); + inp += embeddings_size * sizeof(float); } } @@ -13104,11 +13234,20 @@ float * llama_get_logits_ith(struct llama_context * ctx, int32_t i) { } float * llama_get_embeddings(struct llama_context * ctx) { - return ctx->embedding.data(); + return ctx->embd.data(); } float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i) { - return ctx->embedding.data() + i*ctx->model.hparams.n_embd; + return ctx->embd.data() + i*ctx->model.hparams.n_embd; +} + +float * llama_get_embeddings_seq(struct llama_context * ctx, llama_seq_id seq_id) { + auto it = 
ctx->embd_seq.find(seq_id); + if (it == ctx->embd_seq.end()) { + return nullptr; + } + + return it->second.data(); } const char * llama_token_get_text(const struct llama_model * model, llama_token token) { diff --git a/llama.h b/llama.h index 70da4cb3f..3dc162b07 100644 --- a/llama.h +++ b/llama.h @@ -163,7 +163,7 @@ extern "C" { // - embd : token embeddings (i.e. float vector of size n_embd) (used when token is NULL) // - pos : the positions of the respective token in the sequence // - seq_id : the sequence to which the respective token belongs - // - logits : if zero, the logits for the respective token will not be output + // - logits : if zero, the logits (and/or the embeddings) for the respective token will not be output // typedef struct llama_batch { int32_t n_tokens; @@ -173,7 +173,7 @@ extern "C" { llama_pos * pos; int32_t * n_seq_id; llama_seq_id ** seq_id; - int8_t * logits; + int8_t * logits; // TODO: rename this to "output" // NOTE: helpers for smooth API transition - can be deprecated in the future // for future-proof code, use the above fields instead and ignore everything below @@ -260,7 +260,7 @@ extern "C" { // Keep the booleans together to avoid misalignment during copy-by-value. bool logits_all; // the llama_decode() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) - bool embedding; // embedding mode only + bool embeddings; // if true, extract embeddings (together with logits) bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU // Abort callback @@ -655,14 +655,20 @@ extern "C" { // llama_get_logits(ctx) + i*n_vocab LLAMA_API float * llama_get_logits_ith(struct llama_context * ctx, int32_t i); - // Get the embeddings for the input - // shape: [n_embd] (1-dimensional) + // Get all output token embeddings + // shape: [n_tokens*n_embd] (1-dimensional) LLAMA_API float * llama_get_embeddings(struct llama_context * ctx); - // Get the embeddings for the ith sequence + // Get the embeddings for the ith token // llama_get_embeddings(ctx) + i*n_embd + // shape: [n_embd] (1-dimensional) LLAMA_API float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i); + // Get the embeddings for a sequence id + // Returns NULL if pooling_type is LLAMA_POOLING_TYPE_NONE + // shape: [n_embd] (1-dimensional) + LLAMA_API float * llama_get_embeddings_seq(struct llama_context * ctx, llama_seq_id seq_id); + // // Vocab // From 1d41d6f7c2a666eb9c18a686a4684c4b03289bf3 Mon Sep 17 00:00:00 2001 From: hutli <6594598+hutli@users.noreply.github.com> Date: Tue, 5 Mar 2024 02:33:08 +0100 Subject: [PATCH 787/811] nix: static build (#5814) --- .devops/nix/package.nix | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index 815db6a2d..01c99185b 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -1,5 +1,6 @@ { lib, + glibc, config, stdenv, mkShell, @@ -30,6 +31,11 @@ useRocm ? config.rocmSupport, useVulkan ? false, llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake + + # It's necessary to consistently use backendStdenv when building with CUDA support, + # otherwise we get libstdc++ errors downstream. + effectiveStdenv ? if useCuda then cudaPackages.backendStdenv else stdenv, + enableStatic ? 
effectiveStdenv.hostPlatform.isStatic }@inputs: let @@ -40,11 +46,8 @@ let strings versionOlder ; - - # It's necessary to consistently use backendStdenv when building with CUDA support, - # otherwise we get libstdc++ errors downstream. + stdenv = throw "Use effectiveStdenv instead"; - effectiveStdenv = if useCuda then cudaPackages.backendStdenv else inputs.stdenv; suffices = lib.optionals useBlas [ "BLAS" ] @@ -167,6 +170,9 @@ effectiveStdenv.mkDerivation ( # TODO: Replace with autoAddDriverRunpath # once https://github.com/NixOS/nixpkgs/pull/275241 has been merged cudaPackages.autoAddOpenGLRunpathHook + ] + ++ optionals (effectiveStdenv.hostPlatform.isGnu && enableStatic) [ + glibc.static ]; buildInputs = @@ -181,7 +187,7 @@ effectiveStdenv.mkDerivation ( [ (cmakeBool "LLAMA_NATIVE" false) (cmakeBool "LLAMA_BUILD_SERVER" true) - (cmakeBool "BUILD_SHARED_LIBS" true) + (cmakeBool "BUILD_SHARED_LIBS" (!enableStatic)) (cmakeBool "CMAKE_SKIP_BUILD_RPATH" true) (cmakeBool "LLAMA_BLAS" useBlas) (cmakeBool "LLAMA_CLBLAST" useOpenCL) @@ -190,6 +196,7 @@ effectiveStdenv.mkDerivation ( (cmakeBool "LLAMA_METAL" useMetalKit) (cmakeBool "LLAMA_MPI" useMpi) (cmakeBool "LLAMA_VULKAN" useVulkan) + (cmakeBool "LLAMA_STATIC" enableStatic) ] ++ optionals useCuda [ ( From 29eee404746e4696143a4f3a642660a4793a15d8 Mon Sep 17 00:00:00 2001 From: Jeffrey Quesnelle Date: Mon, 4 Mar 2024 19:23:06 -0800 Subject: [PATCH 788/811] fix speculative decoding build on windows (#5874) --- examples/speculative/speculative.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 85bc0a762..e991b8846 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -226,7 +226,7 @@ int main(int argc, char ** argv) { while (active_seqs.size() > 0) { // randomly select a sequence to verify from active sequences - std::uniform_int_distribution u_int_dist(0, active_seqs.size() - 1); + std::uniform_int_distribution u_int_dist(0, active_seqs.size() - 1); int s = *std::next(active_seqs.begin(), u_int_dist(rng)); if (i_dft >= (int) drafts[s].tokens.size()) { drafts[s].active = false; From 6a87ac3a52668e117d97bcea07b529c93188b303 Mon Sep 17 00:00:00 2001 From: Minsoo Cheong <54794500+mscheong01@users.noreply.github.com> Date: Tue, 5 Mar 2024 15:12:23 +0900 Subject: [PATCH 789/811] fix editorconfig check break (#5879) --- .devops/nix/package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index 01c99185b..c7fa2203a 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -46,7 +46,7 @@ let strings versionOlder ; - + stdenv = throw "Use effectiveStdenv instead"; suffices = From 21b08674331e1ea1b599f17c5ca91f0ed173be31 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Tue, 5 Mar 2024 16:08:35 +0800 Subject: [PATCH 790/811] [SYCL] fix mul_mat fault in CI/unit-test (#5862) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix mul_mat fault in cpy_f32_f16 * rm unused function * add wait() for memcpy * restore ci/run.sh, rename struct defination, fix bug in ggml_sycl_op_mul_mat_sycl * fix format issue * llama : fix segfault from unknown model arch name (#5820) * llama : fix segfault from unknown model arch name * llama : make all LLM maps const This also requires using `std::map::at` instead of its `operator[]` which does not exist for const maps. 
* llama : name LLM_ARCH_UNKNOWN to "(unknown)" This avoids errors from `std::map::at` when getting the general name of the model architecture. Using "(unknown)" instead of an empty string as per suggestion https://github.com/ggerganov/llama.cpp/pull/5820#issuecomment-1973735284 * llama : remove redundant inner const for LLM_TENSOR_NAMES The extra const won't do anything here as const maps return const references to values. Co-authored-by: Jared Van Bortel * llama : remove redundant nullptr check in llm_arch_from_string Since LLM_ARCH_NAMES is a const map, no spurious elements with a NULL name are inserted anymore, so this check is dead code. --------- Co-authored-by: Jared Van Bortel * llama : refactor internal quantization functions (#5830) * scripts : add pod-llama.sh * ggml : IQ3_S improvements (#5829) * iq3_s: somewhat faster AVX2 dot product On Ryzen a 7950X TG-128 increases to 16 t/s from 15.5 t/s using 16 threads. For 8 threads it is 13.85 t/s vs 11.75 t/s. PP-512 increases to 28.5 t/s from 23.8 t/s. * iq3_s: somewhat faster ARM_NEON dot product Still dog slow - 10.7 t/s up from 9.9 t/s. * iq3_s: another small ARM_NEON improvement 10.7 -> 11.0 t/s. Using vmulq_s8 is faster than the xor - sub trick that works best on AVX2. * iq3_s: minor improvement on Metal 49.4 t/s -> 50.3 t/s * iq3_s: PPL improvement E.g., for a context of 4096 LLaMA-v2-7B goes to 5.1340 from 5.1653. * iq3_s: use new grid everywhere * Fix ARM_NEON --------- Co-authored-by: Iwan Kawrakow * convert-hf : make model class definitions self-contained (#5825) * convert : automatically fall back to HfVocab if tokenizer.model doesn't exist (#5821) * ggml : fix IQ3_S AVX implementation (#5834) ggml-ci * llama : add abort_callback to interrupt computation (#5409) * using abort_callback from ggml to stop llama computation * format fix * a brief explaining comment --------- Co-authored-by: Georgi Gerganov * server: tests: passkey challenge / self-extend with context shift demo (#5832) * server: tests: add models endpoint scenario * server: /v1/models add some metadata * server: tests: add debug field in context before scenario * server: tests: download model from HF, add batch size * server: tests: add passkey test * server: tests: add group attention params * server: do not truncate prompt tokens if self-extend through group attention is enabled * server: logs: do not truncate log values * server: tests - passkey - first good working value of nga * server: tests: fix server timeout * server: tests: fix passkey, add doc, fix regex content matching, fix timeout * server: tests: fix regex content matching * server: tests: schedule slow tests on master * server: metrics: fix when no prompt processed * server: tests: self-extend add llama-2-7B and Mixtral-8x7B-v0.1 * server: tests: increase timeout for completion * server: tests: keep only the PHI-2 test * server: tests: passkey add a negative test * flake.lock: Update (#5842) Flake lock file updates: • Updated input 'flake-parts': 'github:hercules-ci/flake-parts/b253292d9c0a5ead9bc98c4e9a26c6312e27d69f' (2024-02-01) → 'github:hercules-ci/flake-parts/f7b3c975cf067e56e7cda6cb098ebe3fb4d74ca2' (2024-03-01) • Updated input 'flake-parts/nixpkgs-lib': 'github:NixOS/nixpkgs/97b17f32362e475016f942bbdfda4a4a72a8a652?dir=lib' (2024-01-29) → 'github:NixOS/nixpkgs/1536926ef5621b09bba54035ae2bb6d806d72ac8?dir=lib' (2024-02-29) • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/cbc4211f0afffe6dfd2478a62615dd5175a13f9a' (2024-02-23) → 
'github:NixOS/nixpkgs/1536926ef5621b09bba54035ae2bb6d806d72ac8' (2024-02-29) Co-authored-by: github-actions[bot] * server : init http requests thread pool with --parallel if set (#5836) * ci : schedule slow server tests only on Release or on demand (#5839) * llama : fix llama_copy_state_data with fragmented KV cache (#5840) The row size of the saved states was based on kv_self.head while it should be based on llama_kv_cache_cell_max. Existing session files should still work. * llama : fix llama_kv_cache_cell_max inability to return 1 I've also changed its return type to uint32_t, because this function is always used to set the value of uint32_t variables, and because the index already has this type. * llama : fix state size calculation Some bytes in the state were unaccounted for in llama_get_state_size. Since the logits reserve so much space, it did not cause problems. * gguf-dump : support i-quants (#5841) Co-authored-by: Black_Fox * llama : allow for user specified embedding pooling type (#5849) * allow for user specified pooling type * llama : use enum types over int --------- Co-authored-by: Georgi Gerganov * readme : add API changes section * cuda : fix data race in soft max (#5853) * main : support special tokens as reverse/anti prompt (#5847) * Support special tokens as reverse/anti prompt. * Tokenize antiprompts only once. * main : minor --------- Co-authored-by: Georgi Gerganov * common : use LLAMA_DEFAULT_SEED (#5855) * add some new ops, fix some operators and add batch operations to certain operators. (ggml/747) * cuda: fix group_norm * cuda: add batch inference support for ggml_pad/ggml_upscale * add ggml_arrange * add ggml_timestep_embedding * update ggml_arange/ggml_timestep_embedding tests * cuda: fix im2col * add ggml_arange/ggml_timestep_embbeding support for metal backend * fix some bugs * fix some bugs * Update ggml.h Co-authored-by: Georgi Gerganov * Update ggml-cuda.cu Co-authored-by: Georgi Gerganov * Update ggml-metal.m Co-authored-by: Georgi Gerganov * Update ggml-metal.m Co-authored-by: Georgi Gerganov * Update ggml-metal.metal Co-authored-by: Georgi Gerganov * modify according to the review comments * ggml : fix compile warnings + code style * ggml : normalize compute_forward calls + fix seg fault in debug * minor --------- Co-authored-by: Georgi Gerganov Co-authored-by: slaren * sync : ggml * add alias for chat template (#5858) * speculative : implement stochastic speculative sampling (#5625) * (WIP) Implement stochastic speculative decoding * sample from residual distribution on draft accept failure * fix #5657: force greedy sampling with probs when temp is 0 * remove p_accept parameter * fix style * remove unused variables * add srand() in speculative.cpp * replace use of rand() with mt19937 sampling * fixes based on review (@JohannesGaessler) * fix r random generation * randomly select next sequence to verify + fix bug in memory freeing * fix bug in active_seqs sync * fix uniform int distribution initialization * remove warnings from comparison between int and size_t * check grammar in `llama_sample_probability_distribution_impl` * remove malloc code by utilizing vectors * add PR link to README * cmake : handle cases where git index is not found in .git (#5844) * Update CMakeLists.txt * Update CMakeLists.txt * ggml : introduce ggml_status (ggml/750) * using enum as an exit code instead of macros * update return type from enum to unsigned int * indentation fix * compound update ggml_compute_exit_code -> ggml_status changed ggml_status from a bit-field type to 
simple codes ggml_status to string cast * ggml_status to string cast * GGML_CALL was removed Co-authored-by: slaren --------- Co-authored-by: slaren Co-authored-by: Georgi Gerganov * sync : ggml ggml-ci * ggml : fix unknown status (#0) * flake : fix * llama : fix embeddings (#5796) * llama : fix embeddings ggml-ci * llama : do not use KV cache for non-causal models ggml-ci * embeddings : fix llama_batch_init arg * llama : add pooling switch * llama : distinguish token vs sequence embeddings ggml-ci * llama : assert pooling tensor * llama : simplify causal mask condition ggml-ci * llama : assert input batch with pooling enabled * readme : update API changes list * nix: static build (#5814) * fix speculative decoding build on windows (#5874) * rebase and rm tailing space --------- Co-authored-by: LiangtaoJin Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> Co-authored-by: Jared Van Bortel Co-authored-by: Xuan Son Nguyen Co-authored-by: Georgi Gerganov Co-authored-by: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Co-authored-by: Iwan Kawrakow Co-authored-by: Jared Van Bortel Co-authored-by: Michael Podvitskiy Co-authored-by: Pierrick Hymbert Co-authored-by: github-actions[bot] Co-authored-by: Nindaleth Co-authored-by: Black_Fox Co-authored-by: Douglas Hanley Co-authored-by: slaren Co-authored-by: DAN™ Co-authored-by: leejet Co-authored-by: Minsoo Cheong <54794500+mscheong01@users.noreply.github.com> Co-authored-by: Dane Madsen Co-authored-by: hutli <6594598+hutli@users.noreply.github.com> Co-authored-by: Jeffrey Quesnelle --- ggml-sycl.cpp | 1966 +++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 1810 insertions(+), 156 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 47a605b01..477f5cb02 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -2894,6 +2894,254 @@ namespace dpct using err0 = detail::generic_error_type; using err1 = detail::generic_error_type; + static inline void dpct_free(void *ptr, sycl::queue &q = get_default_queue()) { + detail::dpct_free(ptr, q); + } + + /// dpct accessor used as device function parameter. 
+ template class accessor; + template class accessor { + public: + using memory_t = detail::memory_traits; + using element_t = typename memory_t::element_t; + using pointer_t = typename memory_t::pointer_t; + using accessor_t = typename memory_t::template accessor_t<3>; + accessor(pointer_t data, const sycl::range<3> &in_range) + : _data(data), _range(in_range) {} + template + accessor(typename std::enable_if::type &acc) + : accessor(acc, acc.get_range()) {} + accessor(const accessor_t &acc, const sycl::range<3> &in_range) + : accessor(acc.get_pointer(), in_range) {} + accessor operator[](size_t index) const { + sycl::range<2> sub(_range.get(1), _range.get(2)); + return accessor(_data + index * sub.size(), sub); + } + + pointer_t get_ptr() const { return _data; } + + private: + pointer_t _data; + sycl::range<3> _range; + }; + template class accessor { + public: + using memory_t = detail::memory_traits; + using element_t = typename memory_t::element_t; + using pointer_t = typename memory_t::pointer_t; + using accessor_t = typename memory_t::template accessor_t<2>; + accessor(pointer_t data, const sycl::range<2> &in_range) + : _data(data), _range(in_range) {} + template + accessor(typename std::enable_if::type &acc) + : accessor(acc, acc.get_range()) {} + accessor(const accessor_t &acc, const sycl::range<2> &in_range) + : accessor(acc.get_pointer(), in_range) {} + + pointer_t operator[](size_t index) const { + return _data + _range.get(1) * index; + } + + pointer_t get_ptr() const { return _data; } + + private: + pointer_t _data; + sycl::range<2> _range; + }; + + namespace detail { + /// Device variable with address space of shared, global or constant. + template class device_memory { + public: + using accessor_t = + typename detail::memory_traits::template accessor_t; + using value_t = typename detail::memory_traits::value_t; + using dpct_accessor_t = dpct::accessor; + + device_memory() : device_memory(sycl::range(1)) {} + + /// Constructor of 1-D array with initializer list + device_memory(const sycl::range &in_range, + std::initializer_list &&init_list) + : device_memory(in_range) { + assert(init_list.size() <= in_range.size()); + _host_ptr = (value_t *)std::malloc(_size); + std::memset(_host_ptr, 0, _size); + std::memcpy(_host_ptr, init_list.begin(), init_list.size() * sizeof(T)); + } + + /// Constructor of 2-D array with initializer list + template + device_memory( + const typename std::enable_if>::type &in_range, + std::initializer_list> &&init_list) + : device_memory(in_range) { + assert(init_list.size() <= in_range[0]); + _host_ptr = (value_t *)std::malloc(_size); + std::memset(_host_ptr, 0, _size); + auto tmp_data = _host_ptr; + for (auto sub_list : init_list) { + assert(sub_list.size() <= in_range[1]); + std::memcpy(tmp_data, sub_list.begin(), + sub_list.size() * sizeof(T)); + tmp_data += in_range[1]; + } + } + + /// Constructor with range + device_memory(const sycl::range &range_in) + : _size(range_in.size() * sizeof(T)), _range(range_in), + _reference(false), _host_ptr(nullptr), _device_ptr(nullptr) { + static_assert( + (Memory == global) || (Memory == constant) || (Memory == shared), + "device memory region should be global, constant or shared"); + // Make sure that singleton class mem_mgr and dev_mgr will destruct + // later than this. + detail::mem_mgr::instance(); + dev_mgr::instance(); + } + + /// Constructor with range + template + device_memory(Args... 
Arguments) + : device_memory(sycl::range(Arguments...)) {} + + ~device_memory() { + if (_device_ptr && !_reference) + dpct::dpct_free(_device_ptr); + if (_host_ptr) + std::free(_host_ptr); + } + + /// Allocate memory with default queue, and init memory if has initial + /// value. + void init() { init(dpct::get_default_queue()); } + /// Allocate memory with specified queue, and init memory if has initial + /// value. + void init(sycl::queue &q) { + if (_device_ptr) + return; + if (!_size) + return; + allocate_device(q); + if (_host_ptr) + detail::dpct_memcpy(q, _device_ptr, _host_ptr, _size, + host_to_device); + } + + /// The variable is assigned to a device pointer. + void assign(value_t *src, size_t size) { + this->~device_memory(); + new (this) device_memory(src, size); + } + + /// Get memory pointer of the memory object, which is virtual pointer when + /// usm is not used, and device pointer when usm is used. + value_t *get_ptr() { return get_ptr(get_default_queue()); } + /// Get memory pointer of the memory object, which is virtual pointer when + /// usm is not used, and device pointer when usm is used. + value_t *get_ptr(sycl::queue &q) { + init(q); + return _device_ptr; + } + + /// Get the device memory object size in bytes. + size_t get_size() { return _size; } + + template + typename std::enable_if::type &operator[](size_t index) { + init(); + #ifdef DPCT_USM_LEVEL_NONE + return dpct::get_buffer::type>( + _device_ptr) + .template get_access()[index]; + #else + return _device_ptr[index]; + #endif // DPCT_USM_LEVEL_NONE + } + + #ifdef DPCT_USM_LEVEL_NONE + /// Get sycl::accessor for the device memory object when usm is not used. + accessor_t get_access(sycl::handler &cgh) { + return get_buffer(_device_ptr) + .template reinterpret(_range) + .template get_access::mode, + detail::memory_traits::target>(cgh); + } + #else + /// Get dpct::accessor with dimension info for the device memory object + /// when usm is used and dimension is greater than 1. + template + typename std::enable_if::type + get_access(sycl::handler &cgh) { + return dpct_accessor_t((T *)_device_ptr, _range); + } + #endif // DPCT_USM_LEVEL_NONE + + private: + device_memory(value_t *memory_ptr, size_t size) + : _size(size), _range(size / sizeof(T)), _reference(true), + _device_ptr(memory_ptr) {} + + void allocate_device(sycl::queue &q) { + #ifndef DPCT_USM_LEVEL_NONE + if (Memory == shared) { + _device_ptr = (value_t *)sycl::malloc_shared(_size, q.get_device(), + q.get_context()); + return; + } + #ifdef SYCL_EXT_ONEAPI_USM_DEVICE_READ_ONLY + if (Memory == constant) { + _device_ptr = (value_t *)sycl::malloc_device( + _size, q.get_device(), q.get_context(), + sycl::ext::oneapi::property::usm::device_read_only()); + return; + } + #endif + #endif + _device_ptr = (value_t *)detail::dpct_malloc(_size, q); + } + + size_t _size; + sycl::range _range; + bool _reference; + value_t *_host_ptr; + value_t *_device_ptr; + }; + template + class device_memory : public device_memory { + public: + using base = device_memory; + using value_t = typename base::value_t; + using accessor_t = + typename detail::memory_traits::template accessor_t<0>; + + /// Constructor with initial value. + device_memory(const value_t &val) : base(sycl::range<1>(1), {val}) {} + + /// Default constructor + device_memory() : base(1) {} + + #ifdef DPCT_USM_LEVEL_NONE + /// Get sycl::accessor for the device memory object when usm is not used. 
+ accessor_t get_access(sycl::handler &cgh) { + auto buf = get_buffer(base::get_ptr()) + .template reinterpret(sycl::range<1>(1)); + return accessor_t(buf, cgh); + } + #endif // DPCT_USM_LEVEL_NONE + }; + } // namespace detail + + template + using global_memory = detail::device_memory; + template + using constant_memory = detail::device_memory; + template + using shared_memory = detail::device_memory; + + } // COPY from DPCT head files @@ -2938,6 +3186,15 @@ static int g_work_group_size = 0; #pragma warning(disable: 4244 4267) // possible loss of data #endif +// dmmv = dequantize_mul_mat_vec +#ifndef GGML_SYCL_DMMV_X +#define GGML_SYCL_DMMV_X 32 +#endif +#ifndef GGML_SYCL_MMV_Y +#define GGML_SYCL_MMV_Y 1 +#endif + + static_assert(sizeof(sycl::half) == sizeof(ggml_fp16_t), "wrong fp16 size"); static void crash(){ @@ -3060,7 +3317,7 @@ typedef void (*ggml_sycl_op_flatten_t)(const ggml_tensor *src0, #define QK4_0 32 #define QR4_0 2 #define QI4_0 (QK4_0 / (4 * QR4_0)) -typedef struct dpct_type_471834 { +typedef struct dpct_type_block_q4_0 { sycl::half d; // delta uint8_t qs[QK4_0 / 2]; // nibbles / quants } block_q4_0; @@ -3069,7 +3326,7 @@ static_assert(sizeof(block_q4_0) == sizeof(ggml_fp16_t) + QK4_0 / 2, "wrong q4_0 #define QK4_1 32 #define QR4_1 2 #define QI4_1 (QK4_1 / (4 * QR4_1)) -typedef struct dpct_type_143705 { +typedef struct dpct_type_block_q4_1 { sycl::half2 dm; // dm.x = delta, dm.y = min uint8_t qs[QK4_1 / 2]; // nibbles / quants } block_q4_1; @@ -3078,7 +3335,7 @@ static_assert(sizeof(block_q4_1) == sizeof(ggml_fp16_t) * 2 + QK4_1 / 2, "wrong #define QK5_0 32 #define QR5_0 2 #define QI5_0 (QK5_0 / (4 * QR5_0)) -typedef struct dpct_type_673649 { +typedef struct dpct_type_block_q5_0 { sycl::half d; // delta uint8_t qh[4]; // 5-th bit of quants uint8_t qs[QK5_0 / 2]; // nibbles / quants @@ -3088,7 +3345,7 @@ static_assert(sizeof(block_q5_0) == sizeof(ggml_fp16_t) + sizeof(uint32_t) + QK5 #define QK5_1 32 #define QR5_1 2 #define QI5_1 (QK5_1 / (4 * QR5_1)) -typedef struct dpct_type_135589 { +typedef struct dpct_type_block_q5_1 { sycl::half2 dm; // dm.x = delta, dm.y = min uint8_t qh[4]; // 5-th bit of quants uint8_t qs[QK5_1 / 2]; // nibbles / quants @@ -3098,7 +3355,7 @@ static_assert(sizeof(block_q5_1) == 2 * sizeof(ggml_fp16_t) + sizeof(uint32_t) + #define QK8_0 32 #define QR8_0 1 #define QI8_0 (QK8_0 / (4 * QR8_0)) -typedef struct dpct_type_122878 { +typedef struct dpct_type_block_q8_0 { sycl::half d; // delta int8_t qs[QK8_0]; // quants } block_q8_0; @@ -3107,7 +3364,7 @@ static_assert(sizeof(block_q8_0) == sizeof(ggml_fp16_t) + QK8_0, "wrong q8_0 blo #define QK8_1 32 #define QR8_1 1 #define QI8_1 (QK8_1 / (4 * QR8_1)) -typedef struct dpct_type_143721 { +typedef struct dpct_type_block_q8_1 { sycl::half2 ds; // ds.x = delta, ds.y = sum int8_t qs[QK8_0]; // quants } block_q8_1; @@ -3141,7 +3398,7 @@ typedef float (*vec_dot_q_mul_mat_sycl_t)( #define QR2_K 4 #define QI2_K (QK_K / (4*QR2_K)) -typedef struct dpct_type_619598 { +typedef struct dpct_type_block_q2_K { uint8_t scales[QK_K/16]; // scales and mins, quantized with 4 bits uint8_t qs[QK_K/4]; // quants sycl::half2 dm; // super-block scale for quantized scales/mins @@ -3150,7 +3407,7 @@ static_assert(sizeof(block_q2_K) == 2*sizeof(ggml_fp16_t) + QK_K/16 + QK_K/4, "w #define QR3_K 4 #define QI3_K (QK_K / (4*QR3_K)) -typedef struct dpct_type_138576 { +typedef struct dpct_type_block_q3_K { uint8_t hmask[QK_K/8]; // quants - high bit uint8_t qs[QK_K/4]; // quants - low 2 bits #ifdef GGML_QKK_64 @@ -3166,13 +3423,13 @@ typedef 
struct dpct_type_138576 { #define QI4_K (QK_K / (4*QR4_K)) #ifdef GGML_QKK_64 typedef struct { - half dm[2]; // super-block scales/mins + sycl::half dm[2]; // super-block scales/mins uint8_t scales[2]; // 4-bit block scales/mins uint8_t qs[QK_K/2]; // 4--bit quants } block_q4_K; -static_assert(sizeof(block_q4_K) == sizeof(half2) + QK_K/2 + 2, "wrong q4_K block size/padding"); +static_assert(sizeof(block_q4_K) == sizeof(sycl::half2) + QK_K/2 + 2, "wrong q4_K block size/padding"); #else -typedef struct dpct_type_154943 { +typedef struct dpct_type_block_q4_K { sycl::half2 dm; // super-block scale for quantized scales/mins uint8_t scales[3*QK_K/64]; // scales, quantized with 6 bits uint8_t qs[QK_K/2]; // 4--bit quants @@ -3184,14 +3441,14 @@ static_assert(sizeof(block_q4_K) == 2*sizeof(ggml_fp16_t) + 3*QK_K/64 + QK_K/2, #define QI5_K (QK_K / (4*QR5_K)) #ifdef GGML_QKK_64 typedef struct { - half d; // super-block scale + sycl::half d; // super-block scale int8_t scales[QK_K/16]; // block scales uint8_t qh[QK_K/8]; // quants, high bit uint8_t qs[QK_K/2]; // quants, low 4 bits } block_q5_K; static_assert(sizeof(block_q5_K) == sizeof(ggml_fp16_t) + QK_K/2 + QK_K/8 + QK_K/16, "wrong q5_K block size/padding"); #else -typedef struct dpct_type_866817 { +typedef struct dpct_type_block_q5_K { sycl::half2 dm; // super-block scale for quantized scales/mins uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits uint8_t qh[QK_K/8]; // quants, high bit @@ -3202,7 +3459,7 @@ static_assert(sizeof(block_q5_K) == 2*sizeof(ggml_fp16_t) + K_SCALE_SIZE + QK_K/ #define QR6_K 2 #define QI6_K (QK_K / (4*QR6_K)) -typedef struct dpct_type_107281 { +typedef struct dpct_type_block_q6_K { uint8_t ql[QK_K/2]; // quants, lower 4 bits uint8_t qh[QK_K/4]; // quants, upper 2 bits int8_t scales[QK_K/16]; // scales @@ -3210,6 +3467,31 @@ typedef struct dpct_type_107281 { } block_q6_K; static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_K block size/padding"); +#define QR2_XXS 8 +#define QI2_XXS (QK_K / (4*QR2_XXS)) +typedef struct dpct_type_block_iq2_xxs { + sycl::half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); + +#define QR2_XS 8 +#define QI2_XS (QK_K / (4*QR2_XS)) +typedef struct dpct_type_block_iq2_xs { + sycl::half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + +#define QR3_XXS 8 +#define QI3_XXS (QK_K / (4*QR3_XXS)) +typedef struct dpct_type_block_iq3_xxs { + sycl::half d; + uint8_t qs[3*(QK_K/8)]; +} block_iq3_xxs; +static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -3478,7 +3760,7 @@ void log_ggml_var_device(const char*name, float *src, size_t total_elements, boo local_buf = (float *) ggml_sycl_host_malloc(total_size); ggml_sycl_set_device(g_main_device); dpct::queue_ptr main_stream = g_syclStreams[g_main_device][0]; - main_stream->memcpy(local_buf, src, total_size); + main_stream->memcpy(local_buf, src, total_size).wait(); } else { local_buf = (float *)src; @@ -4129,6 +4411,66 @@ static __dpct_inline__ void dequantize_q8_0(const void *vx, const int ib, #endif // GGML_SYCL_F16 } +template +static void dequantize_block_q4_0(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32, + const sycl::nd_item<3> &item_ct1) { + + const int i = item_ct1.get_group(2); + + // assume 32 threads + const int tid = item_ct1.get_local_id(2); + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_0 * x = (const block_q4_0 *)vx + ib; + const float d = sycl::vec(x->d) + .convert()[0]; + const float dm = -8*d; + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l+ 0] = d * (q[l] & 0xF) + dm; + y[l+16] = d * (q[l] >> 4) + dm; + } +} + +template +static void dequantize_block_q4_1(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32, + const sycl::nd_item<3> &item_ct1) { + + const int i = item_ct1.get_group(2); + + // assume 32 threads + const int tid = item_ct1.get_local_id(2); + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_1 * x = (const block_q4_1 *)vx + ib; + const sycl::float2 d = + x->dm.convert(); + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l + 0] = d.x() * (q[l] & 0xF) + d.y(); + y[l + 16] = d.x() * (q[l] >> 4) + d.y(); + } +} + + //================================== k-quants template @@ -4158,8 +4500,9 @@ static void dequantize_block_q2_K(const void * __restrict__ vx, dst_t * __restri const int il = tid%16; // 0...15 const uint8_t q = x[i].qs[il] >> (2*is); dst_t * y = yy + i*QK_K + 16*is + il; - float dall = __low2half(x[i].dm); - float dmin = __high2half(x[i].dm); + + float dall = x[i].dm[0]; + float dmin = x[i].dm[1]; y[ 0] = dall * (x[i].scales[is+0] & 0xF) * ((q >> 0) & 3) - dmin * (x[i].scales[is+0] >> 4); y[32] = dall * (x[i].scales[is+2] & 0xF) * ((q >> 4) & 3) - dmin * (x[i].scales[is+2] >> 4); #endif @@ -4198,7 +4541,7 @@ static void dequantize_block_q3_K(const void * __restrict__ vx, dst_t * __restri for (int l = l0; l < l0+4; ++l) y[l] = dl * ((int8_t)((q[l] >> shift) & 3) - ((hm[l] & m) ? 0 : 4)); #else - const int tid = threadIdx.x; + const int tid = item_ct1.get_local_id(2); const int is = tid/16; // 0 or 1 const int il = tid%16; // 0...15 const int im = il/8; // 0...1 @@ -4264,7 +4607,7 @@ static void dequantize_block_q4_K(const void * __restrict__ vx, dst_t * __restri y[l +32] = d2 * (q[l] >> 4) - m2; } #else - const int tid = threadIdx.x; + const int tid = item_ct1.get_local_id(2); const uint8_t * q = x[i].qs; dst_t * y = yy + i*QK_K; const float d = (float)x[i].dm[0]; @@ -4309,7 +4652,7 @@ static void dequantize_block_q5_K(const void * __restrict__ vx, dst_t * __restri y[32] = d2 * ((ql[ 0] >> 4) + (qh[ 0] & hm ? 16 : 0)) - m2; y[33] = d2 * ((ql[ 1] >> 4) + (qh[ 1] & hm ? 
16 : 0)) - m2; #else - const int tid = threadIdx.x; + const int tid = item_ct1.get_local_id(2); const uint8_t q = x[i].qs[tid]; const int im = tid/8; // 0...3 const int in = tid%8; // 0...7 @@ -4351,7 +4694,7 @@ static void dequantize_block_q6_K(const void * __restrict__ vx, dst_t * __restri #else // assume 32 threads - const int tid = threadIdx.x; + const int tid = item_ct1.get_local_id(2); const int ip = tid/16; // 0 or 1 const int il = tid - 16*ip; // 0...15 @@ -4368,6 +4711,474 @@ static void dequantize_block_q6_K(const void * __restrict__ vx, dst_t * __restri #endif } +static dpct::global_memory + iq2xxs_grid(sycl::range<1>(256), + { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, + 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, + 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, + 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, + 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b082b2b, 0x080808082b2b082b, + 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, + 0x080808192b192b08, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b082b082b, 0x0808082b2b08082b, 0x0808190808080819, + 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, + 0x0808190819082b08, 0x08081908192b0808, 0x080819082b080819, + 0x080819082b081908, 0x080819082b190808, 0x080819082b2b1908, + 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, + 0x080819192b080808, 0x080819192b190819, 0x0808192b08082b19, + 0x0808192b08190808, 0x0808192b19080808, 0x0808192b2b081908, + 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, + 0x08082b0819080819, 0x08082b0819081908, 0x08082b0819190808, + 0x08082b081919082b, 0x08082b082b082b08, 0x08082b1908081908, + 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, + 0x08190808082b0819, 0x0819080819080808, 0x08190808192b0808, + 0x081908082b081908, 0x081908082b190808, 0x081908082b191919, + 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, + 0x0819082b082b1908, 0x0819082b19081919, 0x0819190808080808, + 0x0819190808082b08, 0x08191908082b0808, 0x08191908082b1919, + 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, + 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b0819080808, 0x08192b082b080819, 0x08192b1908080808, + 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, + 0x082b080819081908, 0x082b0808192b0819, 0x082b08082b080808, + 0x082b08082b08082b, 0x082b0819082b2b19, 0x082b081919082b08, + 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, + 0x082b19081919192b, 0x082b191908080808, 0x082b191919080819, + 0x082b1919192b1908, 0x082b192b2b190808, 0x082b2b0808082b08, + 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, + 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x1908080819082b08, 
0x190808081919192b, + 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, + 0x19080819192b0819, 0x190808192b080808, 0x190808192b081919, + 0x1908082b08080819, 0x1908082b08190808, 0x1908082b19082b08, + 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, + 0x190819082b192b19, 0x190819190819082b, 0x19081919082b1908, + 0x1908192b08080808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, + 0x19082b192b08082b, 0x19082b2b19081919, 0x19082b2b2b190808, + 0x1919080808080808, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, + 0x191908192b2b1908, 0x1919082b2b190819, 0x191919082b190808, + 0x191919082b19082b, 0x1919191908082b2b, 0x1919192b08080819, + 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, + 0x19192b2b08082b08, 0x192b080808081908, 0x192b080808190808, + 0x192b080819080808, 0x192b0808192b2b08, 0x192b081908080808, + 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, + 0x192b19190819082b, 0x192b19192b081908, 0x192b2b081908082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808082b2b, + 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, + 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, + 0x2b082b080808082b, 0x2b082b1908081908, 0x2b082b2b08190819, + 0x2b19080808081908, 0x2b19080808190808, 0x2b190808082b1908, + 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, + 0x2b191908082b082b, 0x2b19190819081908, 0x2b19191919190819, + 0x2b192b082b080819, 0x2b192b19082b0808, 0x2b2b08080808082b, + 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, + 0x2b2b2b1908081908, + }); + +static dpct::global_memory + iq2xs_grid(sycl::range<1>(512), + { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, + 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, + 0x0808080808191908, 0x080808080819192b, 0x0808080808192b19, + 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, + 0x080808081908192b, 0x0808080819082b19, 0x0808080819190808, + 0x080808081919082b, 0x0808080819191919, 0x0808080819192b08, + 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, + 0x080808082b190819, 0x080808082b191908, 0x080808082b192b19, + 0x080808082b2b0808, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, + 0x0808081908192b2b, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, + 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 
0x080808192b190808, 0x0808082b08080808, + 0x0808082b0808082b, 0x0808082b08081919, 0x0808082b08082b08, + 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, + 0x0808082b19191919, 0x0808082b2b080808, 0x0808082b2b082b2b, + 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, + 0x0808190819081919, 0x0808190819082b08, 0x0808190819190819, + 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x0808191908080808, 0x080819190808082b, 0x0808191908081919, + 0x0808191908082b08, 0x0808191908190819, 0x0808191908191908, + 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, + 0x0808192b08080819, 0x0808192b08081908, 0x0808192b08190808, + 0x0808192b082b192b, 0x0808192b19080808, 0x0808192b1908082b, + 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, + 0x08082b0808190819, 0x08082b0808191908, 0x08082b08082b0808, + 0x08082b08082b1919, 0x08082b0819080819, 0x08082b0819081908, + 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b1919080808, + 0x08082b192b080819, 0x08082b192b082b19, 0x08082b2b08080808, + 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, + 0x081908080819082b, 0x0819080808191919, 0x0819080808192b08, + 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, + 0x0819080819190819, 0x0819080819191908, 0x08190808192b0808, + 0x08190808192b2b2b, 0x081908082b080819, 0x081908082b081908, + 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, + 0x0819081908191908, 0x08190819082b0808, 0x0819081919080819, + 0x0819081919081908, 0x0819081919190808, 0x081908192b080808, + 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, + 0x0819082b19080808, 0x0819082b192b0808, 0x0819190808080808, + 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, + 0x0819190819190808, 0x08191908192b1908, 0x081919082b080808, + 0x0819191908080819, 0x0819191908081908, 0x0819191908190808, + 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, + 0x08192b0808190808, 0x08192b080819082b, 0x08192b0819080808, + 0x08192b0819191908, 0x08192b082b08192b, 0x08192b1908080808, + 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, + 0x082b080808081919, 0x082b080808082b08, 0x082b080808082b2b, + 0x082b080808190819, 0x082b080808191908, 0x082b0808082b0808, + 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, + 0x082b081908081908, 0x082b081908190808, 0x082b081919080808, + 0x082b081919082b08, 0x082b0819192b1919, 
0x082b082b08080808, + 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, + 0x082b1908082b2b19, 0x082b190819080808, 0x082b191908080808, + 0x082b191919080819, 0x082b19191919082b, 0x082b19192b192b19, + 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, + 0x082b2b08082b0808, 0x082b2b0819191919, 0x082b2b082b082b08, + 0x082b2b082b2b082b, 0x082b2b19192b2b08, 0x082b2b192b190808, + 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, + 0x1908080808190808, 0x190808080819082b, 0x1908080808191919, + 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, + 0x1908080819082b08, 0x1908080819082b2b, 0x1908080819190819, + 0x1908080819191908, 0x19080808192b0808, 0x19080808192b1919, + 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, + 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x19080819082b0808, 0x1908081919080819, 0x1908081919081908, + 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b082b2b19, + 0x1908082b19080808, 0x1908190808080808, 0x190819080808082b, + 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, + 0x1908190819080819, 0x1908190819081908, 0x1908190819190808, + 0x190819082b080808, 0x190819082b191908, 0x1908191908080819, + 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, + 0x1908192b08082b2b, 0x1908192b19081908, 0x1908192b19190808, + 0x19082b0808080819, 0x19082b0808081908, 0x19082b0808190808, + 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, + 0x19082b1919081908, 0x19082b1919190808, 0x19082b19192b2b19, + 0x19082b2b08081908, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, + 0x1919080819080819, 0x1919080819081908, 0x1919080819190808, + 0x191908082b080808, 0x1919081908080819, 0x1919081908081908, + 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, + 0x1919082b2b2b2b2b, 0x1919190808080819, 0x1919190808081908, + 0x1919190808190808, 0x19191908082b0819, 0x1919190819080808, + 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, + 0x191919192b082b08, 0x1919192b082b0819, 0x1919192b192b2b08, + 0x1919192b2b2b0819, 0x19192b0808080808, 0x19192b0808191908, + 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, + 0x19192b2b2b081919, 0x192b080808080819, 0x192b080808081908, + 0x192b080808190808, 0x192b080819080808, 0x192b080819191908, + 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, + 0x192b082b2b19082b, 0x192b190808080808, 0x192b19080819192b, + 0x192b191908190808, 0x192b191919080808, 0x192b191919081919, + 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, 
+ 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, + 0x192b2b2b192b082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, + 0x2b0808082b080808, 0x2b0808082b08082b, 0x2b0808082b2b2b08, + 0x2b0808082b2b2b2b, 0x2b08081908080819, 0x2b08081908081908, + 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, + 0x2b08082b082b0808, 0x2b08082b2b080808, 0x2b08082b2b08082b, + 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, + 0x2b0819082b082b19, 0x2b08191908080808, 0x2b08191919081908, + 0x2b0819192b2b1919, 0x2b08192b08192b08, 0x2b08192b192b2b2b, + 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, + 0x2b082b082b2b2b08, 0x2b082b190808192b, 0x2b082b2b082b082b, + 0x2b082b2b2b080808, 0x2b082b2b2b082b08, 0x2b082b2b2b19192b, + 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, + 0x2b1908082b081908, 0x2b19081908080808, 0x2b190819082b082b, + 0x2b190819192b1908, 0x2b19082b1919192b, 0x2b19082b2b082b19, + 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, + 0x2b1919192b190808, 0x2b1919192b19082b, 0x2b19192b19080819, + 0x2b192b0819190819, 0x2b192b082b2b192b, 0x2b192b1919082b19, + 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, + 0x2b2b0808082b0808, 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, + 0x2b2b081919190819, 0x2b2b081919192b19, 0x2b2b08192b2b192b, + 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, + 0x2b2b190819080808, 0x2b2b19082b191919, 0x2b2b192b192b1919, + 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, 0x2b2b2b08082b0808, + 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, + 0x2b2b2b192b08192b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, + 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, + }); + +static dpct::global_memory iq3xxs_grid( + sycl::range<1>(256), + { + 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, + 0x04041404, 0x04041414, 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, + 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, 0x040c140c, 0x040c142c, + 0x040c1c04, 0x040c1c14, 0x040c240c, 0x040c2c24, 0x040c3e04, 0x04140404, + 0x04140414, 0x04140424, 0x04140c0c, 0x04141404, 0x04141414, 0x04141c0c, + 0x04141c1c, 0x04141c3e, 0x04142c0c, 0x04142c3e, 0x04143e2c, 0x041c040c, + 0x041c043e, 0x041c0c04, 0x041c0c14, 0x041c142c, 0x041c3e04, 0x04240c1c, + 0x04241c3e, 0x04242424, 0x04242c3e, 0x04243e1c, 0x04243e2c, 0x042c040c, + 0x042c043e, 0x042c1c14, 0x042c2c14, 0x04341c2c, 0x04343424, 0x043e0c04, + 0x043e0c24, 0x043e0c34, 0x043e241c, 0x043e340c, 0x0c04040c, 0x0c04041c, + 0x0c040c04, 0x0c040c14, 0x0c04140c, 0x0c04141c, 0x0c041c04, 0x0c041c14, + 0x0c041c24, 0x0c04243e, 0x0c042c04, 0x0c0c0404, 0x0c0c0414, 0x0c0c0c0c, + 0x0c0c1404, 0x0c0c1414, 0x0c14040c, 0x0c14041c, 0x0c140c04, 0x0c140c14, + 0x0c14140c, 0x0c141c04, 0x0c143e14, 0x0c1c0404, 0x0c1c0414, 0x0c1c1404, + 0x0c1c1c0c, 0x0c1c2434, 0x0c1c3434, 
0x0c24040c, 0x0c24042c, 0x0c242c04, + 0x0c2c1404, 0x0c2c1424, 0x0c2c2434, 0x0c2c3e0c, 0x0c34042c, 0x0c3e1414, + 0x0c3e2404, 0x14040404, 0x14040414, 0x14040c0c, 0x14040c1c, 0x14041404, + 0x14041414, 0x14041434, 0x14041c0c, 0x14042414, 0x140c040c, 0x140c041c, + 0x140c042c, 0x140c0c04, 0x140c0c14, 0x140c140c, 0x140c1c04, 0x140c341c, + 0x140c343e, 0x140c3e04, 0x14140404, 0x14140414, 0x14140c0c, 0x14140c3e, + 0x14141404, 0x14141414, 0x14141c3e, 0x14142404, 0x14142c2c, 0x141c040c, + 0x141c0c04, 0x141c0c24, 0x141c3e04, 0x141c3e24, 0x14241c2c, 0x14242c1c, + 0x142c041c, 0x142c143e, 0x142c240c, 0x142c3e24, 0x143e040c, 0x143e041c, + 0x143e0c34, 0x143e242c, 0x1c04040c, 0x1c040c04, 0x1c040c14, 0x1c04140c, + 0x1c04141c, 0x1c042c04, 0x1c04342c, 0x1c043e14, 0x1c0c0404, 0x1c0c0414, + 0x1c0c1404, 0x1c0c1c0c, 0x1c0c2424, 0x1c0c2434, 0x1c14040c, 0x1c14041c, + 0x1c140c04, 0x1c14142c, 0x1c142c14, 0x1c143e14, 0x1c1c0c0c, 0x1c1c1c1c, + 0x1c241c04, 0x1c24243e, 0x1c243e14, 0x1c2c0404, 0x1c2c0434, 0x1c2c1414, + 0x1c2c2c2c, 0x1c340c24, 0x1c341c34, 0x1c34341c, 0x1c3e1c1c, 0x1c3e3404, + 0x24040424, 0x24040c3e, 0x24041c2c, 0x24041c3e, 0x24042c1c, 0x24042c3e, + 0x240c3e24, 0x24141404, 0x24141c3e, 0x24142404, 0x24143404, 0x24143434, + 0x241c043e, 0x241c242c, 0x24240424, 0x24242c0c, 0x24243424, 0x242c142c, + 0x242c241c, 0x242c3e04, 0x243e042c, 0x243e0c04, 0x243e0c14, 0x243e1c04, + 0x2c040c14, 0x2c04240c, 0x2c043e04, 0x2c0c0404, 0x2c0c0434, 0x2c0c1434, + 0x2c0c2c2c, 0x2c140c24, 0x2c141c14, 0x2c143e14, 0x2c1c0414, 0x2c1c2c1c, + 0x2c240c04, 0x2c24141c, 0x2c24143e, 0x2c243e14, 0x2c2c0414, 0x2c2c1c0c, + 0x2c342c04, 0x2c3e1424, 0x2c3e2414, 0x34041424, 0x34042424, 0x34042434, + 0x34043424, 0x340c140c, 0x340c340c, 0x34140c3e, 0x34143424, 0x341c1c04, + 0x341c1c34, 0x34242424, 0x342c042c, 0x342c2c14, 0x34341c1c, 0x343e041c, + 0x343e140c, 0x3e04041c, 0x3e04042c, 0x3e04043e, 0x3e040c04, 0x3e041c14, + 0x3e042c14, 0x3e0c1434, 0x3e0c2404, 0x3e140c14, 0x3e14242c, 0x3e142c14, + 0x3e1c0404, 0x3e1c0c2c, 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, + 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, + }); + +static dpct::global_memory ksigns_iq2xs( + sycl::range<1>(128), + { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, + 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, + 154, 27, 156, 29, 30, 159, 160, 33, 34, 163, 36, 165, 166, + 39, 40, 169, 170, 43, 172, 45, 46, 175, 48, 177, 178, 51, + 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, 192, + 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, + 78, 207, 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, + 219, 92, 221, 222, 95, 96, 225, 226, 99, 228, 101, 102, 231, + 232, 105, 106, 235, 108, 237, 238, 111, 240, 113, 114, 243, 116, + 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, + }); + +static dpct::global_memory + ksigns64(sycl::range<1>(128), + { + 0x0000000000000000, 0xff000000000000ff, 0xff0000000000ff00, + 0x000000000000ffff, 0xff00000000ff0000, 0x0000000000ff00ff, + 0x0000000000ffff00, 0xff00000000ffffff, 0xff000000ff000000, + 0x00000000ff0000ff, 0x00000000ff00ff00, 0xff000000ff00ffff, + 0x00000000ffff0000, 0xff000000ffff00ff, 0xff000000ffffff00, + 0x00000000ffffffff, 0xff0000ff00000000, 0x000000ff000000ff, + 0x000000ff0000ff00, 0xff0000ff0000ffff, 0x000000ff00ff0000, + 0xff0000ff00ff00ff, 0xff0000ff00ffff00, 0x000000ff00ffffff, + 0x000000ffff000000, 0xff0000ffff0000ff, 0xff0000ffff00ff00, + 0x000000ffff00ffff, 0xff0000ffffff0000, 0x000000ffffff00ff, + 0x000000ffffffff00, 0xff0000ffffffffff, 0xff00ff0000000000, + 0x0000ff00000000ff, 
0x0000ff000000ff00, 0xff00ff000000ffff, + 0x0000ff0000ff0000, 0xff00ff0000ff00ff, 0xff00ff0000ffff00, + 0x0000ff0000ffffff, 0x0000ff00ff000000, 0xff00ff00ff0000ff, + 0xff00ff00ff00ff00, 0x0000ff00ff00ffff, 0xff00ff00ffff0000, + 0x0000ff00ffff00ff, 0x0000ff00ffffff00, 0xff00ff00ffffffff, + 0x0000ffff00000000, 0xff00ffff000000ff, 0xff00ffff0000ff00, + 0x0000ffff0000ffff, 0xff00ffff00ff0000, 0x0000ffff00ff00ff, + 0x0000ffff00ffff00, 0xff00ffff00ffffff, 0xff00ffffff000000, + 0x0000ffffff0000ff, 0x0000ffffff00ff00, 0xff00ffffff00ffff, + 0x0000ffffffff0000, 0xff00ffffffff00ff, 0xff00ffffffffff00, + 0x0000ffffffffffff, 0xffff000000000000, 0x00ff0000000000ff, + 0x00ff00000000ff00, 0xffff00000000ffff, 0x00ff000000ff0000, + 0xffff000000ff00ff, 0xffff000000ffff00, 0x00ff000000ffffff, + 0x00ff0000ff000000, 0xffff0000ff0000ff, 0xffff0000ff00ff00, + 0x00ff0000ff00ffff, 0xffff0000ffff0000, 0x00ff0000ffff00ff, + 0x00ff0000ffffff00, 0xffff0000ffffffff, 0x00ff00ff00000000, + 0xffff00ff000000ff, 0xffff00ff0000ff00, 0x00ff00ff0000ffff, + 0xffff00ff00ff0000, 0x00ff00ff00ff00ff, 0x00ff00ff00ffff00, + 0xffff00ff00ffffff, 0xffff00ffff000000, 0x00ff00ffff0000ff, + 0x00ff00ffff00ff00, 0xffff00ffff00ffff, 0x00ff00ffffff0000, + 0xffff00ffffff00ff, 0xffff00ffffffff00, 0x00ff00ffffffffff, + 0x00ffff0000000000, 0xffffff00000000ff, 0xffffff000000ff00, + 0x00ffff000000ffff, 0xffffff0000ff0000, 0x00ffff0000ff00ff, + 0x00ffff0000ffff00, 0xffffff0000ffffff, 0xffffff00ff000000, + 0x00ffff00ff0000ff, 0x00ffff00ff00ff00, 0xffffff00ff00ffff, + 0x00ffff00ffff0000, 0xffffff00ffff00ff, 0xffffff00ffffff00, + 0x00ffff00ffffffff, 0xffffffff00000000, 0x00ffffff000000ff, + 0x00ffffff0000ff00, 0xffffffff0000ffff, 0x00ffffff00ff0000, + 0xffffffff00ff00ff, 0xffffffff00ffff00, 0x00ffffff00ffffff, + 0x00ffffffff000000, 0xffffffffff0000ff, 0xffffffffff00ff00, + 0x00ffffffff00ffff, 0xffffffffffff0000, 0x00ffffffffff00ff, + 0x00ffffffffffff00, 0xffffffffffffffff, + }); +//#endif + +static dpct::global_memory + kmask_iq2xs(sycl::range<1>(8), {1, 2, 4, 8, 16, 32, 64, 128}); + +template +static void dequantize_block_iq2_xxs(const void * __restrict__ vx, dst_t * __restrict__ yy, + const sycl::nd_item<3> &item_ct1, + const uint64_t *iq2xxs_grid_ptr, + const uint8_t *ksigns_iq2xs_ptr, + const uint8_t *kmask_iq2xs_ptr) { + + const int i = item_ct1.get_group(2); + const block_iq2_xxs * x = (const block_iq2_xxs *) vx; + + const int tid = item_ct1.get_local_id(2); +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid_ptr + aux8[il]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.25f; + const uint8_t signs = ksigns_iq2xs_ptr[(aux32 >> 7*il) & 127]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs_ptr[j] ? 
-1.f : 1.f); +#else + assert(false); +#endif + +} + +template +static void dequantize_block_iq2_xs(const void * __restrict__ vx, dst_t * __restrict__ yy, + const sycl::nd_item<3> &item_ct1, + const uint64_t *iq2xs_grid, + const uint8_t *ksigns_iq2xs, + const uint8_t *kmask_iq2xs) { + + const int i = item_ct1.get_group(2); + const block_iq2_xs * x = (const block_iq2_xs *) vx; + + const int tid = item_ct1.get_local_id(2); +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[il] & 511)); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib] >> 4*(il/2)) & 0xf)) * 0.25f; + const uint8_t signs = ksigns_iq2xs[q2[il] >> 9]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); +#else + assert(false); +#endif + +} + +template +static void dequantize_block_iq3_xxs(const void * __restrict__ vx, dst_t * __restrict__ yy, + const sycl::nd_item<3> &item_ct1, + const uint32_t *iq3xxs_grid, + const uint8_t *ksigns_iq2xs, + const uint8_t *kmask_iq2xs) { + + const int i = item_ct1.get_group(2); + const block_iq3_xxs * x = (const block_iq3_xxs *) vx; + + const int tid = item_ct1.get_local_id(2); +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint8_t * q3 = x[i].qs + 8*ib; + const uint16_t * gas = (const uint16_t *)(x[i].qs + QK_K/4) + 2*ib; + const uint8_t * grid1 = (const uint8_t *)(iq3xxs_grid + q3[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(iq3xxs_grid + q3[2*il+1]); + const uint32_t aux32 = gas[0] | (gas[1] << 16); + const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.5f; + const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*il) & 127]; + for (int j = 0; j < 4; ++j) { + y[j+0] = d * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = d * grid2[j] * (signs & kmask_iq2xs[j+4] ? 
-1.f : 1.f); + } +#else + assert(false); +#endif + +} + /* DPCT1110:4: The total declared local variable size in device function dequantize_mul_mat_vec_q2_k exceeds 128 bytes and may cause high register @@ -4446,13 +5257,16 @@ static void dequantize_mul_mat_vec_q2_k(const void *__restrict__ vx, } #else - const int tid = threadIdx.x/(2*K_QUANTS_PER_ITERATION); // 0...15 or 0...7 - const int ix = threadIdx.x%(2*K_QUANTS_PER_ITERATION); // 0....1 or 0...3 + const int tid = item_ct1.get_local_id(2) / + (2 * K_QUANTS_PER_ITERATION); // 0...15 or 0...7 + const int ix = item_ct1.get_local_id(2) % + (2 * K_QUANTS_PER_ITERATION); // 0....1 or 0...3 const int offset = tid * K_QUANTS_PER_ITERATION; uint32_t uaux[2]; const uint8_t * d = (const uint8_t *)uaux; + for (int i = ix; i < num_blocks_per_row; i += 2*K_QUANTS_PER_ITERATION) { const float * y = yy + i * QK_K + offset; @@ -4462,7 +5276,8 @@ static void dequantize_mul_mat_vec_q2_k(const void *__restrict__ vx, uaux[0] = s[0] & 0x0f0f0f0f; uaux[1] = (s[0] >> 4) & 0x0f0f0f0f; - const float2 dall = __half22float2(x[i].dm); + const sycl::float2 dall = + x[i].dm.convert(); float sum1 = 0, sum2 = 0; for (int l = 0; l < K_QUANTS_PER_ITERATION; ++l) { @@ -4473,8 +5288,9 @@ static void dequantize_mul_mat_vec_q2_k(const void *__restrict__ vx, + y[l+48] * d[3] * ((ql >> 6) & 3); sum2 += y[l+0] * d[4] + y[l+16] * d[5] + y[l+32] * d[6] + y[l+48] * d[7]; } - tmp += dall.x * sum1 - dall.y * sum2; + tmp += dall.x() * sum1 - dall.y() * sum2; } + #endif // sum up partial sums and write back result @@ -4569,8 +5385,8 @@ static void dequantize_mul_mat_vec_q3_k(const void *__restrict__ vx, } #else - const int tid = threadIdx.x/(2*K_QUANTS_PER_ITERATION); // 0...15 or 0...7 - const int ix = threadIdx.x%(2*K_QUANTS_PER_ITERATION); // 0....1 or 0...3 + const int tid = item_ct1.get_local_id(2)/(2*K_QUANTS_PER_ITERATION); // 0...15 or 0...7 + const int ix = item_ct1.get_local_id(2)%(2*K_QUANTS_PER_ITERATION); // 0....1 or 0...3 const int offset = tid * K_QUANTS_PER_ITERATION; // 0...15 or 0...14 const int in = offset/8; // 0 or 1 const int im = offset%8; // 0...7 @@ -4719,8 +5535,8 @@ static void dequantize_mul_mat_vec_q4_k(const void *__restrict__ vx, } #else - const int tid = threadIdx.x/(2*K_QUANTS_PER_ITERATION); // 0...15 - const int ix = threadIdx.x%(2*K_QUANTS_PER_ITERATION); + const int tid = item_ct1.get_local_id(2)/(2*K_QUANTS_PER_ITERATION); // 0...15 + const int ix = item_ct1.get_local_id(2)%(2*K_QUANTS_PER_ITERATION); const int step = tid * K_QUANTS_PER_ITERATION; @@ -4860,8 +5676,8 @@ static void dequantize_mul_mat_vec_q5_k(const void *__restrict__ vx, } #else - const int tid = threadIdx.x/(2*K_QUANTS_PER_ITERATION); // 0...15 - const int ix = threadIdx.x%(2*K_QUANTS_PER_ITERATION); + const int tid = item_ct1.get_local_id(2)/(2*K_QUANTS_PER_ITERATION); // 0...15 + const int ix = item_ct1.get_local_id(2)%(2*K_QUANTS_PER_ITERATION); const int step = tid * K_QUANTS_PER_ITERATION; const int im = step/8; const int in = step%8; @@ -4969,8 +5785,8 @@ static void dequantize_mul_mat_vec_q6_k(const void * __restrict__ vx, const floa #else - const int tid = threadIdx.x/(2*K_QUANTS_PER_ITERATION); // 0...7 - const int ix = threadIdx.x%(2*K_QUANTS_PER_ITERATION); // 0...3 + const int tid = item_ct1.get_local_id(2)/(2*K_QUANTS_PER_ITERATION); // 0...7 + const int ix = item_ct1.get_local_id(2)%(2*K_QUANTS_PER_ITERATION); // 0...3 const int step = tid * K_QUANTS_PER_ITERATION; @@ -5170,6 +5986,21 @@ static void dequantize_block(const void * __restrict__ vx, dst_t * 
__restrict__ y[iybs + iqs + y_offset] = v.y(); } +template +static void convert_unary(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, + const sycl::nd_item<3> &item_ct1) { + const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + + item_ct1.get_local_id(2); + + if (i >= k) { + return; + } + + const src_t * x = (src_t *) vx; + + y[i] = x[i]; +} + // VDR = vec dot ratio, how many contiguous integers each thread processes when the vec dot kernel is called // MMVQ = mul_mat_vec_q, MMQ = mul_mat_q @@ -6588,8 +7419,8 @@ vec_dot_q4_K_q8_1(const void *__restrict__ vbq, const float dall = bq4_K->dm[0]; const float dmin = bq4_K->dm[1]; - const float d8_1 = __low2float(bq8_1[0].ds); - const float d8_2 = __low2float(bq8_1[1].ds); + const float d8_1 = bq8_1[0].ds[0]; + const float d8_2 = bq8_1[1].ds[1]; const int ui1 = *((const int *)bq8_1[0].qs + (iqs/2)); const int ui2 = *((const int *)bq8_1[0].qs + (iqs/2) + 4); @@ -6600,10 +7431,10 @@ vec_dot_q4_K_q8_1(const void *__restrict__ vbq, const int v1 = q4[0]; const int v2 = q4[4]; - const int dot1 = __dp4a(ui2, v2 & 0x0f0f0f0f, __dp4a(ui1, v1 & 0x0f0f0f0f, 0)); - const int dot2 = __dp4a(ui4, (v2 >> 4) & 0x0f0f0f0f, __dp4a(ui3, (v1 >> 4) & 0x0f0f0f0f, 0)); - const int dot3 = __dp4a(0x01010101, ui2, __dp4a(0x01010101, ui1, 0)); - const int dot4 = __dp4a(0x01010101, ui4, __dp4a(0x01010101, ui3, 0)); + const int dot1 = dpct::dp4a(ui2, v2 & 0x0f0f0f0f, dpct::dp4a(ui1, v1 & 0x0f0f0f0f, 0)); + const int dot2 = dpct::dp4a(ui4, (v2 >> 4) & 0x0f0f0f0f, dpct::dp4a(ui3, (v1 >> 4) & 0x0f0f0f0f, 0)); + const int dot3 = dpct::dp4a(0x01010101, ui2, dpct::dp4a(0x01010101, ui1, 0)); + const int dot4 = dpct::dp4a(0x01010101, ui4, dpct::dp4a(0x01010101, ui3, 0)); sumf_d += d8_1 * (dot1 * s[0]) + d8_2 * (dot2 * s[1]); sumf_m += d8_1 * (dot3 * s[2]) + d8_2 * (dot4 * s[3]); @@ -6772,8 +7603,8 @@ vec_dot_q5_K_q8_1(const void *__restrict__ vbq, const float d = bq5_K->d; - const float d8_1 = __low2half(bq8_1[0].ds); - const float d8_2 = __low2half(bq8_1[1].ds); + const float d8_1 = bq8_1[0].ds[0]; + const float d8_2 = bq8_1[1].ds[1]; const int ui1 = *((const int *)bq8_1[0].qs + (iqs/2)); const int ui2 = *((const int *)bq8_1[0].qs + (iqs/2) + 4); @@ -6794,8 +7625,8 @@ vec_dot_q5_K_q8_1(const void *__restrict__ vbq, const int v3 = (((vh >> 0) & 0x10101010) ^ 0x10101010) | ((vl1 >> 4) & 0x0f0f0f0f); const int v4 = (((vh >> 2) & 0x10101010) ^ 0x10101010) | ((vl2 >> 4) & 0x0f0f0f0f); - const float sumf_d = d8_1 * (__dp4a(ui1, v1, 0) * s[0] + __dp4a(ui2, v2, 0) * s[1]) - + d8_2 * (__dp4a(ui3, v3, 0) * s[2] + __dp4a(ui4, v4, 0) * s[3]); + const float sumf_d = d8_1 * (dpct::dp4a(ui1, v1, 0) * s[0] + dpct::dp4a(ui2, v2, 0) * s[1]) + + d8_2 * (dpct::dp4a(ui3, v3, 0) * s[2] + dpct::dp4a(ui4, v4, 0) * s[3]); return d * sumf_d; @@ -7051,6 +7882,150 @@ static __dpct_inline__ float vec_dot_q6_K_q8_1_mul_mat( return vec_dot_q6_K_q8_1_impl_mmq(&x_ql[index_x], &y_qs[index_y], sc, x_dmf[i * (WARP_SIZE/QI6_K) + i/QI6_K], &y_df[index_y/QI8_1]); } + +static __dpct_inline__ float +vec_dot_iq2_xxs_q8_1(const void *__restrict__ vbq, + const block_q8_1 *__restrict__ bq8_1, const int &iqs, + const uint64_t *iq2xxs_grid, const uint8_t *ksigns_iq2xs, + const uint8_t *kmask_iq2xs) { +#if QK_K == 256 + const block_iq2_xxs * bq2 = (const block_iq2_xxs *) vbq; + +#if QR2_XXS == 8 + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const int8_t * q8 = bq8_1[ib32].qs; + uint32_t aux32 = q2[2] | (q2[3] << 16); + int sumi = 
0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); + const uint8_t signs = ksigns_iq2xs[aux32 & 127]; + for (int j = 0; j < 8; ++j) { + sumi += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + aux32 >>= 7; + } + const float d = (float)bq2->d * (0.5f + aux32) * bq8_1[ib32].ds[0] * 0.25f; + return d * sumi; +#else + // iqs is 0...15 + const int ib32 = iqs/2; + const int il = iqs%2; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid1 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+1]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)bq2->d * (0.5f + (aux32 >> 28)) * bq8_1[ib32].ds[0] * 0.25f; + const uint8_t signs1 = ksigns_iq2xs[(aux32 >> 14*il) & 127]; + const uint8_t signs2 = ksigns_iq2xs[(aux32 >> (14*il + 7)) & 127]; + const int8_t * q8 = bq8_1[ib32].qs + 16*il; + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j+0] * grid1[j] * (signs1 & kmask_iq2xs[j] ? -1 : 1); + sumi2 += q8[j+8] * grid2[j] * (signs2 & kmask_iq2xs[j] ? -1 : 1); + } + return d * (sumi1 + sumi2); +#endif +#else + assert(false); + return 0.f; +#endif +} + +static __dpct_inline__ float +vec_dot_iq2_xs_q8_1(const void *__restrict__ vbq, + const block_q8_1 *__restrict__ bq8_1, const int &iqs, + const uint64_t *iq2xs_grid, const uint64_t *ksigns64) { +#if DPCT_COMPATIBILITY_TEMP >= \ + MIN_CC_DP4A // lowest compute capability for integer intrinsics +#if QK_K == 256 + const block_iq2_xs * bq2 = (const block_iq2_xs *) vbq; + + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + const uint8_t ls1 = bq2->scales[ib32] & 0xf; + const uint8_t ls2 = bq2->scales[ib32] >> 4; + int sumi1 = 0; + for (int l = 0; l < 2; ++l) { + const uint32_t * grid = (const uint32_t *)(iq2xs_grid + (q2[l] & 511)); + const uint32_t * signs = (const uint32_t *)(ksigns64 + (q2[l] >> 9)); + const int grid_l = dpct::vectorized_binary( + grid[0] ^ signs[0], signs[0], std::minus<>()); + const int grid_h = dpct::vectorized_binary( + grid[1] ^ signs[1], signs[1], std::minus<>()); + sumi1 = dpct::dp4a(grid_l, *((const int *)q8 + 0), sumi1); + sumi1 = dpct::dp4a(grid_h, *((const int *)q8 + 1), sumi1); + q8 += 8; + } + int sumi2 = 0; + for (int l = 2; l < 4; ++l) { + const uint32_t * grid = (const uint32_t *)(iq2xs_grid + (q2[l] & 511)); + const uint32_t * signs = (const uint32_t *)(ksigns64 + (q2[l] >> 9)); + const int grid_l = dpct::vectorized_binary( + grid[0] ^ signs[0], signs[0], std::minus<>()); + const int grid_h = dpct::vectorized_binary( + grid[1] ^ signs[1], signs[1], std::minus<>()); + sumi2 = dpct::dp4a(grid_l, *((const int *)q8 + 0), sumi2); + sumi2 = dpct::dp4a(grid_h, *((const int *)q8 + 1), sumi2); + q8 += 8; + } + const float d = (float)bq2->d * bq8_1[ib32].ds[0] * 0.25f; + return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); +#else + assert(false); + return 0.f; +#endif +#else + assert(false); + return 0.f; +#endif +} + +static __dpct_inline__ float +vec_dot_iq3_xxs_q8_1(const void *__restrict__ vbq, + const block_q8_1 *__restrict__ bq8_1, const int &iqs, + const uint32_t *iq3xxs_grid, const uint64_t *ksigns64) { +#if DPCT_COMPATIBILITY_TEMP >= \ + MIN_CC_DP4A // lowest compute capability for integer intrinsics +#if QK_K == 256 + const block_iq3_xxs * bq2 = (const block_iq3_xxs *) vbq; + + const int ib32 = iqs; + const uint8_t * q3 = bq2->qs + 
8*ib32; + const uint16_t * gas = (const uint16_t *)(bq2->qs + QK_K/4) + 2*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + uint32_t aux32 = gas[0] | (gas[1] << 16); + int sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint32_t * grid1 = iq3xxs_grid + q3[2*l+0]; + const uint32_t * grid2 = iq3xxs_grid + q3[2*l+1]; + const uint32_t * signs = (const uint32_t *)(ksigns64 + (aux32 & 127)); + const int grid_l = dpct::vectorized_binary( + grid1[0] ^ signs[0], signs[0], std::minus<>()); + const int grid_h = dpct::vectorized_binary( + grid2[0] ^ signs[1], signs[1], std::minus<>()); + sumi = dpct::dp4a(grid_l, *((int *)q8 + 0), sumi); + sumi = dpct::dp4a(grid_h, *((int *)q8 + 1), sumi); + q8 += 8; + aux32 >>= 7; + } + const float d = (float)bq2->d * (0.5f + aux32) * bq8_1[ib32].ds[0] * 0.5f; + return d * sumi; +#else + assert(false); + return 0.f; +#endif +#else + assert(false); + return 0.f; +#endif +} + + template @@ -7632,7 +8607,8 @@ template static void template static void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows, - const sycl::nd_item<3> &item_ct1) { + const sycl::nd_item<3> &item_ct1, + const uint32_t *iq3xxs_grid_ptr, const uint64_t *ksigns64_ptr) { const int row = item_ct1.get_group(2) * item_ct1.get_local_range(1) + item_ct1.get_local_id(1); @@ -7649,12 +8625,11 @@ static void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict_ const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = 0; i < blocks_per_row; i += blocks_per_warp) { - const int ibx = row * blocks_per_row + i + - item_ct1.get_local_id(2) / (qi / vdr); // x block index + for (int i = item_ct1.get_local_id(2) / (qi / vdr); i < blocks_per_row; + i += blocks_per_warp) { + const int ibx = row*blocks_per_row + i; // x block index - const int iby = (i + item_ct1.get_local_id(2) / (qi / vdr)) * - (qk / QK8_1); // y block index that aligns with ibx + const int iby = i * (qk/QK8_1); // y block index that aligns with ibx const int iqs = vdr * @@ -7676,6 +8651,145 @@ static void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict_ } } +template +static void mul_mat_vec_q_iq2_xxs_q8_1(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows, + const sycl::nd_item<3> &item_ct1, + const uint64_t *iq2xxs_grid_ptr, const uint8_t *ksigns_iq2xs_ptr, + const uint8_t *kmask_iq2xs_ptr ) { + const int row = item_ct1.get_group(2) * item_ct1.get_local_range(1) + + item_ct1.get_local_id(1); + + if (row >= nrows) { + return; + } + + const int blocks_per_row = ncols / qk; + const int blocks_per_warp = vdr * WARP_SIZE / qi; + +// partial sum for each thread + float tmp = 0.0f; + + const block_q_t * x = (const block_q_t *) vx; + const block_q8_1 * y = (const block_q8_1 *) vy; + + for (int i = item_ct1.get_local_id(2) / (qi / vdr); i < blocks_per_row; + i += blocks_per_warp) { + const int ibx = row*blocks_per_row + i; // x block index + + const int iby = i * (qk/QK8_1); // y block index that aligns with ibx + + const int iqs = + vdr * + (item_ct1.get_local_id(2) % + (qi / vdr)); // x block quant index when casting the quants to int + + tmp += vec_dot_iq2_xxs_q8_1(&x[ibx], &y[iby], iqs, iq2xxs_grid_ptr, ksigns_iq2xs_ptr, kmask_iq2xs_ptr); + } + + // sum up partial sums and write back result +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + tmp += + dpct::permute_sub_group_by_xor(item_ct1.get_sub_group(), tmp, 
mask); + } + + if (item_ct1.get_local_id(2) == 0) { + dst[row] = tmp; + } +} + +template +static void mul_mat_vec_q_iq2_xs_q8_1(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows, + const sycl::nd_item<3> &item_ct1, + const uint64_t *iq2xs_grid_ptr, const uint64_t *ksigns64_ptr ) { + const int row = item_ct1.get_group(2) * item_ct1.get_local_range(1) + + item_ct1.get_local_id(1); + + if (row >= nrows) { + return; + } + + const int blocks_per_row = ncols / qk; + const int blocks_per_warp = vdr * WARP_SIZE / qi; + +// partial sum for each thread + float tmp = 0.0f; + + const block_q_t * x = (const block_q_t *) vx; + const block_q8_1 * y = (const block_q8_1 *) vy; + + for (int i = item_ct1.get_local_id(2) / (qi / vdr); i < blocks_per_row; + i += blocks_per_warp) { + const int ibx = row*blocks_per_row + i; // x block index + + const int iby = i * (qk/QK8_1); // y block index that aligns with ibx + + const int iqs = + vdr * + (item_ct1.get_local_id(2) % + (qi / vdr)); // x block quant index when casting the quants to int + + tmp += vec_dot_iq2_xs_q8_1(&x[ibx], &y[iby], iqs, iq2xs_grid_ptr, ksigns64_ptr); + } + + // sum up partial sums and write back result +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + tmp += + dpct::permute_sub_group_by_xor(item_ct1.get_sub_group(), tmp, mask); + } + + if (item_ct1.get_local_id(2) == 0) { + dst[row] = tmp; + } +} + +template +static void mul_mat_vec_q_iq3_xxs_q8_1(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows, + const sycl::nd_item<3> &item_ct1, + const uint32_t *iq3xxs_grid_ptr, const uint64_t *ksigns64_ptr ) { + const int row = item_ct1.get_group(2) * item_ct1.get_local_range(1) + + item_ct1.get_local_id(1); + + if (row >= nrows) { + return; + } + + const int blocks_per_row = ncols / qk; + const int blocks_per_warp = vdr * WARP_SIZE / qi; + +// partial sum for each thread + float tmp = 0.0f; + + const block_q_t * x = (const block_q_t *) vx; + const block_q8_1 * y = (const block_q8_1 *) vy; + + for (int i = item_ct1.get_local_id(2) / (qi / vdr); i < blocks_per_row; + i += blocks_per_warp) { + const int ibx = row*blocks_per_row + i; // x block index + + const int iby = i * (qk/QK8_1); // y block index that aligns with ibx + + const int iqs = + vdr * + (item_ct1.get_local_id(2) % + (qi / vdr)); // x block quant index when casting the quants to int + + tmp += vec_dot_iq3_xxs_q8_1(&x[ibx], &y[iby], iqs, iq3xxs_grid_ptr, ksigns64_ptr); + } + + // sum up partial sums and write back result +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + tmp += + dpct::permute_sub_group_by_xor(item_ct1.get_sub_group(), tmp, mask); + } + + if (item_ct1.get_local_id(2) == 0) { + dst[row] = tmp; + } +} + template static void dequantize_mul_mat_vec(const void * __restrict__ vx, const dfloat * __restrict__ y, float * __restrict__ dst, const int ncols, const int nrows, const sycl::nd_item<3> &item_ct1) { @@ -9109,7 +10223,18 @@ static void dequantize_row_q2_K_sycl(const void *vx, dst_t *y, const int k, }); } #else - dequantize_block_q2_K<<>>(vx, y); + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q2_K(vx, y, item_ct1); + }); + } + #endif } @@ -9130,10 +10255,57 @@ static void dequantize_row_q3_K_sycl(const 
void *vx, dst_t *y, const int k, }); } #else - dequantize_block_q3_K<<>>(vx, y); + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q3_K(vx, y, item_ct1); + }); + } #endif } +template +static void dequantize_row_q4_0_sycl(const void *vx, dst_t *y, const int k, + dpct::queue_ptr stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q4_0(vx, y, nb32, item_ct1); + }); + } +} + +template +static void dequantize_row_q4_1_sycl(const void *vx, dst_t *y, const int k, + dpct::queue_ptr stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q4_1(vx, y, nb32, item_ct1); + }); + } +} + + template static void dequantize_row_q4_K_sycl(const void *vx, dst_t *y, const int k, dpct::queue_ptr stream) { @@ -9168,7 +10340,18 @@ static void dequantize_row_q5_K_sycl(const void *vx, dst_t *y, const int k, }); } #else - dequantize_block_q5_K<<>>(vx, y); + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q5_K(vx, y, item_ct1); + }); + } + #endif } @@ -9189,11 +10372,132 @@ static void dequantize_row_q6_K_sycl(const void *vx, dst_t *y, const int k, }); } #else - dequantize_block_q6_K<<>>(vx, y); + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_q6_K(vx, y, item_ct1); + }); + } + #endif } -static to_fp16_sycl_t ggml_get_to_fp16_sycl(ggml_type type) { + +template +static void dequantize_row_iq2_xxs_sycl(const void *vx, dst_t *y, const int k, + dpct::queue_ptr stream) { + const int nb = k / QK_K; + { + iq2xxs_grid.init(*stream); + ksigns_iq2xs.init(*stream); + kmask_iq2xs.init(*stream); + + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->submit([&](sycl::handler &cgh) { + auto iq2xxs_grid_ptr_ct1 = iq2xxs_grid.get_ptr(); + auto ksigns_iq2xs_ptr_ct1 = ksigns_iq2xs.get_ptr(); + auto kmask_iq2xs_ptr_ct1 = kmask_iq2xs.get_ptr(); + + cgh.parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_iq2_xxs( + vx, y, item_ct1, iq2xxs_grid_ptr_ct1, + ksigns_iq2xs_ptr_ct1, kmask_iq2xs_ptr_ct1); + }); + }); + } +} + +template +static void dequantize_row_iq2_xs_sycl(const void *vx, dst_t *y, const int k, + dpct::queue_ptr stream) { + const int nb = k / QK_K; + { + iq2xs_grid.init(*stream); + ksigns_iq2xs.init(*stream); + kmask_iq2xs.init(*stream); + + dpct::has_capability_or_fail(stream->get_device(), + 
{sycl::aspect::fp16}); + + stream->submit([&](sycl::handler &cgh) { + auto iq2xs_grid_ptr_ct1 = iq2xs_grid.get_ptr(); + auto ksigns_iq2xs_ptr_ct1 = ksigns_iq2xs.get_ptr(); + auto kmask_iq2xs_ptr_ct1 = kmask_iq2xs.get_ptr(); + + cgh.parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_iq2_xs( + vx, y, item_ct1, iq2xs_grid_ptr_ct1, + ksigns_iq2xs_ptr_ct1, kmask_iq2xs_ptr_ct1); + }); + }); + } +} + +template +static void dequantize_row_iq3_xxs_sycl(const void *vx, dst_t *y, const int k, + dpct::queue_ptr stream) { + const int nb = k / QK_K; + { + iq3xxs_grid.init(*stream); + ksigns_iq2xs.init(*stream); + kmask_iq2xs.init(*stream); + + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns_iq2xs_ptr_ct1 = ksigns_iq2xs.get_ptr(); + auto kmask_iq2xs_ptr_ct1 = kmask_iq2xs.get_ptr(); + + cgh.parallel_for(sycl::nd_range<3>(sycl::range<3>(1, 1, nb) * + sycl::range<3>(1, 1, 32), + sycl::range<3>(1, 1, 32)), + [=](sycl::nd_item<3> item_ct1) { + dequantize_block_iq3_xxs( + vx, y, item_ct1, iq3xxs_grid_ptr_ct1, + ksigns_iq2xs_ptr_ct1, kmask_iq2xs_ptr_ct1); + }); + }); + } +} + +template +static void convert_unary_sycl(const void *__restrict__ vx, + dst_t *__restrict__ y, const int k, + dpct::queue_ptr stream) { + const int num_blocks = (k + SYCL_DEQUANTIZE_BLOCK_SIZE - 1) / SYCL_DEQUANTIZE_BLOCK_SIZE; + { + dpct::has_capability_or_fail(stream->get_device(), + {sycl::aspect::fp16}); + + stream->parallel_for( + sycl::nd_range<3>( + sycl::range<3>(1, 1, num_blocks) * + sycl::range<3>(1, 1, SYCL_DEQUANTIZE_BLOCK_SIZE), + sycl::range<3>(1, 1, SYCL_DEQUANTIZE_BLOCK_SIZE)), + [=](sycl::nd_item<3> item_ct1) { + convert_unary(vx, y, k, item_ct1); + }); + } +} + + +static to_fp16_sycl_t ggml_get_to_fp16_sycl(ggml_type type) try { + int id; switch (type) { case GGML_TYPE_Q4_0: return dequantize_block_sycl; @@ -9215,19 +10519,30 @@ static to_fp16_sycl_t ggml_get_to_fp16_sycl(ggml_type type) { return dequantize_row_q5_K_sycl; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_sycl; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_sycl; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_sycl; + case GGML_TYPE_IQ3_XXS: + return dequantize_row_iq3_xxs_sycl; case GGML_TYPE_F32: - return dequantize_block_sycl<1, 1, convert_f32>; + return convert_unary_sycl; default: return nullptr; } } +catch (sycl::exception const &exc) { + std::cerr << exc.what() << "Exception caught at file:" << __FILE__ + << ", line:" << __LINE__ << std::endl; + std::exit(1); +} static to_fp32_sycl_t ggml_get_to_fp32_sycl(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: - return dequantize_block_sycl; + return dequantize_row_q4_0_sycl; case GGML_TYPE_Q4_1: - return dequantize_block_sycl; + return dequantize_row_q4_1_sycl; case GGML_TYPE_Q5_0: return dequantize_block_sycl; case GGML_TYPE_Q5_1: @@ -9244,8 +10559,14 @@ static to_fp32_sycl_t ggml_get_to_fp32_sycl(ggml_type type) { return dequantize_row_q5_K_sycl; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_sycl; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_sycl; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_sycl; + case GGML_TYPE_IQ3_XXS: + return dequantize_row_iq3_xxs_sycl; case GGML_TYPE_F16: - return dequantize_block_sycl<1, 1, convert_f16>; + return convert_unary_sycl; default: return nullptr; } @@ -9455,24 
+10776,385 @@ static void convert_mul_mat_vec_f16_sycl(const void *vx, const dfloat *y, } } -template -static void mul_mat_vec_q_sycl_submitter(const void *vx, const void *vy, + +static void mul_mat_vec_q4_0_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK4_0 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q4_1_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK4_1 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q5_0_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK5_0 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q5_1_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK5_1 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, 
ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q8_0_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK8_0 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q2_K_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q3_K_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q4_K_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q5_K_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + 
const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_q6_K_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_iq2_xxs_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq2xxs_grid.init(*stream); + ksigns_iq2xs.init(*stream); + kmask_iq2xs.init(*stream); + + + stream->submit([&](sycl::handler &cgh) { + auto iq2xxs_grid_ptr_ct1 = iq2xxs_grid.get_ptr(); + auto ksigns_iq2xs_ptr_ct1 = ksigns_iq2xs.get_ptr(); + auto kmask_iq2xs_ptr_ct1 = kmask_iq2xs.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q_iq2_xxs_q8_1( + vx, vy, dst, ncols, nrows, item_ct1, + iq2xxs_grid_ptr_ct1, ksigns_iq2xs_ptr_ct1, kmask_iq2xs_ptr_ct1); + }); + }); + } +} + +static void mul_mat_vec_iq2_xs_q8_1_sycl(const void *vx, const void *vy, float *dst, const int ncols, const int nrows, dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK4_0 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), [= - ](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q( - vx, vy, dst, ncols, nrows, item_ct1); - }); + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq2xs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq2xs_grid_ptr_ct1 = 
iq2xs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q_iq2_xs_q8_1( + vx, vy, dst, ncols, nrows, item_ct1, + iq2xs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } } +static void mul_mat_vec_iq3_xxs_q8_1_sycl(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + { + iq3xxs_grid.init(*stream); + ksigns64.init(*stream); + + stream->submit([&](sycl::handler &cgh) { + auto iq3xxs_grid_ptr_ct1 = iq3xxs_grid.get_ptr(); + auto ksigns64_ptr_ct1 = ksigns64.get_ptr(); + + cgh.parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), + [=](sycl::nd_item<3> item_ct1) + [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q_iq3_xxs_q8_1( + vx, vy, dst, ncols, nrows, item_ct1, + iq3xxs_grid_ptr_ct1, ksigns64_ptr_ct1); + }); + }); + } +} + + static void ggml_mul_mat_q4_0_q8_1_sycl(const void *vx, const void *vy, float *dst, const int ncols_x, const int nrows_x, const int ncols_y, @@ -11451,7 +13133,7 @@ struct sycl_pool_alloc { device_id = get_current_device_id(); device_index = g_sycl_gpu_mgr->get_index(device_id); ptr = (T *) ggml_sycl_pool_malloc(device_index, size * sizeof(T), &this->actual_size); - // GGML_SYCL_DEBUG("alloc %lu return %p actual size=%lu\n", size * sizeof(T), ptr, this->actual_size); + // GGML_SYCL_DEBUG("sycl_pool_alloc %lu return %p actual size=%lu\n", size * sizeof(T), ptr, this->actual_size); return ptr; } @@ -12242,63 +13924,46 @@ inline void ggml_sycl_op_mul_mat_vec_q( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; - // TODO: support these quantization types - GGML_ASSERT(!(src0->type == GGML_TYPE_IQ2_XXS || - src0->type == GGML_TYPE_IQ2_XS || - src0->type == GGML_TYPE_IQ3_XXS || - src0->type == GGML_TYPE_IQ1_S)); - switch (src0->type) { case GGML_TYPE_Q4_0: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q4_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_1: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q4_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_0: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q5_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_1: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q5_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q8_0: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q8_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q2_K: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q2_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q3_K: - mul_mat_vec_q_sycl_submitter( - 
src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q3_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_K: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q4_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_K: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q5_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q6_K: - mul_mat_vec_q_sycl_submitter( - src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q6_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; + case GGML_TYPE_IQ2_XXS: + mul_mat_vec_iq2_xxs_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; + case GGML_TYPE_IQ2_XS: + mul_mat_vec_iq2_xs_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; + case GGML_TYPE_IQ3_XXS: + mul_mat_vec_iq3_xxs_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; @@ -12311,6 +13976,7 @@ inline void ggml_sycl_op_mul_mat_vec_q( (void) src1_padded_row_size; } + inline void ggml_sycl_op_dequantize_mul_mat_vec( const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst, const char *src0_dd_i, const float *src1_ddf_i, const char *src1_ddq_i, @@ -12318,10 +13984,11 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( const int64_t src1_ncols, const int64_t src1_padded_row_size, const dpct::queue_ptr &stream) { - GGML_TENSOR_BINARY_OP_LOCALS; - + const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + GGML_ASSERT(src1->type == GGML_TYPE_F32); + // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics #ifdef GGML_SYCL_F16 sycl_pool_alloc src1_dfloat_a; @@ -12333,15 +14000,10 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( src0->type == GGML_TYPE_Q8_0 || src0->type == GGML_TYPE_F16; if (src1_convert_f16) { - if (src1->type == GGML_TYPE_F16) { - src1_dfloat = (sycl::half *)src1->data + src1_padded_row_size; - } else { - src1_dfloat = src1_dfloat_a.alloc(ne00); - ggml_cpy_f32_f16_sycl((const char *)src1_ddf_i, (char *)src1_dfloat, - ne00, ne00, ne01, ne02, nb00, nb01, nb02, - nb03, ne10, ne11, ne12, nb10, nb11, nb12, - nb13, stream); - } + src1_dfloat = src1_dfloat_a.alloc(ne00); + const to_fp16_sycl_t to_fp16_sycl = ggml_get_to_fp16_sycl(src1->type); + GGML_ASSERT(to_fp16_sycl != nullptr); + to_fp16_sycl(src1_ddf_i, src1_dfloat, ne00, stream); } #else const dfloat * src1_dfloat = (const dfloat *) src1_ddf_i; // dfloat == float, no conversion @@ -12495,7 +14157,7 @@ inline void ggml_sycl_op_mul_mat_sycl( *g_sycl_handles[id], oneapi::mkl::transpose::trans, oneapi::mkl::transpose::nontrans, row_diff, src1_ncols, ne10, dpct::get_value(&alpha, *g_sycl_handles[id]), src0_ddf_i, ne00, - src1_ddf_i, ne10, dpct::get_value(&beta, *g_sycl_handles[id]), + src1_ddf1_i, ne10, dpct::get_value(&beta, *g_sycl_handles[id]), dst_dd_i, ldc))); } (void) dst; @@ -12923,7 +14585,7 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, // copy dst to host if necessary if (!dst_on_device) { SYCL_CHECK(CHECK_TRY_ERROR( - main_stream->memcpy(dst->data, dst_ddf, ggml_nbytes(dst)))); + main_stream->memcpy(dst->data, dst_ddf, ggml_nbytes(dst)).wait())); } if (dst->backend == GGML_BACKEND_TYPE_CPU) { @@ -13200,7 
+14862,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, SYCL_CHECK(CHECK_TRY_ERROR(stream->memcpy( src1_ddq_i, src1_ddq_i_source, src1_ncols * src1_padded_col_size * q8_1_ts / - q8_1_bs))); + q8_1_bs).wait())); } else { float * src1_ddf_i_source = (float *) src1_extra->data_device[g_main_device]; @@ -13294,7 +14956,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, dhf_dst_i += src1_col_0*ne0; SYCL_CHECK(CHECK_TRY_ERROR( stream->memcpy(dhf_dst_i, dst_dd_i, - src1_ncols * ne0 * sizeof(float)))); + src1_ncols * ne0 * sizeof(float)).wait())); } } @@ -13852,13 +15514,13 @@ static __global__ void k_compute_batched_ptrs_id( src0_f16 = (half *) srcs_ar[i]; } else { src0_f16 = src0_as_f16; - if (threadIdx.x == 0 && threadIdx.y == 0) { + if (item_ct1.get_local_id(2) == 0 && threadIdx.y == 0) { const to_fp16_sycl_t to_fp16 = ggml_get_to_fp16_sycl(src0_type); to_fp16(srcs_ar[i], src0_f16, src0_ne, syclStreamFireAndForget); } } - int i13 = blockIdx.x * blockDim.x + threadIdx.x; + int i13 = blockIdx.x * blockDim.x + item_ct1.get_local_id(2); int i12 = blockIdx.y * blockDim.y + threadIdx.y; if (i13 >= ne13 || i12 >= ne12) { @@ -14024,8 +15686,8 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, if (ids->backend == GGML_BACKEND_TYPE_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; SYCL_CHECK(CHECK_TRY_ERROR( - stream->memcpy(ids_host.data(), ids_dev, ggml_nbytes(ids)))); - SYCL_CHECK(CHECK_TRY_ERROR(stream->wait())); + stream->memcpy(ids_host.data(), ids_dev, ggml_nbytes(ids)).wait())); + // SYCL_CHECK(CHECK_TRY_ERROR(stream->wait())); } else { memcpy(ids_host.data(), ids->data, ggml_nbytes(ids)); } @@ -14095,7 +15757,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, SYCL_CHECK(CHECK_TRY_ERROR( stream->memcpy(src1_contiguous.get() + num_src1_rows * nb11, - src1_original + i01 * nb11, nb11))); + src1_original + i01 * nb11, nb11).wait())); num_src1_rows++; } @@ -14128,7 +15790,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, SYCL_CHECK(CHECK_TRY_ERROR(stream->memcpy( dst_original + i01 * nb1, - dst_contiguous.get() + num_src1_rows * nb1, nb1))); + dst_contiguous.get() + num_src1_rows * nb1, nb1).wait())); num_src1_rows++; } } @@ -15522,7 +17184,7 @@ GGML_CALL static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( - (char *)tensor->data + offset, data, size))); + (char *)tensor->data + offset, data, size).wait())); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -15538,7 +17200,7 @@ GGML_CALL static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( - data, (const char *)tensor->data + offset, size))); + data, (const char *)tensor->data + offset, size).wait())); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -15557,7 +17219,7 @@ GGML_CALL static bool ggml_backend_sycl_cpy_tensor_async(ggml_backend_t backend, was inserted. You need to rewrite this code. 
*/ SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( - dst->data, src->data, ggml_nbytes(dst)))); + dst->data, src->data, ggml_nbytes(dst)).wait())); return true; } @@ -15647,20 +17309,12 @@ GGML_CALL static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, cons if (a->ne[3] != b->ne[3]) { return false; } - - if (a->type == GGML_TYPE_IQ1_S) { + ggml_type a_type = a->type; + if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S || + a_type == GGML_TYPE_IQ2_S || a_type == GGML_TYPE_IQ4_XS) { return false; } - if (a->type == GGML_TYPE_IQ3_XXS) { - return false; - } - if (a->type == GGML_TYPE_IQ2_XXS) { - return false; - } - if (a->type == GGML_TYPE_IQ2_XS) { - return false; - } - return true; } break; case GGML_OP_GET_ROWS: @@ -15705,15 +17359,15 @@ GGML_CALL static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, cons } return false; } break; - case GGML_OP_DUP: - case GGML_OP_REPEAT: case GGML_OP_CONCAT: { ggml_type src0_type = op->src[0]->type; return src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16; } break; + case GGML_OP_DUP: case GGML_OP_NONE: case GGML_OP_RESHAPE: + case GGML_OP_REPEAT: case GGML_OP_VIEW: case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: From 61d1c88e155515dd03940913a5707ea84a8b119b Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Tue, 5 Mar 2024 13:33:42 +0100 Subject: [PATCH 791/811] Vulkan Improvements (#5835) * Improve dequant shaders, add fast q4_0 dequant * Optimize dmmv non-kquants for GCN Remove unnecessary SPIR-V shader duplication * Fix q4_0 dequant dispatch sizes Fix backend free bug * Optimize dequant shaders for q4_1, q5_0, q5_1 and q8_0 * Add unary and binary op shader templates * Fix Vulkan check results * Enable non-contiguous support for simple ops * Add argsort Basic q4_0 mmq shader and unit test * Speed up q4_0 dequant code, enable mmq for q4_0 * Rework matmul pipeline selection * Add soft_max alibi support * Add q4_1, q5_0, q5_1 and q8_0 dequant mat mat mul shaders * Add environment variable GGML_VK_FORCE_MAX_ALLOCATION_SIZE to limit max buffer size Rename GGML_VULKAN_DISABLE_F16 to GGML_VK_DISABLE_F16 for consistency --- ggml-vulkan-shaders.hpp | 86931 ++++++++++++++++------------------ ggml-vulkan.cpp | 2128 +- ggml-vulkan.h | 1 + ggml_vk_generate_shaders.py | 1207 +- llama.cpp | 4 +- 5 files changed, 43476 insertions(+), 46795 deletions(-) diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index e5e7a8414..1eca4d6e0 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -2,50 +2,1847 @@ unsigned char add_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x67,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2e,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x3d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x3d,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x3d,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3f,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3f,0x01,0x00,0x00, 
+0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x48,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x49,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x49,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x54,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x54,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1e,0x00,0x1e,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, 
+0xd6,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x17,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x36,0x01,0x00,0x00,0x16,0x00,0x03,0x00,0x3b,0x01,0x00,0x00, +0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x3c,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x3d,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x19,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x48,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x49,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x53,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x54,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x55,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x55,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x2c,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x2f,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x63,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0xae,0x00,0x05,0x00, +0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x39,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x37,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x62,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x19,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x76,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0xa8,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, +0xa5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xec,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0x03,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, 
+0x06,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x06,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0xf6,0x01,0x00,0x00,0x07,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x09,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x3b,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0xca,0x01,0x00,0x00,0x4a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x16,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0xdd,0x01,0x00,0x00,0x51,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x55,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x58,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0x59,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xf6,0x01,0x00,0x00,0x61,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x63,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, +0x65,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x66,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x81,0x00,0x05,0x00, +0x3b,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x01,0x00,0x00, 
+0x5e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x62,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x62,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t add_f32_len = 4276; + +unsigned char argsort_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x2b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x04,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x2b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x31,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x31,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x31,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x82,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x82,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x84,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x84,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x22,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x25,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x34,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x37,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x80,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x83,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x00,0x04,0x00,0x00, +0x2c,0x00,0x06,0x00,0x22,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x2c,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x07,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x25,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x34,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x37,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x3a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x4b,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x29,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x4c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4c,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x53,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x53,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x50,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0xb2,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x36,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x50,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5f,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x5e,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xac,0x00,0x05,0x00, +0x37,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0x25,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x60,0x00,0x00,0x00,0xc6,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0xac,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xaa,0x00,0x05,0x00, +0x37,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x34,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xab,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x94,0x00,0x00,0x00, 
+0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0xba,0x00,0x05,0x00, +0x37,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x8e,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x80,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb8,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x37,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x9b,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2a,0x01,0x00,0x00,0xb4,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0f,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc0,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x34,0x00,0x00,0x00, 
+0xc1,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0xab,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xc6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xb8,0x00,0x05,0x00, +0x37,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x8e,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x80,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x8e,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0xba,0x00,0x05,0x00,0x37,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xc6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x37,0x00,0x00,0x00, +0x29,0x01,0x00,0x00,0xdc,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x29,0x01,0x00,0x00,0xf5,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x16,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf6,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x70,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x62,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x62,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x50,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x56,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x56,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x50,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x55,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x07,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x01,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t argsort_f32_len = 3752; + +unsigned char clamp_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xb6,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xd7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xd7,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd9,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xe2,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe4,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, 
+0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0xc5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xcf,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xd6,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xd7,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xd8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe2,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xef,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0xc5,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x0b,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd0,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x6c,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x86,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x85,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x14,0x00,0x00,0x00, 
+0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xef,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0xb8,0x00,0x05,0x00,0xcf,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf2,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xef,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xba,0x00,0x05,0x00,0xcf,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf5,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xde,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0xe4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x07,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x01,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t clamp_f32_len = 3512; + +unsigned char cpy_f16_f16_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x98,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x1f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x20,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xd6,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x20,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x22,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x22,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x25,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x27,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x48,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x2d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2f,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xd6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd8,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xe2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xe2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xef,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xc5,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xcf,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xd4,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xe1,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xe9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0xc5,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf1,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf0,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x05,0x01,0x00,0x00, 
+0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x37,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x3f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x83,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xd4,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xe9,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t cpy_f16_f16_len = 3252; + +unsigned char cpy_f32_f16_data[] 
= { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x9a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xd6,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xd6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd8,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xe2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xe2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xf1,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xc5,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xcf,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xd4,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x15,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xe1,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xe9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xed,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0xc5,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd0,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x52,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, 
+0x55,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x6c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x85,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xed,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf2,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t cpy_f32_f16_len = 3284; + +unsigned char cpy_f32_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x97,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd4,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xd5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd7,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe0,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xe1,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xe1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, 
+0x20,0x00,0x00,0x00,0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0xc5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xcf,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xd4,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xe0,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xe8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0xc5,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xed,0x00,0x00,0x00, 
+0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xef,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf0,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd0,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xef,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0x3f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x82,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, 
+0x85,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x85,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x83,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0xe8,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xe8,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xef,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t cpy_f32_f32_len = 3232; + +unsigned char dequant_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x1f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x33,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x33,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x35,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, @@ -55,114 +1852,1021 @@ unsigned char add_f32_data[] = { 0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x1a,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x24,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x25,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x26,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
+0x1e,0x00,0x07,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x1b,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x28,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x31,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x34,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x39,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x0e,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x0f,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x3c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x46,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, 
0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x38,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3c,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, +0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x45,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x39,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x64,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x39,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, 
+0x11,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x39,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb2,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x39,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd9,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x28,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, 
+0x39,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x31,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x3d,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x39,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x35,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x28,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x45,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t add_f32_len = 1456; +const uint64_t dequant_f32_len = 3200; -unsigned char clamp_f32_data[] = { +unsigned char dequant_q2_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x5a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x5c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x1e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x58,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x67,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x76,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x01,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x09,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, 
+0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x67,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x64,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x67,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x7f,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x98,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x67,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x9b,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x64,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
+0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x67,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x7f,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0xb8,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x67,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x64,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x67,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x7f,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xd8,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x67,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdb,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x64,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x67,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x7f,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x67,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfd,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x11,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x0a,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x01,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x01,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t dequant_q2_K_len = 4032; + +unsigned char dequant_q3_K_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x3f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x30,0x00,0x00,0x00, 
+0x77,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x73,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x1e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x72,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x73,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xdb,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x29,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x09,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x09,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0x0d,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, 
+0x4d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x6b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x67,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6a,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x95,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x92,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x94,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, 
+0x79,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x95,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xae,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xab,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4d,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, 
+0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xae,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xae,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x21,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x21,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0xa8,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0xae,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x21,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x95,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xdf,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0xdb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfa,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x44,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x44,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0xf6,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0xf4,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0xf7,0x00,0x00,0x00,0x39,0x01,0x00,0x00, 
+0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x5d,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x8b,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x4d,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x57,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x21,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0xdb,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0xea,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x72,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xdf,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfc,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x32,0x01,0x00,0x00, +0x2c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x33,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t dequant_q3_K_len = 4804; + +unsigned char dequant_q4_0_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x83,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x49,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x4b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -173,3186 +2877,389 @@ unsigned char clamp_f32_data[] = { 0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x12,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x1a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0x21,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x29,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0xb8,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x35,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x36,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0xba,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x36,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x26,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x48,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x4c,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t clamp_f32_len = 1448; - -unsigned char cpy_f16_f16_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x0e,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x93,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x94,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x96,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x96,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x08,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x0e,0x00,0x0e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x07,0x00, +0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0f,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, 0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x92,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x95,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0x9c,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x07,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x00,0x04,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x1e,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0xc1, +0x1d,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x65,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x13,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, +0x17,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0a,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x19,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x18,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x92,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa1,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t cpy_f16_f16_len = 2480; - -unsigned char cpy_f32_f16_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xad,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x0e,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x93,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x94,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x96,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x96,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x08,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x0e,0x00,0x0e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0f,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x92,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x95,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x9c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x07,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0a,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xac,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x19,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x18,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xab,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa2,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x9c,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x92,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa6,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa7,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xab,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xab,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t cpy_f32_f16_len = 2524; - -unsigned char cpy_f32_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x0e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x93,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x94,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x94,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x96,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x96,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x9d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x07,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x08,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x08,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x0e,0x00, -0x0e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x11,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x92,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x95,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x07,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0a,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x19,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x18,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x34,0x00,0x00,0x00, 
-0x21,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa1,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x92,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xa1,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x96,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, 
+0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, 
+0x42,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, +0xd2,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
+0x45,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x11,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x22,0x01,0x00,0x00, 
+0x21,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x54,0x01,0x00,0x00, 
+0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00,0x63,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x66,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x95,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t cpy_f32_f32_len = 2472; - -unsigned char dequant_f16_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5e,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x48,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, 
-0x0a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x23,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x18,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_f16_len = 1816; - -unsigned char dequant_q2_K_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x13,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x33,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x61,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x03,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00, -0x11,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x5d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x5f,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x56,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x6c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x71,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x04,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x02,0x01,0x00,0x00,0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
-0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x99,0x00,0x00,0x00, 
-0x85,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x71,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa0,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0xbc,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x71,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0xdc,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x71,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0xfd,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x12,0x01,0x00,0x00, 
-0x73,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x71,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x7e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0xff,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x0a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x01,0x00,0x00,0x04,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x04,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x04,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q2_K_len = 4056; - -unsigned char dequant_q3_K_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x42,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x33,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x75,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2f,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00, -0x11,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x71,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x76,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x77,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00, 
-0x78,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xde,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x06,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x15,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x2a,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x29,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x10,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, 
-0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x6f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x52,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x92,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x97,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x94,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x96,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xab,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb0,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, 
-0xb8,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x97,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x97,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0xaa,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x91,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x97,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, 
-0xe4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0xde,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0xde,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x49,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x49,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0xf6,0x00,0x04,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x05,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfe,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x15,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x8e,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0x5c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0xde,0x00,0x00,0x00, 
-0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0xde,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0xed,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xe2,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xff,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x36,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x41,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q3_K_len = 4828; - -unsigned char dequant_q4_0_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x19,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x76,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x41, -0x1d,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x77,0x00,0x00,0x00, 
-0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x2c,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xff,0x02,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x02,0x03,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0a,0x03,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0e,0x03,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x03,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x13,0x03,0x00,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x03,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x03,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x03,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x9b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, 
-0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xff,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcb,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, 
-0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x02,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0xa5,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x07,0x01,0x00,0x00, -0x06,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x11,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x19,0x01,0x00,0x00, 
-0x07,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x02,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, 
-0x63,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x03,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x04,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x05,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0xa5,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x7f,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x07,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, 
-0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, -0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb9,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbb,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xca,0x01,0x00,0x00, -0xc9,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xcb,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x51,0x00,0x05,0x00, 
-0x4a,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xa5,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xff,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x01,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0xf7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x19,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, 
-0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x0e,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x33,0x02,0x00,0x00, -0x32,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x40,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x43,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x45,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x10,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5b,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x11,0x03,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x67,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0xa5,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x77,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x81,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x83,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x13,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x80,0x00,0x00,0x00, 
-0x14,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xab,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x15,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0xb7,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, -0xbb,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xb5,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x78,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd3,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xdc,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe2,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe4,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00, 
-0xe6,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xa5,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0xdd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x17,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xef,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xf1,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xf1,0x02,0x00,0x00, -0xf0,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfa,0x02,0x00,0x00,0xf9,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xfb,0x02,0x00,0x00, -0x78,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xfb,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q4_0_len = 8856; +const uint64_t dequant_q4_0_len = 5184; unsigned char dequant_q4_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x59,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x51,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x68,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x03,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x03,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x43,0x03,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x03,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x45,0x03,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x46,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x03,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4b,0x03,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x03,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x03,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x03,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x50,0x03,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x51,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x03,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x58,0x03,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xa0,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x57,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x79,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x3f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, 
-0xd3,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd3,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x40,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
-0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x85,0x00,0x00,0x00,0x41,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x38,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, -0x85,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x43,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x85,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
-0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x43,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x64,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x85,0x00,0x00,0x00, -0x43,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x79,0x01,0x00,0x00, -0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x85,0x00,0x00,0x00,0x44,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x99,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0x85,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x85,0x00,0x00,0x00,0x46,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0xb8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x47,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x85,0x00,0x00,0x00,0x47,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0x85,0x00,0x00,0x00, 
-0x48,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd9,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xe4,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x49,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x85,0x00,0x00,0x00, -0x49,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfb,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xfd,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x85,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0xf3,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x4b,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x19,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, -0x14,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x85,0x00,0x00,0x00,0x4b,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x1f,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x85,0x00,0x00,0x00,0x4c,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x43,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x44,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x48,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x40,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x85,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x85,0x00,0x00,0x00, -0x4e,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x85,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x81,0x02,0x00,0x00, -0x80,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x85,0x00,0x00,0x00,0x50,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x77,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x85,0x00,0x00,0x00,0x51,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0xa3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xab,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0x85,0x00,0x00,0x00,0x52,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb7,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0xc6,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, -0xca,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, -0xcc,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, -0x85,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xcf,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x41,0x00,0x06,0x00, 
-0x57,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd9,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x85,0x00,0x00,0x00, -0x54,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xe1,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe1,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xe3,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0xec,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf0,0x02,0x00,0x00,0xef,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x64,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x55,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0xf2,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0xf3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xf5,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf3,0x02,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0xf9,0x02,0x00,0x00,0xf8,0x02,0x00,0x00, -0xed,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0xfa,0x02,0x00,0x00,0xf0,0x02,0x00,0x00,0xf0,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0xfb,0x02,0x00,0x00, -0xf9,0x02,0x00,0x00,0xfa,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00,0x85,0x00,0x00,0x00, -0x55,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x03,0x03,0x00,0x00,0xfb,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x04,0x03,0x00,0x00, -0x03,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x05,0x03,0x00,0x00, -0x04,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x03,0x00,0x00,0x85,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0xfb,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x0d,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0f,0x03,0x00,0x00,0x0e,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x18,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x1b,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x1d,0x03,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x57,0x03,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x1d,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1e,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x1f,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x20,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x22,0x03,0x00,0x00,0x1f,0x03,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x23,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x21,0x03,0x00,0x00, -0x23,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x24,0x03,0x00,0x00,0x19,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x26,0x03,0x00,0x00, -0x1c,0x03,0x00,0x00,0x1c,0x03,0x00,0x00,0x81,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x27,0x03,0x00,0x00,0x25,0x03,0x00,0x00, -0x26,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x03,0x00,0x00,0x85,0x00,0x00,0x00,0x57,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2f,0x03,0x00,0x00, -0x27,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x30,0x03,0x00,0x00,0x2f,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x31,0x03,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x2d,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x31,0x03,0x00,0x00,0x30,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0x85,0x00,0x00,0x00,0x58,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x39,0x03,0x00,0x00,0x27,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x39,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x3b,0x03,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3b,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_1_len = 9704; - -unsigned char dequant_q4_K_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb1,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x85,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, @@ -3360,29 +3267,970 @@ unsigned char dequant_q4_K_data[] = { 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x10,0x00,0x06,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x49,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x66,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x66,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x68,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x68,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x96,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x29,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x32,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x00,0x04,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 
+0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x49,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x65,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x67,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x13,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x17,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x98,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, 
+0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x50,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, 
+0x7e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x91,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, 
+0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcf,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xda,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xee,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x2c,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x31,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x33,0x01,0x00,0x00, 
+0x32,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x37,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x4b,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x82,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x2c,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x69,0x01,0x00,0x00, 
+0x68,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x6a,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x75,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t dequant_q4_1_len = 5248; + +unsigned char dequant_q4_K_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xae,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x33,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x48,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x48,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x48,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x49,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x1e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x3e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, 
+0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x48,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x49,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x79,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x23,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x09,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x35,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x09,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 
+0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x41,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x4e,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x41,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x6d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x71,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x72,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x98,0x00,0x00,0x00, 
+0xc5,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x6d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6d,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6b,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x79,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, 
+0x26,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0xce,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x43,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x72,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0x69,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0x8f,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xb5,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x41,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, 
+0x62,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x98,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x7f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xfc,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x2e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x69,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x72,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x56,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5a,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x98,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x65,0x01,0x00,0x00, +0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x69,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x72,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, 
+0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x76,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x98,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x82,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x83,0x01,0x00,0x00, +0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x69,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x72,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x96,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0x9c,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x98,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa1,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x11,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x0a,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x3a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x34,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t dequant_q4_K_len = 5916; + +unsigned char dequant_q5_0_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xb7,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 0x4c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 0x48,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, @@ -3390,2770 +4238,1065 @@ unsigned char dequant_q4_K_data[] = { 0x47,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x08,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x09,0x01,0x00,0x00, 
+0x7c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7d,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x09,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x01,0x00,0x00, +0x47,0x00,0x03,0x00,0x7d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x36,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00, -0x11,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x12,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x47,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1e,0x00,0x07,0x00,0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x00,0x04,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x42,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x45,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x49,0x00,0x00,0x00, 0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4b,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x08,0x01,0x00,0x00, -0x45,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x20,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xaa,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0x23,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x37,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x52,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x52,0x00,0x00,0x00,0x59,0x00,0x00,0x00, 
-0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x45,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x41,0x00,0x00,0x00, 
-0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x76,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x8a,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xbc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, 
-0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0xd1,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x47,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, 
-0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x50,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x93,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xb8,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0x45,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x7f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0xff,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x30,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x76,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x93,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x52,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x66,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x76,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x71,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x93,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7b,0x01,0x00,0x00, -0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x86,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x76,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x50,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, 
-0x92,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x93,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x99,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa4,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x3d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x48,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q4_K_len = 5940; - -unsigned char dequant_q5_0_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x9b,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x54,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x84,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x2c,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x82,0x04,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x83,0x04,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x04,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x04,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x04,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x04,0x00,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x4c,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x00,0x00,0x80,0x41,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, 0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x04,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x04,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x04,0x00,0x00, +0xb0,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, 0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8b,0x04,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x04,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x04,0x00,0x00, +0xb3,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, 0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x04,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x04,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x90,0x04,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x04,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x04,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x04,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x94,0x04,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x95,0x04,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x96,0x04,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x04,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x98,0x04,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x04,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x04,0x00,0x00,0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0xb6,0x01,0x00,0x00,0x17,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, 
-0xf7,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0xac,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x14,0x00,0x00,0x00, 
+0x15,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x34,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xac,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, 
-0xe2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x82,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x83,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0x37,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, 
-0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x84,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0x37,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x85,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0xa3,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x60,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x86,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x65,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x71,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x73,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x87,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x76,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x67,0x00,0x00,0x00, 
-0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x9c,0x01,0x00,0x00,0x71,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xac,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0xab,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xb1,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb1,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xb3,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x88,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, 
-0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd9,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x89,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xde,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x83,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xe5,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x89,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xed,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xd3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x89,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x8a,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xf4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x02,0x00,0x00, 
-0x11,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x14,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x19,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x8b,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x85,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x8b,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x1e,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2c,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x22,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2f,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x8b,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x8c,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x58,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x59,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, -0x59,0x02,0x00,0x00,0x8d,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x87,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x60,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x63,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x8d,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x61,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x70,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x8d,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x8e,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, 
-0x85,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x83,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x86,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x90,0x02,0x00,0x00, -0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x8f,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x88,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x8f,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa3,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xaa,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0xad,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xae,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, -0xaf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x8f,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0xb1,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbb,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, 
-0xa9,0x00,0x00,0x00,0x90,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0xce,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xd1,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0xd1,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0xd2,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xd5,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0xd7,0x02,0x00,0x00,0x91,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0xd9,0x02,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdd,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x8a,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0xdd,0x02,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x91,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0xdb,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xed,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xed,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0xee,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x91,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, -0xf0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf9,0x02,0x00,0x00,0xf8,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, 
-0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xfa,0x02,0x00,0x00,0xf9,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x92,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0xf0,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x04,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x01,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x04,0x03,0x00,0x00,0x03,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, -0x0d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x10,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x11,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x14,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x12,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x16,0x03,0x00,0x00,0x93,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x17,0x03,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x18,0x03,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x19,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x16,0x03,0x00,0x00,0x8c,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1d,0x03,0x00,0x00, -0x1c,0x03,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x1d,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x20,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x93,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0x20,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x21,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x23,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x25,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x26,0x03,0x00,0x00,0x23,0x03,0x00,0x00,0x25,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x26,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x28,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, -0x1e,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x2b,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x2a,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, -0x2b,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x2d,0x03,0x00,0x00,0x27,0x03,0x00,0x00,0x2c,0x03,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x2e,0x03,0x00,0x00, -0x2d,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x2f,0x03,0x00,0x00,0x2e,0x03,0x00,0x00, -0x0e,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x35,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x93,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x37,0x03,0x00,0x00, 
-0x2f,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x37,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x35,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x39,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x94,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x41,0x03,0x00,0x00,0x2f,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x41,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x43,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x43,0x03,0x00,0x00,0x42,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x4c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x50,0x03,0x00,0x00,0x4f,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x51,0x03,0x00,0x00, -0x50,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x54,0x03,0x00,0x00, -0x53,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x55,0x03,0x00,0x00,0x51,0x03,0x00,0x00,0x54,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x55,0x03,0x00,0x00,0x95,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x56,0x03,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x57,0x03,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x59,0x03,0x00,0x00, -0x58,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5b,0x03,0x00,0x00,0x55,0x03,0x00,0x00,0x8e,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5c,0x03,0x00,0x00, -0x5b,0x03,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5d,0x03,0x00,0x00,0x5c,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x5f,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x95,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x5f,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x61,0x03,0x00,0x00, -0x60,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x62,0x03,0x00,0x00,0x61,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x64,0x03,0x00,0x00, -0x59,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x65,0x03,0x00,0x00,0x62,0x03,0x00,0x00,0x64,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0x65,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x67,0x03,0x00,0x00,0x61,0x03,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x5d,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6a,0x03,0x00,0x00,0x67,0x03,0x00,0x00,0x69,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6b,0x03,0x00,0x00, -0x6a,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x6c,0x03,0x00,0x00,0x66,0x03,0x00,0x00,0x6b,0x03,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x6d,0x03,0x00,0x00, -0x6c,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x6e,0x03,0x00,0x00,0x6d,0x03,0x00,0x00, 
-0x4d,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x95,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x76,0x03,0x00,0x00, -0x6e,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x77,0x03,0x00,0x00,0x76,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x78,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x74,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x03,0x00,0x00,0x77,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x96,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x80,0x03,0x00,0x00,0x6e,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x81,0x03,0x00,0x00,0x80,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x82,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x7f,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x82,0x03,0x00,0x00,0x81,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x8b,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8c,0x03,0x00,0x00, -0x8b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x8e,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8f,0x03,0x00,0x00,0x8e,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x90,0x03,0x00,0x00, -0x8f,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x92,0x03,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x93,0x03,0x00,0x00, -0x92,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x94,0x03,0x00,0x00,0x90,0x03,0x00,0x00,0x93,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x95,0x03,0x00,0x00, -0x94,0x03,0x00,0x00,0x77,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x96,0x03,0x00,0x00,0x95,0x03,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x97,0x03,0x00,0x00,0x96,0x03,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x03,0x00,0x00, -0x97,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9a,0x03,0x00,0x00,0x94,0x03,0x00,0x00,0x90,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9b,0x03,0x00,0x00, -0x9a,0x03,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9c,0x03,0x00,0x00,0x9b,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x9e,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x9f,0x03,0x00,0x00,0x9e,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa0,0x03,0x00,0x00, -0x9f,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa1,0x03,0x00,0x00,0xa0,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa3,0x03,0x00,0x00, -0x98,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa4,0x03,0x00,0x00,0xa1,0x03,0x00,0x00,0xa3,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa5,0x03,0x00,0x00, -0xa4,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa6,0x03,0x00,0x00,0xa0,0x03,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa8,0x03,0x00,0x00, -0x9c,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa9,0x03,0x00,0x00,0xa6,0x03,0x00,0x00,0xa8,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xaa,0x03,0x00,0x00, -0xa9,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xab,0x03,0x00,0x00,0xa5,0x03,0x00,0x00,0xaa,0x03,0x00,0x00, 
-0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xac,0x03,0x00,0x00, -0xab,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xad,0x03,0x00,0x00,0xac,0x03,0x00,0x00, -0x8c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb3,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb5,0x03,0x00,0x00, -0xad,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0xb5,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xb7,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb3,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb7,0x03,0x00,0x00,0xb6,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x97,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbf,0x03,0x00,0x00,0xad,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc0,0x03,0x00,0x00,0xbf,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0xc1,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xbe,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc1,0x03,0x00,0x00,0xc0,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xca,0x03,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcb,0x03,0x00,0x00, -0xca,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xcd,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xcd,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xcf,0x03,0x00,0x00, -0xce,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xd1,0x03,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd2,0x03,0x00,0x00, -0xd1,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd3,0x03,0x00,0x00,0xcf,0x03,0x00,0x00,0xd2,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd4,0x03,0x00,0x00, -0xd3,0x03,0x00,0x00,0x82,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd5,0x03,0x00,0x00,0xd4,0x03,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd6,0x03,0x00,0x00,0xd5,0x03,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd7,0x03,0x00,0x00, -0xd6,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd9,0x03,0x00,0x00,0xd3,0x03,0x00,0x00,0x92,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xda,0x03,0x00,0x00, -0xd9,0x03,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdb,0x03,0x00,0x00,0xda,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0xdd,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0xde,0x03,0x00,0x00,0xdd,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdf,0x03,0x00,0x00, -0xde,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe0,0x03,0x00,0x00,0xdf,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe2,0x03,0x00,0x00, -0xd7,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe3,0x03,0x00,0x00,0xe0,0x03,0x00,0x00,0xe2,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe4,0x03,0x00,0x00, -0xe3,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe5,0x03,0x00,0x00,0xdf,0x03,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe7,0x03,0x00,0x00, -0xdb,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe8,0x03,0x00,0x00,0xe5,0x03,0x00,0x00,0xe7,0x03,0x00,0x00, 
-0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe9,0x03,0x00,0x00, -0xe8,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0xea,0x03,0x00,0x00,0xe4,0x03,0x00,0x00,0xe9,0x03,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0xeb,0x03,0x00,0x00, -0xea,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0xec,0x03,0x00,0x00,0xeb,0x03,0x00,0x00, -0xcb,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x82,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xf4,0x03,0x00,0x00, -0xec,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf5,0x03,0x00,0x00,0xf4,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0xf6,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf2,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf6,0x03,0x00,0x00,0xf5,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x98,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xfe,0x03,0x00,0x00,0xec,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xff,0x03,0x00,0x00,0xfe,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x00,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xfd,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x00,0x04,0x00,0x00,0xff,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0a,0x04,0x00,0x00, -0x09,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x0c,0x04,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x04,0x00,0x00,0x0c,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x0e,0x04,0x00,0x00, -0x0d,0x04,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x10,0x04,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x11,0x04,0x00,0x00, -0x10,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x12,0x04,0x00,0x00,0x0e,0x04,0x00,0x00,0x11,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x13,0x04,0x00,0x00, -0x12,0x04,0x00,0x00,0x84,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x14,0x04,0x00,0x00,0x13,0x04,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x15,0x04,0x00,0x00,0x14,0x04,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x04,0x00,0x00, -0x15,0x04,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x18,0x04,0x00,0x00,0x12,0x04,0x00,0x00,0x94,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x19,0x04,0x00,0x00, -0x18,0x04,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x04,0x00,0x00,0x19,0x04,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x1c,0x04,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x84,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x1d,0x04,0x00,0x00,0x1c,0x04,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1e,0x04,0x00,0x00, -0x1d,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1f,0x04,0x00,0x00,0x1e,0x04,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x21,0x04,0x00,0x00, -0x16,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x22,0x04,0x00,0x00,0x1f,0x04,0x00,0x00,0x21,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x23,0x04,0x00,0x00, -0x22,0x04,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x24,0x04,0x00,0x00,0x1e,0x04,0x00,0x00,0x71,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x26,0x04,0x00,0x00, -0x1a,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x27,0x04,0x00,0x00,0x24,0x04,0x00,0x00,0x26,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x28,0x04,0x00,0x00, -0x27,0x04,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x29,0x04,0x00,0x00,0x23,0x04,0x00,0x00,0x28,0x04,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x2a,0x04,0x00,0x00, -0x29,0x04,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x2b,0x04,0x00,0x00,0x2a,0x04,0x00,0x00, -0x0a,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x84,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x33,0x04,0x00,0x00, -0x2b,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x34,0x04,0x00,0x00,0x33,0x04,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x35,0x04,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x31,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x35,0x04,0x00,0x00,0x34,0x04,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x04,0x00,0x00, -0xa9,0x00,0x00,0x00,0x99,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x04,0x00,0x00,0x2b,0x04,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3e,0x04,0x00,0x00,0x3d,0x04,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x3f,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3c,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3f,0x04,0x00,0x00,0x3e,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x48,0x04,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x49,0x04,0x00,0x00, -0x48,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4b,0x04,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4c,0x04,0x00,0x00,0x4b,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4d,0x04,0x00,0x00, -0x4c,0x04,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x04,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x50,0x04,0x00,0x00, -0x4f,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x51,0x04,0x00,0x00,0x4d,0x04,0x00,0x00,0x50,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x52,0x04,0x00,0x00, -0x51,0x04,0x00,0x00,0x86,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x53,0x04,0x00,0x00,0x52,0x04,0x00,0x00, -0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x54,0x04,0x00,0x00,0x53,0x04,0x00,0x00,0x52,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x55,0x04,0x00,0x00, -0x54,0x04,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x57,0x04,0x00,0x00,0x51,0x04,0x00,0x00,0x96,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x58,0x04,0x00,0x00, -0x57,0x04,0x00,0x00,0x52,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x59,0x04,0x00,0x00,0x58,0x04,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x5b,0x04,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x86,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0x5b,0x04,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5d,0x04,0x00,0x00, -0x5c,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5e,0x04,0x00,0x00,0x5d,0x04,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x60,0x04,0x00,0x00, -0x55,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x61,0x04,0x00,0x00,0x5e,0x04,0x00,0x00,0x60,0x04,0x00,0x00, 
-0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x62,0x04,0x00,0x00, -0x61,0x04,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x63,0x04,0x00,0x00,0x5d,0x04,0x00,0x00,0x71,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x65,0x04,0x00,0x00, -0x59,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x66,0x04,0x00,0x00,0x63,0x04,0x00,0x00,0x65,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x67,0x04,0x00,0x00, -0x66,0x04,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x68,0x04,0x00,0x00,0x62,0x04,0x00,0x00,0x67,0x04,0x00,0x00, -0x83,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x69,0x04,0x00,0x00, -0x68,0x04,0x00,0x00,0xcf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x6a,0x04,0x00,0x00,0x69,0x04,0x00,0x00, -0x49,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x70,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x86,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x72,0x04,0x00,0x00, -0x6a,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x73,0x04,0x00,0x00,0x72,0x04,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x74,0x04,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x70,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x74,0x04,0x00,0x00,0x73,0x04,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x04,0x00,0x00, -0xa9,0x00,0x00,0x00,0x9a,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x7c,0x04,0x00,0x00,0x6a,0x04,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7d,0x04,0x00,0x00,0x7c,0x04,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x7e,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x7b,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7e,0x04,0x00,0x00,0x7d,0x04,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x49,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
+0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, 
+0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, 
+0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x40,0x00,0x00,0x00, 
+0x10,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0x46,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, 
+0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
+0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb6,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, 
+0x83,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x56,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xac,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xac,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_0_len = 13952; +const uint64_t dequant_q5_0_len = 6644; unsigned char dequant_q5_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x95,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x9b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbd,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x49,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x78,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x78,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x7a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
+0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x29,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x51,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x32,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x00,0x04,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 
+0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x49,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x77,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x9a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x04,0x00,0x00, +0x81,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x11,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, 0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x04,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x04,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x04,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x80,0x04,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x81,0x04,0x00,0x00,0x13,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x04,0x00,0x00, 
+0xa8,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x12,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0x13,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, 0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x83,0x04,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x84,0x04,0x00,0x00,0x15,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x85,0x04,0x00,0x00, +0xae,0x01,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x86,0x04,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x87,0x04,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x04,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x04,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8a,0x04,0x00,0x00,0x18,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x04,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8c,0x04,0x00,0x00,0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8d,0x04,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8e,0x04,0x00,0x00, -0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x04,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x90,0x04,0x00,0x00,0x1b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x04,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x92,0x04,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x04,0x00,0x00,0x1e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x04,0x00,0x00, -0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0x17,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xbe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbf,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, 
-0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xbe,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x57,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, 
-0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xab,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x7c,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, 
-0xde,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x7d,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x37,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, 
-0x7e,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x10,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0xa4,0x00,0x00,0x00,0x7f,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x65,0x00,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x78,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x81,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x99,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xac,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0xab,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0x91,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xa4,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x82,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc3,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0x83,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, -0x7d,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, -0xde,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x83,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xde,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x83,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0xa4,0x00,0x00,0x00,0x84,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xee,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0x0b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x11,0x02,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x85,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x7f,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x19,0x02,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x85,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x1d,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x1f,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x1f,0x02,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x24,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x2c,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, -0x85,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x35,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x37,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x86,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x41,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x87,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x81,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x87,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x5c,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x61,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x63,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x64,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x66,0x02,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0xa4,0x00,0x00,0x00,0x87,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, -0x88,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x80,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x90,0x02,0x00,0x00, 
-0x8f,0x02,0x00,0x00,0x89,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x82,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, -0xa5,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0xa7,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb1,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x89,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0xab,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, -0xa4,0x00,0x00,0x00,0x8a,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xab,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xce,0x02,0x00,0x00, 
-0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x8b,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0x84,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x8b,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xdb,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdc,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xd2,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0xdd,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0xe1,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xe8,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0xc9,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0xe8,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, -0x8b,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, -0xf2,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xf4,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xf4,0x02,0x00,0x00, -0xf3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x8c,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0xfc,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfb,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xfe,0x02,0x00,0x00,0xfd,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x07,0x03,0x00,0x00, -0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, 
-0x0a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x0d,0x03,0x00,0x00, -0x8d,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x0f,0x03,0x00,0x00,0x0e,0x03,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x10,0x03,0x00,0x00, -0x0f,0x03,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x10,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x13,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x15,0x03,0x00,0x00,0x13,0x03,0x00,0x00,0x86,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x16,0x03,0x00,0x00, -0x15,0x03,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x16,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0x19,0x03,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x8d,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x19,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x11,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1c,0x03,0x00,0x00,0x1e,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x20,0x03,0x00,0x00, -0x1f,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x23,0x03,0x00,0x00, -0x17,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x24,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x23,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x25,0x03,0x00,0x00, -0x24,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x26,0x03,0x00,0x00,0x20,0x03,0x00,0x00,0x25,0x03,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x26,0x03,0x00,0x00,0x08,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x27,0x03,0x00,0x00,0x28,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x03,0x00,0x00, -0xa4,0x00,0x00,0x00,0x8d,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x31,0x03,0x00,0x00,0x29,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x31,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x33,0x03,0x00,0x00,0x32,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0xa4,0x00,0x00,0x00, -0x8e,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x3c,0x03,0x00,0x00, -0x3b,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x3d,0x03,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x3d,0x03,0x00,0x00, -0x3c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x46,0x03,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x47,0x03,0x00,0x00,0x46,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x49,0x03,0x00,0x00, 
-0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4a,0x03,0x00,0x00,0x49,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4c,0x03,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x4c,0x03,0x00,0x00,0x8f,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x4d,0x03,0x00,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x4e,0x03,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x50,0x03,0x00,0x00, -0x4f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x52,0x03,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x54,0x03,0x00,0x00,0x52,0x03,0x00,0x00, -0x88,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x55,0x03,0x00,0x00,0x54,0x03,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x55,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x8f,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x59,0x03,0x00,0x00, -0x58,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x5a,0x03,0x00,0x00,0x59,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5b,0x03,0x00,0x00,0x5a,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x5d,0x03,0x00,0x00,0x50,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5e,0x03,0x00,0x00,0x5b,0x03,0x00,0x00, -0x5d,0x03,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x5f,0x03,0x00,0x00,0x5e,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x5a,0x03,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x62,0x03,0x00,0x00,0x56,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x63,0x03,0x00,0x00,0x60,0x03,0x00,0x00, -0x62,0x03,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x64,0x03,0x00,0x00,0x63,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x65,0x03,0x00,0x00,0x5f,0x03,0x00,0x00, -0x64,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x66,0x03,0x00,0x00,0x65,0x03,0x00,0x00,0x47,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x67,0x03,0x00,0x00, -0x4a,0x03,0x00,0x00,0x4a,0x03,0x00,0x00,0x81,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x68,0x03,0x00,0x00,0x66,0x03,0x00,0x00, -0x67,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x03,0x00,0x00,0xa4,0x00,0x00,0x00,0x8f,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x70,0x03,0x00,0x00, -0x68,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x71,0x03,0x00,0x00,0x70,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x72,0x03,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x6e,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x72,0x03,0x00,0x00,0x71,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x03,0x00,0x00, -0xa4,0x00,0x00,0x00,0x90,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x7a,0x03,0x00,0x00,0x68,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7b,0x03,0x00,0x00,0x7a,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x7c,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x79,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7c,0x03,0x00,0x00,0x7b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x85,0x03,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x86,0x03,0x00,0x00, 
-0x85,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x88,0x03,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x89,0x03,0x00,0x00,0x88,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8b,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x8c,0x03,0x00,0x00,0x8b,0x03,0x00,0x00,0x71,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x8d,0x03,0x00,0x00, -0x8c,0x03,0x00,0x00,0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8e,0x03,0x00,0x00,0x8d,0x03,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x03,0x00,0x00,0x8e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x91,0x03,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x93,0x03,0x00,0x00, -0x91,0x03,0x00,0x00,0x8a,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x94,0x03,0x00,0x00,0x93,0x03,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x95,0x03,0x00,0x00,0x94,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0x97,0x03,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x98,0x03,0x00,0x00,0x97,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x99,0x03,0x00,0x00,0x98,0x03,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9a,0x03,0x00,0x00, -0x99,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x9c,0x03,0x00,0x00,0x8f,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9d,0x03,0x00,0x00, -0x9a,0x03,0x00,0x00,0x9c,0x03,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9e,0x03,0x00,0x00,0x9d,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9f,0x03,0x00,0x00, -0x99,0x03,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa1,0x03,0x00,0x00,0x95,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xa2,0x03,0x00,0x00, -0x9f,0x03,0x00,0x00,0xa1,0x03,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa3,0x03,0x00,0x00,0xa2,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xa4,0x03,0x00,0x00, -0x9e,0x03,0x00,0x00,0xa3,0x03,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xa5,0x03,0x00,0x00,0xa4,0x03,0x00,0x00, -0x86,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xa6,0x03,0x00,0x00,0x89,0x03,0x00,0x00,0x89,0x03,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xa7,0x03,0x00,0x00, -0xa5,0x03,0x00,0x00,0xa6,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x03,0x00,0x00,0xa4,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xaf,0x03,0x00,0x00,0xa7,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb0,0x03,0x00,0x00, -0xaf,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xb1,0x03,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xad,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xb1,0x03,0x00,0x00, -0xb0,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x03,0x00,0x00,0xa4,0x00,0x00,0x00,0x91,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb9,0x03,0x00,0x00, -0xa7,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xba,0x03,0x00,0x00,0xb9,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0xbb,0x03,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb8,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbb,0x03,0x00,0x00,0xba,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xc4,0x03,0x00,0x00, 
-0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x03,0x00,0x00,0xc4,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc7,0x03,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc8,0x03,0x00,0x00, -0xc7,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xca,0x03,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xcb,0x03,0x00,0x00,0xca,0x03,0x00,0x00, -0x7c,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xcc,0x03,0x00,0x00,0xcb,0x03,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xcd,0x03,0x00,0x00, -0xcc,0x03,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xcd,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd0,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd2,0x03,0x00,0x00,0xd0,0x03,0x00,0x00,0x8c,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd3,0x03,0x00,0x00, -0xd2,0x03,0x00,0x00,0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd4,0x03,0x00,0x00,0xd3,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00,0xd6,0x03,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x7c,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xd7,0x03,0x00,0x00,0xd6,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd8,0x03,0x00,0x00, -0xd7,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd9,0x03,0x00,0x00,0xd8,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdb,0x03,0x00,0x00, -0xce,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdc,0x03,0x00,0x00,0xd9,0x03,0x00,0x00,0xdb,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdd,0x03,0x00,0x00, -0xdc,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xde,0x03,0x00,0x00,0xd8,0x03,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe0,0x03,0x00,0x00, -0xd4,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe1,0x03,0x00,0x00,0xde,0x03,0x00,0x00,0xe0,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xe2,0x03,0x00,0x00, -0xe1,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xe3,0x03,0x00,0x00,0xdd,0x03,0x00,0x00,0xe2,0x03,0x00,0x00, -0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0xe4,0x03,0x00,0x00, -0xe3,0x03,0x00,0x00,0xc5,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0xe5,0x03,0x00,0x00,0xc8,0x03,0x00,0x00, -0xc8,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0xe6,0x03,0x00,0x00,0xe4,0x03,0x00,0x00,0xe5,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x03,0x00,0x00, -0xa4,0x00,0x00,0x00,0x7c,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xee,0x03,0x00,0x00,0xe6,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xef,0x03,0x00,0x00,0xee,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0xf0,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xec,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf0,0x03,0x00,0x00,0xef,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf7,0x03,0x00,0x00,0xa4,0x00,0x00,0x00, -0x92,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf8,0x03,0x00,0x00,0xe6,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf9,0x03,0x00,0x00, -0xf8,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0xfa,0x03,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf7,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xfa,0x03,0x00,0x00, 
-0xf9,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x03,0x04,0x00,0x00,0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x04,0x04,0x00,0x00,0x03,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x06,0x04,0x00,0x00, -0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x07,0x04,0x00,0x00,0x06,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x0a,0x04,0x00,0x00, -0x09,0x04,0x00,0x00,0x7e,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x0b,0x04,0x00,0x00,0x0a,0x04,0x00,0x00, -0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x0c,0x04,0x00,0x00,0x0b,0x04,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0d,0x04,0x00,0x00, -0x0c,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0f,0x04,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x11,0x04,0x00,0x00,0x0f,0x04,0x00,0x00, -0x8e,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x12,0x04,0x00,0x00,0x11,0x04,0x00,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x13,0x04,0x00,0x00, -0x12,0x04,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, -0x15,0x04,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x7e,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x16,0x04,0x00,0x00, -0x15,0x04,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x17,0x04,0x00,0x00,0x16,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x18,0x04,0x00,0x00,0x17,0x04,0x00,0x00, -0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x1a,0x04,0x00,0x00,0x0d,0x04,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1b,0x04,0x00,0x00,0x18,0x04,0x00,0x00, -0x1a,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x1c,0x04,0x00,0x00,0x1b,0x04,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1d,0x04,0x00,0x00,0x17,0x04,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x1f,0x04,0x00,0x00,0x13,0x04,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x20,0x04,0x00,0x00,0x1d,0x04,0x00,0x00, -0x1f,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x21,0x04,0x00,0x00,0x20,0x04,0x00,0x00,0x50,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x22,0x04,0x00,0x00,0x1c,0x04,0x00,0x00, -0x21,0x04,0x00,0x00,0x8e,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x23,0x04,0x00,0x00,0x22,0x04,0x00,0x00,0x04,0x04,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x24,0x04,0x00,0x00, -0x07,0x04,0x00,0x00,0x07,0x04,0x00,0x00,0x81,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x25,0x04,0x00,0x00,0x23,0x04,0x00,0x00, -0x24,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x04,0x00,0x00,0xa4,0x00,0x00,0x00,0x7e,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2d,0x04,0x00,0x00, -0x25,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x2e,0x04,0x00,0x00,0x2d,0x04,0x00,0x00, -0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x2f,0x04,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x2b,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2f,0x04,0x00,0x00,0x2e,0x04,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x04,0x00,0x00, -0xa4,0x00,0x00,0x00,0x93,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x37,0x04,0x00,0x00,0x25,0x04,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x38,0x04,0x00,0x00,0x37,0x04,0x00,0x00,0x41,0x00,0x06,0x00, -0x57,0x00,0x00,0x00,0x39,0x04,0x00,0x00,0x9d,0x00,0x00,0x00, 
-0x2e,0x00,0x00,0x00,0x36,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x39,0x04,0x00,0x00,0x38,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x42,0x04,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x43,0x04,0x00,0x00, -0x42,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x45,0x04,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x46,0x04,0x00,0x00,0x45,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x48,0x04,0x00,0x00, -0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x49,0x04,0x00,0x00,0x48,0x04,0x00,0x00,0x80,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4a,0x04,0x00,0x00, -0x49,0x04,0x00,0x00,0x69,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x4b,0x04,0x00,0x00,0x4a,0x04,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4c,0x04,0x00,0x00,0x4b,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x04,0x00,0x00,0x65,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x50,0x04,0x00,0x00, -0x4e,0x04,0x00,0x00,0x90,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x51,0x04,0x00,0x00,0x50,0x04,0x00,0x00, -0x4f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x04,0x00,0x00,0x51,0x04,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0x54,0x04,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x80,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x55,0x04,0x00,0x00,0x54,0x04,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x56,0x04,0x00,0x00,0x55,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x57,0x04,0x00,0x00, -0x56,0x04,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x59,0x04,0x00,0x00,0x4c,0x04,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5a,0x04,0x00,0x00, -0x57,0x04,0x00,0x00,0x59,0x04,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5b,0x04,0x00,0x00,0x5a,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5c,0x04,0x00,0x00, -0x56,0x04,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x5e,0x04,0x00,0x00,0x52,0x04,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5f,0x04,0x00,0x00, -0x5c,0x04,0x00,0x00,0x5e,0x04,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x60,0x04,0x00,0x00,0x5f,0x04,0x00,0x00, -0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x61,0x04,0x00,0x00, -0x5b,0x04,0x00,0x00,0x60,0x04,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x62,0x04,0x00,0x00,0x61,0x04,0x00,0x00, -0x43,0x04,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x63,0x04,0x00,0x00,0x46,0x04,0x00,0x00,0x46,0x04,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x64,0x04,0x00,0x00, -0x62,0x04,0x00,0x00,0x63,0x04,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x04,0x00,0x00,0xa4,0x00,0x00,0x00, -0x80,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x04,0x00,0x00,0x64,0x04,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x6d,0x04,0x00,0x00, -0x6c,0x04,0x00,0x00,0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00, -0x6e,0x04,0x00,0x00,0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x6a,0x04,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x04,0x00,0x00, -0x6d,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x04,0x00,0x00,0xa4,0x00,0x00,0x00,0x94,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x76,0x04,0x00,0x00, -0x64,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x76,0x04,0x00,0x00, 
-0x41,0x00,0x06,0x00,0x57,0x00,0x00,0x00,0x78,0x04,0x00,0x00, -0x9d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x75,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x04,0x00,0x00,0x77,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbe,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x50,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, 
+0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x42,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd0,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x42,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xea,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x0a,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0a,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x42,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x41,0x00,0x06,0x00, 
+0x50,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x81,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x5f,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x47,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x29,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00, +0x5f,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x37,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x5f,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x42,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x5f,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x47,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x73,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x81,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 
+0x68,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x73,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x42,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x81,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x5f,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x47,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x5f,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0x47,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, 
+0xa0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa2,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_1_len = 13548; +const uint64_t dequant_q5_1_len = 6412; unsigned char dequant_q5_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xa0,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x9e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, @@ -6161,1606 +5304,1262 @@ unsigned char dequant_q5_K_data[] = { 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x10,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x33,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x12,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x13,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x13,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x89,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00, -0x11,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4b,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x4e,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x52,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x82,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x12,0x01,0x00,0x00, -0x45,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x13,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x14,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x15,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x5a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2a,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x29,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x8a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x10,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x45,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x54,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, 
-0x41,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x47,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x90,0x00,0x00,0x00, 
-0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x82,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, 
-0xe8,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x47,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0xcb,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x5d,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x98,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7b,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x52,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, 
-0x25,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x7f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0xbd,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x98,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7b,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x52,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x7f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7f,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x7f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x83,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x90,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x99,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, 
-0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8a,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q5_K_len = 5988; - -unsigned char dequant_q6_K_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x33,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x05,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 
-0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00, -0x11,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x61,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x62,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x5c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x07,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x0e,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x6f,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, 
-0x86,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x6f,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x6f,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x74,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x83,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6f,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x74,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x83,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x72,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6f,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x74,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x83,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x6f,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xff,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x74,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x01,0x01,0x00,0x00, -0x00,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0f,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x06,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x06,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q6_K_len = 4296; - -unsigned char dequant_q8_0_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x23,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x00,0x00,0x00, 
+0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x49,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6f,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0e,0x01,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x0f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x70,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x87,0x01,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x1e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x3e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x49,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x71,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, 
-0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x07,0x03,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0b,0x03,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x22,0x03,0x00,0x00,0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x96,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x43,0x00,0x00,0x00, 
+0x48,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x4a,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x0e,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x10,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x10,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x5d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x88,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x09,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x0d,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x81,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x07,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, 
-0xd0,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xee,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0x00,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0xfc,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x02,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0x09,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0d,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x0a,0x03,0x00,0x00, 
-0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x15,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x49,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x50,0x00,0x05,0x00, 
-0x5a,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0x0e,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x69,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x7d,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0x0f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x87,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x88,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x10,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x54,0x00,0x00,0x00, 
-0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x9c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x7a,0x00,0x00,0x00,0x11,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x7a,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x13,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0x7a,0x00,0x00,0x00,0x13,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0x14,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xe4,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xed,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x15,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf8,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0x7a,0x00,0x00,0x00, -0x15,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0xf9,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0x01,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x03,0x02,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0a,0x02,0x00,0x00,0x7a,0x00,0x00,0x00,0x16,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0xf9,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x19,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x7a,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x22,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x36,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x40,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x19,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
-0x49,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x6b,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x70,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0x1b,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x7e,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x7a,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x88,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0x7a,0x00,0x00,0x00,0x1d,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, 
-0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0xba,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1f,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0x1f,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x72,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, -0xd8,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xda,0x02,0x00,0x00, -0xd9,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xe8,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x5f,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 
-0x22,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xeb,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xed,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xed,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0xee,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf5,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00, -0xf7,0x02,0x00,0x00,0xef,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, -0xf7,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xf9,0x02,0x00,0x00,0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x00,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0xef,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x01,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x03,0x03,0x00,0x00, -0x72,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x95,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x10,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x50,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 
+0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x41,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x71,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x71,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 
+0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x7d,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x71,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x43,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x8a,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x7f,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x3e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x3e,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xbd,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x76,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 
+0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x76,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x76,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x76,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0xd2,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x43,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0xc7,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x70,0x00,0x04,0x00, 
+0x3e,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x3e,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x26,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x3d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x26,0x00,0x00,0x00, +0x0a,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x69,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x93,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x62,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x43,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0xab,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x21,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x7f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x3e,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00, +0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x2e,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x63,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x69,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, 
+0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x93,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x76,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x4e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x21,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x46,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x41,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x63,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x9c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x21,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, +0x3e,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x3e,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x41,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x50,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x63,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x43,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x35,0x01,0x00,0x00, 
+0xc2,0x00,0x05,0x00,0x43,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x9c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x43,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x43,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x21,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x3e,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x41,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x90,0x01,0x00,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x2c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8e,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x97,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t dequant_q5_K_len = 5980; + +unsigned char dequant_q6_K_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x0d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x56,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x5b,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x5d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x5d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x72,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x07,0x00,0x1e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, 
+0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x5b,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x5c,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5d,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x68,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x6d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x71,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x4e,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x09,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x04,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, 
+0xb0,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x23,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x6d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, 
+0x6e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x68,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x65,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x68,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7c,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x65,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, 
+0x53,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x68,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xba,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x53,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x68,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, 
+0xdb,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdb,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7c,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x53,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x21,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x68,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfe,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x11,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x0a,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x2c,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0c,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x03,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x03,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q8_0_len = 8868; +const uint64_t dequant_q6_K_len = 4272; + +unsigned char dequant_q8_0_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x4c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 
+0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x29,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x47,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x48,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x48,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x49,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x5f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x61,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x87,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x1e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x07,0x00, +0x29,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x00,0x04,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x47,0x00,0x00,0x00, +0x46,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x48,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x5e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x0f,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x33,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x34,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x4f,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, 
+0x41,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x71,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x6b,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa6,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x6b,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, 
+0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6b,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0xd2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x52,0x00,0x00,0x00, 
+0xe4,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf1,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6b,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x52,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x61,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x16,0x01,0x00,0x00, 
+0x15,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6b,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x42,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x52,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x45,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x41,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x40,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00, +0x2d,0x01,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x42,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x52,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x61,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6b,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x52,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x4f,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x61,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x3c,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t dequant_q8_0_len = 4780; unsigned char diag_mask_inf_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -7890,144 +6689,6 @@ unsigned char diag_mask_inf_f32_data[] = { }; const uint64_t diag_mask_inf_f32_len = 1480; -unsigned char f32_to_f16_data[] = { 
-0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x36,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x36,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x40,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x40,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x11,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x32,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x3e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x3f,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x41,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x29,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x27,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x28,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x29,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x31,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4a,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x31,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t f32_to_f16_len = 1596; - unsigned char gelu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, @@ -9897,9 +8558,9 @@ unsigned char get_rows_q5_1_data[] = { 0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, @@ -9971,68 +8632,66 @@ unsigned char get_rows_q5_1_data[] = { 0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, 0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, 0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, 0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x69,0x00,0x00,0x00, 0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, 
-0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb4,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00, 
+0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x84,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x5c,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_1_len = 2796; +const uint64_t get_rows_q5_1_len = 2780; unsigned char get_rows_q5_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -10135,9 +8794,9 @@ unsigned char get_rows_q5_1_f32_data[] = { 0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, @@ -10210,65 +8869,64 @@ unsigned char get_rows_q5_1_f32_data[] = { 0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, 
-0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x67,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_1_f32_len = 2780; +const uint64_t get_rows_q5_1_f32_len = 2764; unsigned char get_rows_q8_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ 
-10661,9 +9319,9 @@ unsigned char get_rows_q8_0_f32_data[] = { }; const uint64_t get_rows_q8_0_f32_len = 2280; -unsigned char matmul_f16_aligned_l_data[] = { +unsigned char matmul_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x5b,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xd9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -10671,9 +9329,9 @@ unsigned char matmul_f16_aligned_l_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0x10,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x39,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, @@ -10712,1924 +9370,38 @@ unsigned char matmul_f16_aligned_l_data[] = { 0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8a,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x04,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x02,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x01,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x02,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x02,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x03,0x00,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x04,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x04,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x8a,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x26,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3d,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x01,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x03,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x03,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1f,0x03,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, 
-0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x29,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x42,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3e,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x4c,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x57,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x3a,0x03,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x3b,0x03,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x42,0x03,0x00,0x00, -0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, 
-0x99,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xab,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, 
-0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x3b,0x03,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, -0xf0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x42,0x03,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0xa1,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xfa,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x2b,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x48,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x13,0x02,0x00,0x00, 
-0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x20,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0xeb,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x27,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x09,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x48,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x49,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x37,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, 
-0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x80,0x01,0x00,0x00, -0x52,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x49,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x61,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x66,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x69,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x50,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x50,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x81,0x02,0x00,0x00,0x4e,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xba,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x94,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfa,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x44,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0x2a,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x8c,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0xc4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x2b,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc3,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc7,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x26,0x03,0x00,0x00,0xcc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x2c,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xcf,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x2c,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, -0xd3,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0x2b,0x03,0x00,0x00, -0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x24,0x03,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x2e,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe3,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe7,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x30,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x22,0x03,0x00,0x00,0xec,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xeb,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xef,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x30,0x03,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xf5,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf6,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfc,0x02,0x00,0x00,0xfb,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb8,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0xfd,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfe,0x02,0x00,0x00,0xff,0x02,0x00,0x00, -0x00,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0xbf,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x0d,0x03,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x2b,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x17,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x1d,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x1f,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x34,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x20,0x03,0x00,0x00, -0x1e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0x00,0x03,0x00,0x00, -0xf8,0x00,0x02,0x00,0x00,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xe3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x2c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc4,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0x2b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_aligned_l_len = 11936; - -unsigned char matmul_f16_aligned_l_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x53,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x21,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4e,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xab,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd6,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x99,0x02,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x9b,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xb7,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00, 
-0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x44,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x43,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x87,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x8e,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x39,0x02,0x00,0x00,0x95,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x98,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0xe0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa0,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xe0,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xe1,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcd,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xef,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xe1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xe2,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, 
-0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x35,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xff,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x03,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfe,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x33,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x07,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0b,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x13,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0xac,0x01,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, 
-0xe8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x40,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5f,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x63,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x67,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x87,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x96,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x97,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xa7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xb7,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x64,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_aligned_l_fp32_len = 10512; - -unsigned char matmul_f16_aligned_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x5b,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8a,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, +0x47,0x00,0x03,0x00,0x4f,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x01,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x02,0x03,0x00,0x00, +0x39,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7f,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x80,0x02,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x02,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x03,0x00,0x00, +0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x80,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x82,0x02,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x04,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, @@ -12680,3939 +9452,153 @@ unsigned char matmul_f16_aligned_m_data[] = { 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, 0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x8a,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x26,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3d,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x01,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x03,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x03,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1f,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x29,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x42,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3e,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, 
-0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x2d,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x57,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x3a,0x03,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x3b,0x03,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x42,0x03,0x00,0x00, -0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xab,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x74,0x01,0x00,0x00, 
-0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x3b,0x03,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, -0xf0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x42,0x03,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0xa1,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb8,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xfa,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x2b,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x48,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x20,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0xeb,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x27,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x09,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x48,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x49,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, 
-0xf6,0x00,0x04,0x00,0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x37,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x80,0x01,0x00,0x00, -0x52,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x49,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x61,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x66,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0x69,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x50,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x50,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x4e,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xba,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x94,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfa,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x44,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0x2a,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x8c,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0xc4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x2b,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc3,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc7,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x26,0x03,0x00,0x00,0xcc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x2c,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xcf,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x2c,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, -0xd3,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0x2b,0x03,0x00,0x00, -0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x24,0x03,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x2e,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe3,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe7,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x30,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x22,0x03,0x00,0x00,0xec,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xeb,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xef,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x30,0x03,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xf5,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf6,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfc,0x02,0x00,0x00,0xfb,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb8,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0xfd,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfe,0x02,0x00,0x00,0xff,0x02,0x00,0x00, -0x00,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0xbf,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x0d,0x03,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x13,0x03,0x00,0x00,0x2b,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x17,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x1d,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x1f,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x34,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x20,0x03,0x00,0x00, -0x1e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0x00,0x03,0x00,0x00, -0xf8,0x00,0x02,0x00,0x00,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x2c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc4,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0x2b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_aligned_m_len = 11936; - -unsigned char matmul_f16_aligned_m_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x53,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, 
-0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 
-0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, 
+0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xf6,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xab,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 
-0xaa,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd6,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x14,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, 0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x99,0x02,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x9b,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xb7,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, 
-0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x43,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, 
-0x3f,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x87,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x8e,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x39,0x02,0x00,0x00,0x95,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x98,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0xe0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa0,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, 
-0xa4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xe0,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xe1,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcd,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xe5,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xef,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xe1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xe2,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x35,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xff,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x03,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfe,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x33,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x07,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0b,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x13,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0xac,0x01,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0xe8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x40,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5f,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x63,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x67,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0x87,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x96,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x97,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xa7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xb7,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, 
-0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x64,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_aligned_m_fp32_len = 10512; - -unsigned char matmul_f16_aligned_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x5b,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8a,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x01,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x02,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x02,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x03,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x04,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, 
-0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
-0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x8a,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, 
-0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x26,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3d,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x01,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x03,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x03,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1f,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x10,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x29,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x42,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3e,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00, 
-0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x0b,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, 
-0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00, -0x51,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x57,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x3a,0x03,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb8,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x3b,0x03,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0x3b,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x42,0x03,0x00,0x00, -0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xab,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x3b,0x03,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, -0xf0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x42,0x03,0x00,0x00,0xf3,0x01,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0xa1,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xfa,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x2b,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x48,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x48,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x20,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0xeb,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x27,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x09,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x48,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, 
-0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x49,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x37,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x49,0x03,0x00,0x00,0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x44,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x80,0x01,0x00,0x00, -0x52,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x49,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x61,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, 
-0x5c,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x66,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x69,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x50,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x50,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x4e,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x11,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xba,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x94,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x50,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x4a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfa,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x44,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x02,0x00,0x00,0x2a,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x8c,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0xc4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x2b,0x03,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc3,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc7,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x26,0x03,0x00,0x00,0xcc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x2c,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xcf,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, 
-0x2c,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, -0xd3,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0x2b,0x03,0x00,0x00, -0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x24,0x03,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x2e,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe3,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe7,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x30,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x22,0x03,0x00,0x00,0xec,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xeb,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xef,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x30,0x03,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xf5,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf6,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfc,0x02,0x00,0x00,0xfb,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb8,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0xfd,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfe,0x02,0x00,0x00,0xff,0x02,0x00,0x00, -0x00,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0xdf,0x02,0x00,0x00,0x2e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0b,0x03,0x00,0x00,0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x03,0x00,0x00,0xbf,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x0d,0x03,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x2b,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x16,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x17,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x1d,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x1f,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x04,0x03,0x00,0x00,0x34,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x20,0x03,0x00,0x00, -0x1e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0x00,0x03,0x00,0x00, -0xf8,0x00,0x02,0x00,0x00,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x30,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x2c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc4,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0x2b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_aligned_s_len = 11936; - -unsigned char matmul_f16_aligned_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x53,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
-0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, 
-0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, 0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x45,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, 
-0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, 0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x68,0x01,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x84,0x00,0x00,0x00, 0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xab,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa4,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0xba,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xbc,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd6,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, 
-0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x7f,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x80,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x82,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x99,0x02,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x9b,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xb7,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, 
-0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, 
-0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, 
-0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, 
-0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x43,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x76,0x01,0x00,0x00, 
-0x58,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x87,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x8e,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x39,0x02,0x00,0x00,0x95,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x98,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0xe0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa0,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9b,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xb2,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xe0,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xe0,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xe1,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcd,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, -0xe1,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xea,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xef,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xe1,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xe2,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x35,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xff,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x03,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfe,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x33,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x07,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0b,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x13,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0xac,0x01,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0xe8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x92,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x40,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x0d,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5f,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x63,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x67,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x87,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, 
-0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x96,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x97,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xa7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xb7,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x64,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_aligned_s_fp32_len = 10512; - -unsigned char matmul_f16_f32_aligned_l_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x65,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8d,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8d,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc6,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x0d,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0f,0x03,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0f,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 
-0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
-0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x8a,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x9d,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x91,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, 
-0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x1b,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x31,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x48,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x0c,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0e,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1b,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbc,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, @@ -16668,1532 +9654,538 @@ unsigned char matmul_f16_f32_aligned_l_data[] = { 0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, 
-0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x33,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x4c,0x03,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, 
-0x00,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x34,0x03,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00, -0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x44,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, 
-0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x45,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x4c,0x03,0x00,0x00,0x78,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xab,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf7,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x45,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x48,0x03,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x4c,0x03,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x08,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x52,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x02,0x00,0x00, -0x0d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x10,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x64,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x62,0x00,0x00,0x00, 
-0xf6,0x00,0x04,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x18,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x13,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x1e,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x32,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x64,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x61,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x46,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x41,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0x52,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x80,0x01,0x00,0x00,0x5d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x54,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6c,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x58,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x80,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x5a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x58,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x5c,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x5c,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x31,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9f,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x34,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x35,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0xcf,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xce,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd2,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x36,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcd,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0xd7,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd6,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x02,0x00,0x00, -0xd5,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0xaf,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xdf,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc1,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xbc,0x02,0x00,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x73,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xb8,0x02,0x00,0x00,0x24,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2a,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2e,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x39,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0xa8,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x41,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x58,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x11,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x67,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x41,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x6a,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, +0x6e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x71,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x1f,0x02,0x00,0x00,0x78,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc1,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x77,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7b,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x83,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x85,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x85,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7e,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0xc6,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0xc6,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xa4,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa5,0x01,0x00,0x00, +0xa3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x85,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x80,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xab,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, 
+0xf6,0x00,0x04,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb9,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0xc7,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, 0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0x35,0x03,0x00,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x2e,0x03,0x00,0x00, -0xef,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x38,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xee,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf2,0x02,0x00,0x00, -0xed,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xed,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x2c,0x03,0x00,0x00,0xf7,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, -0x3a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfa,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, -0xf6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0x3a,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xfd,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x00,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x01,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00, 
-0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x00,0x03,0x00,0x00,0xf5,0x02,0x00,0x00,0x08,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x0b,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x09,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x16,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0xca,0x02,0x00,0x00, -0x17,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x18,0x03,0x00,0x00,0xe2,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x35,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x1e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x24,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x27,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x91,0x01,0x00,0x00,0x2a,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xef,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x03,0x00,0x00, -0x36,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x35,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0xc3,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, 
+0xc7,0x02,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc9,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00, +0xc7,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xca,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xa4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xd3,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xc7,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xab,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x1d,0x02,0x00,0x00, +0xdc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdf,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xda,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0x1b,0x02,0x00,0x00,0xe4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, +0xcc,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, +0x19,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xeb,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x17,0x02,0x00,0x00, +0xf2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf7,0x01,0x00,0x00,0xd0,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf3,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, 
+0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0xf9,0x01,0x00,0x00,0xce,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0xd0,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xa4,0x01,0x00,0x00,0x07,0x02,0x00,0x00, +0x8f,0x01,0x00,0x00,0x06,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xa4,0x01,0x00,0x00, +0x0e,0x02,0x00,0x00,0xbd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x13,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc3,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x10,0x02,0x00,0x00, +0x13,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x12,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x17,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0xce,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x26,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00, 
+0x95,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2d,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x45,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x41,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x45,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x40,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x49,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x4d,0x02,0x00,0x00,0x48,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x48,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, +0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x62,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x65,0x02,0x00,0x00,0x60,0x02,0x00,0x00, +0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x67,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xae,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x69,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6d,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x69,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x75,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x87,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x86,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x91,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x02,0x00,0x00, +0x9c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x4a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x49,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x42,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x41,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t matmul_f16_f32_aligned_l_len = 12096; +const uint64_t matmul_f16_len = 10316; -unsigned char matmul_f16_f32_aligned_l_fp32_data[] = { +unsigned char matmul_f16_aligned_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x51,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x57,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x59,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x59,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x98,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, 
-0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x55,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x1e,0x00,0x03,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xa9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x97,0x02,0x00,0x00, 
-0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, 
-0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, 
-0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xd3,0x02,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, 
-0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0xcf,0x02,0x00,0x00,0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, 
-0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5b,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, -0x89,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x8e,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x37,0x02,0x00,0x00,0x93,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xed,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0xec,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xde,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x35,0x02,0x00,0x00, -0xf6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0x33,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x03,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0x31,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x16,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0xaa,0x01,0x00,0x00,0x20,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0xd7,0x01,0x00,0x00,0x15,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x06,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0xe5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xfe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0xd9,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x65,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x60,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, 
-0x3f,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x85,0x02,0x00,0x00,0x80,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8d,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8b,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x8b,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x96,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x94,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0xa5,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xab,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xb0,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5b,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x82,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x81,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_f32_aligned_l_fp32_len = 10464; - -unsigned char matmul_f16_f32_aligned_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x65,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x09,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -18201,9 +10193,9 @@ unsigned char matmul_f16_f32_aligned_m_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x10,0x00,0x06,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x69,0x01,0x00,0x00, +0x69,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, @@ -18242,14014 +10234,7104 @@ unsigned char matmul_f16_f32_aligned_m_data[] = { 0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x03,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x03,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3c,0x01,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x67,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x67,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8d,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8d,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc6,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x0d,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x0d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0f,0x03,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0f,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x8a,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x91,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x1b,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x31,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x48,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x0c,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0e,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1b,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x33,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x4c,0x03,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x34,0x03,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00, -0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x44,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, 
-0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x45,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x4c,0x03,0x00,0x00,0x78,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xab,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x91,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf7,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x45,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x48,0x03,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x4c,0x03,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x08,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x52,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x02,0x00,0x00, -0x0d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x10,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x64,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x18,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x13,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, 
-0x1e,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x32,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x64,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x61,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x46,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x41,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0x52,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, 
-0x57,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x80,0x01,0x00,0x00,0x5d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x54,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6c,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x58,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x80,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x5a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x58,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x5c,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x5c,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x31,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9f,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x34,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, 
-0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x35,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0xcf,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xce,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd2,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x36,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcd,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0xd7,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd6,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x02,0x00,0x00, -0xd5,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0x35,0x03,0x00,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x2e,0x03,0x00,0x00, -0xef,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x38,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xee,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf2,0x02,0x00,0x00, -0xed,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xed,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x2c,0x03,0x00,0x00,0xf7,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, -0x3a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfa,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, 
-0xf6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0x3a,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xfd,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x00,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x01,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x00,0x03,0x00,0x00,0xf5,0x02,0x00,0x00,0x08,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x0b,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x09,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x16,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0xca,0x02,0x00,0x00, -0x17,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x18,0x03,0x00,0x00,0xe2,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x35,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x1e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x24,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x27,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x91,0x01,0x00,0x00,0x2a,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xef,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x03,0x00,0x00, -0x36,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x35,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_f32_aligned_m_len = 12096; - -unsigned char matmul_f16_f32_aligned_m_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x51,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
-0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x57,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x59,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x59,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x98,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x55,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x1e,0x00,0x03,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xa9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x97,0x02,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, 
-0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
-0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, 
-0xb9,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xd3,0x02,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0xcf,0x02,0x00,0x00,0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, 
-0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5b,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, -0x89,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x37,0x02,0x00,0x00,0x93,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
-0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xc6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xed,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0xec,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xde,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x35,0x02,0x00,0x00, -0xf6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0x33,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x03,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0x31,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x16,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0xaa,0x01,0x00,0x00,0x20,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0xd7,0x01,0x00,0x00,0x15,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x06,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0xe5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0xd9,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x65,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x60,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x74,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x85,0x02,0x00,0x00,0x80,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8d,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8b,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x8b,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x96,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x94,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0xa5,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xab,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, 
-0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xb0,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5b,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x82,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x81,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_f32_aligned_m_fp32_len = 10464; - -unsigned char matmul_f16_f32_aligned_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x65,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x8d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8d,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x8d,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc6,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x0d,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x0d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0f,0x03,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0f,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 
-0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xe6,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x8a,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x91,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x1b,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x31,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x48,0x02,0x00,0x00,0xe6,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x0c,0x03,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0e,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1b,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x49,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x33,0x03,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x33,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x4c,0x03,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x34,0x03,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00, -0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xeb,0x00,0x00,0x00, -0x40,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x44,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x49,0x01,0x00,0x00,0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, 
-0x52,0x01,0x00,0x00,0xeb,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xfc,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x44,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x45,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x45,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x4c,0x03,0x00,0x00,0x78,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xab,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xe3,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x91,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf7,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x45,0x03,0x00,0x00, -0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x48,0x03,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x4c,0x03,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x02,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x08,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x52,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x52,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x02,0x00,0x00, -0x0d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x10,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x64,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x18,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x13,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x1e,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x52,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x64,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x32,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x64,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x52,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x61,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x46,0x02,0x00,0x00, -0x41,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x41,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4c,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x53,0x03,0x00,0x00,0x52,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x68,0x00,0x00,0x00, 
-0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x4e,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x80,0x01,0x00,0x00,0x5d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x54,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6c,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x58,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x5c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x80,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x5a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x58,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x5c,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x5c,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x31,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9f,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x5c,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x5a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x54,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x4e,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x34,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, 
-0xb9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x35,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0xcf,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xce,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd2,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x36,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcd,0x02,0x00,0x00,0x30,0x03,0x00,0x00, -0xd7,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd6,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x02,0x00,0x00, -0xd5,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0x35,0x03,0x00,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x2e,0x03,0x00,0x00, -0xef,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x38,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xee,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf2,0x02,0x00,0x00, -0xed,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xed,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x2c,0x03,0x00,0x00,0xf7,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, 
-0x3a,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xfa,0x02,0x00,0x00,0xf5,0x02,0x00,0x00, -0xf6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0x3a,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xfd,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x02,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x00,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x01,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x02,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0x00,0x03,0x00,0x00,0xf5,0x02,0x00,0x00,0x08,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x0b,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x09,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0xea,0x02,0x00,0x00,0x38,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x16,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0xca,0x02,0x00,0x00, -0x17,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x18,0x03,0x00,0x00,0xe2,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x35,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x1e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x24,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x27,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x29,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x91,0x01,0x00,0x00,0x2a,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0xef,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x38,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x03,0x00,0x00, -0x36,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x35,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_f32_aligned_s_len = 12096; - -unsigned char matmul_f16_f32_aligned_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x51,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, 
-0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf9,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x57,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x59,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x59,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x98,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9a,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, 
-0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, 
-0x2e,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x49,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x55,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x1e,0x00,0x03,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x58,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xa9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x97,0x02,0x00,0x00, -0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x98,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x99,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, 
-0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, 
-0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0xb7,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xd3,0x02,0x00,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, 
-0xde,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfb,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xf4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfb,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0xcf,0x02,0x00,0x00,0x30,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x36,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x74,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5b,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xff,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0x5b,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00,0x30,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, -0x89,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x37,0x02,0x00,0x00,0x93,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb8,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xac,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xdd,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xef,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xef,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, 
-0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00, -0xde,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xff,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xed,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0xec,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xde,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x35,0x02,0x00,0x00, -0xf6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0xb5,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0x33,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0xe3,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x03,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0x31,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x05,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x16,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0xaa,0x01,0x00,0x00,0x20,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0xd7,0x01,0x00,0x00,0x15,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x28,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x06,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0xe5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0xd9,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x65,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x60,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, 
-0xc0,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x85,0x02,0x00,0x00,0x80,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8d,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8b,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x8b,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x96,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x94,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0xa5,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xab,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xb0,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x5b,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x82,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x81,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_f32_aligned_s_fp32_len = 10464; - -unsigned char matmul_f16_f32_l_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, 
-0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xac,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, 
-0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x57,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5f,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x66,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6c,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, 
-0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x78,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9a,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xae,0x01,0x00,0x00, 
-0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd4,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xd9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe0,0x01,0x00,0x00, 
-0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x05,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x07,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, 
-0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, 
-0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_l_len = 10172; - -unsigned char matmul_f16_f32_l_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, 
-0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, 
-0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, 
-0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa8,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x98,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7b,0x01,0x00,0x00, -0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0a,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xbe,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xbe,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xb0,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x59,0x00,0x00,0x00, 
-0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, 
-0x90,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x8e,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_l_fp32_len = 10100; - -unsigned char matmul_f16_f32_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
-0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, 
-0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xac,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, 
-0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x57,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5f,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x66,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6c,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x78,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9a,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd4,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xd9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe0,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x05,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x07,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, 
-0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_m_len = 10172; - -unsigned char matmul_f16_f32_m_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
-0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
-0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa8,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x98,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7b,0x01,0x00,0x00, -0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, 
-0xc5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0a,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xbe,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xbe,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xb0,0x01,0x00,0x00, 
-0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x96,0x02,0x00,0x00, 
-0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x69,0x02,0x00,0x00, 
-0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x8e,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_m_fp32_len = 10100; - -unsigned char matmul_f16_f32_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 
-0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, 
-0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, 
-0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, 
-0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, 
-0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xac,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x57,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5f,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x66,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6c,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x78,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9a,0x01,0x00,0x00, 
-0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, 
-0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd4,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xd9,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe0,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
-0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x05,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x07,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3a,0x02,0x00,0x00, -0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, 
-0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_s_len = 10172; - -unsigned char matmul_f16_f32_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, 
-0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, 
-0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, 
-0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa8,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x98,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x65,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7b,0x01,0x00,0x00, -0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, 
-0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0a,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xbe,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xbe,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xb0,0x01,0x00,0x00, -0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, 
-0xbc,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, 
-0x9e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x01,0x00,0x00, -0x8e,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x34,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_f32_s_fp32_len = 10100; - -unsigned char matmul_f16_l_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcc,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2c,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, 
-0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x80,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x97,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x72,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x90,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xab,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, 
-0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x54,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0xaf,0x02,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x64,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x68,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0x6b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6e,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x76,0x01,0x00,0x00,0x71,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7e,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xcb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, 
-0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, 
-0xe2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x0a,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc3,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbf,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x97,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x05,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x35,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x38,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x34,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x38,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3c,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x40,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x68,0x00,0x00,0x00, 
-0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x54,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x58,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x60,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x60,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x63,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x68,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x66,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x90,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_l_len = 10156; - -unsigned char matmul_f16_l_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xca,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 
-0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa9,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x99,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, 
-0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb1,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5e,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x10,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xb8,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0xcf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd6,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0x0a,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe2,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00, 
-0xe5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe5,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x01,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x03,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, -0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x39,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x56,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x77,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x32,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_l_fp32_len = 10116; - -unsigned char matmul_f16_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcc,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2c,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x80,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x97,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x72,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x90,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
-0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xab,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x54,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0xaf,0x02,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x64,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x68,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0x6b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6e,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x76,0x01,0x00,0x00,0x71,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7e,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, 
-0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xcb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, 
-0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x0a,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc3,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbf,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x97,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x05,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x12,0x00,0x00,0x00, 
-0x24,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x35,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x38,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x34,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x38,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3c,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x40,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x54,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x58,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x60,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x60,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x63,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x68,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x66,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x90,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, 
-0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_m_len = 10156; - -unsigned char matmul_f16_m_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xca,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, 
-0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, 
-0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa9,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x99,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb1,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5e,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x10,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x8e,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, 
-0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xb8,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0xcf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd6,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0x0a,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe2,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00, -0xe5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe5,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, 
-0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x01,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x03,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x2e,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, -0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x39,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x56,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x77,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x32,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_m_fp32_len = 10116; - -unsigned char matmul_f16_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcc,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x2c,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xfa,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x02,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
-0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x80,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x91,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x97,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xae,0x01,0x00,0x00,0xec,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x72,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x90,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
-0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, 
-0xd0,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xab,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x9b,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0a,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x54,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x0a,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0xaf,0x02,0x00,0x00,0x61,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0x64,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x68,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x12,0x02,0x00,0x00, -0x6b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6e,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x76,0x01,0x00,0x00,0x71,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x7e,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x94,0x01,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0xcb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa8,0x01,0x00,0x00, -0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xb5,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc8,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xcf,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, -0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x0e,0x02,0x00,0x00,0xd7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, -0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0x0a,0x02,0x00,0x00,0xe5,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xc3,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xbf,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x97,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x82,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x97,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, 
-0x05,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x05,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x27,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x35,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x38,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
-0x34,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x38,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3c,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x40,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x54,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x58,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x60,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x60,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x63,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x68,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x66,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, 
-0x6f,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x90,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f16_s_len = 10156; - -unsigned char matmul_f16_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xca,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x16,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x17,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x45,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x45,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x47,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x71,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xef,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x19,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, -0x3a,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x44,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x46,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, 
-0x81,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xb9,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xaf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x70,0x02,0x00,0x00,0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x72,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x3b,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x8e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, 
-0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0xad,0x02,0x00,0x00,0x03,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf0,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xa9,0x02,0x00,0x00, -0x1a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, 
-0x24,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x9c,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x24,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x12,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x2f,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x99,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x2e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0xb1,0x02,0x00,0x00,0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x56,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5e,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0xaa,0x02,0x00,0x00,0x1a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x20,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x65,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x10,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0xb7,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0xb7,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0xf0,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb8,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa4,0x01,0x00,0x00, -0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, -0xc6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc6,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xb8,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0xcf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd2,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd6,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xda,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0x0a,0x02,0x00,0x00,0xdf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x01,0x00,0x00, -0xdf,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe2,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x08,0x02,0x00,0x00, -0xe5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xea,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe5,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0xbf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xc1,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xb0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x01,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x03,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x17,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x29,0x02,0x00,0x00, -0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x32,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x3e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x02,0x00,0x00, 
-0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x39,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x9a,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x56,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x9f,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x61,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x66,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x64,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x6f,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, 
-0x77,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x7e,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x32,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f16_s_fp32_len = 10116; - -unsigned char matmul_f32_aligned_l_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x6a,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x14,0x03,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x67,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x67,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x69,0x01,0x00,0x00,0x22,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x69,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x6a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x92,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x95,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x69,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xaf,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xb0,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xb0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xb0,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb2,0x02,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x95,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xcb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x11,0x03,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00, 
+0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfd,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, 
+0x1c,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x66,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x68,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x68,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x99,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xbd,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xd4,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xeb,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0xea,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xec,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x69,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xaf,0x02,0x00,0x00, +0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xb0,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xb1,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 
+0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xec,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, 
+0x3e,0x00,0x03,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x17,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x2c,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0xe8,0x02,0x00,0x00,0x3f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x43,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x43,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, 
+0x9c,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xe9,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x45,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x49,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x69,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x79,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, 
+0x85,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x85,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x69,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, +0xe9,0x02,0x00,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x43,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0xec,0x02,0x00,0x00,0x9e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xf0,0x02,0x00,0x00,0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x4f,0x02,0x00,0x00, +0xa8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa7,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xab,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0xf6,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x08,0x03,0x00,0x00, 
+0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb7,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xf6,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x08,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xf6,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0x08,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, +0xcd,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, +0xf2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xd4,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd5,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0x08,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd9,0x01,0x00,0x00,0xf6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0xde,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xf7,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe1,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x05,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0x05,0x02,0x00,0x00,0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe5,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe9,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, 
+0xf5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, +0xf9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfc,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, +0xf2,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x01,0x02,0x00,0x00,0x63,0x01,0x00,0x00,0x00,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xd4,0x01,0x00,0x00, +0x03,0x02,0x00,0x00,0xed,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x05,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x09,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0x4d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0xf8,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x0b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x11,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x4b,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0xfc,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x13,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x17,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x1c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x1b,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, +0x1a,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x21,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x22,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0x00,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x23,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x27,0x02,0x00,0x00,0x22,0x02,0x00,0x00, 
+0x23,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00, +0xf8,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x29,0x02,0x00,0x00, +0xfe,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2d,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0xfc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x00,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x00,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0xd4,0x01,0x00,0x00,0x37,0x02,0x00,0x00,0xbf,0x01,0x00,0x00, +0x36,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x38,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xd4,0x01,0x00,0x00,0x3e,0x02,0x00,0x00, +0xed,0x01,0x00,0x00,0x2b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x40,0x02,0x00,0x00, +0x3f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x42,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x42,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, +0x00,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x21,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0xfc,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x11,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x13,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0xf8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa7,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0xd8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x96,0x00,0x00,0x00, 
+0x56,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x61,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x63,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x63,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x63,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x69,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x64,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x70,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xda,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x70,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7d,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0xd9,0x02,0x00,0x00,0xf5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xdc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0xd2,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x91,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x95,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x90,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x97,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x97,0x02,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x9d,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9d,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x98,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xa5,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa3,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa5,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0xab,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xae,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xac,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0x6d,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0xda,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xcd,0x02,0x00,0x00,0xce,0x02,0x00,0x00, +0xb2,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xce,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xae,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x97,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x99,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x92,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x72,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f16_aligned_len = 11200; + +unsigned char matmul_f16_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xd9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x39,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x02,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x03,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x14,0x03,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x22,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x23,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4d,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4e,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4e,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x39,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x80,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x82,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x82,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xfc,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xff,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00, 
+0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x48,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x49,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4d,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x4f,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x90,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x91,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xbe,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x7f,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x80,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0x81,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x87,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x9d,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xc1,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc0,0x02,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0x8e,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xde,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xbc,0x02,0x00,0x00,0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xff,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x06,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x12,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xff,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0xb8,0x02,0x00,0x00,0x26,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x30,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x45,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xff,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x06,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, +0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0xbc,0x02,0x00,0x00,0x71,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x1f,0x02,0x00,0x00,0x7b,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0xc2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x80,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc2,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x82,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x86,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8e,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x89,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xc6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x82,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xc7,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaf,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, 
+0xf6,0x00,0x04,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbb,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0xc7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xd5,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xc7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0x1d,0x02,0x00,0x00,0xde,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdd,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe1,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x1b,0x02,0x00,0x00, +0xe6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe5,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe9,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe4,0x01,0x00,0x00,0x19,0x02,0x00,0x00,0xee,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, +0xce,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
+0xfa,0x00,0x04,0x00,0xf1,0x01,0x00,0x00,0xec,0x01,0x00,0x00, +0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0x17,0x02,0x00,0x00,0xf4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf9,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0x01,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0xd0,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x09,0x02,0x00,0x00,0x92,0x01,0x00,0x00,0x08,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x0f,0x02,0x00,0x00,0xbf,0x01,0x00,0x00,0xfd,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x10,0x02,0x00,0x00, +0x0f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x14,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x12,0x02,0x00,0x00,0x14,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0xd0,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0xc8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x26,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x39,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x34,0x02,0x00,0x00, +0x3c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x45,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x41,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x45,0x02,0x00,0x00,0x40,0x02,0x00,0x00, +0x41,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x47,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xaa,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x40,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x49,0x02,0x00,0x00, +0x4a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4d,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x48,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xc7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x5c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x48,0x02,0x00,0x00, 
+0xa2,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x61,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x65,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x69,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x68,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x70,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x73,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x79,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0x7c,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x68,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x89,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, +0x55,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x94,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xc4,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x9d,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0x82,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x67,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x49,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f16_aligned_fp32_len = 10288; + +unsigned char matmul_f16_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xda,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x3b,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x04,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x21,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4e,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x4f,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3b,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x82,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x82,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x82,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x84,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x84,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xf6,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x14,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x45,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4e,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5c,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x90,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xa6,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xbd,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x81,0x02,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x82,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, 
+0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0xaf,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x23,0x02,0x00,0x00,0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xdf,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xf0,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xbd,0x02,0x00,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x73,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xb9,0x02,0x00,0x00,0x24,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2a,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2e,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xba,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, 
+0x39,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0xa9,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x41,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0x58,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x5c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xba,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x64,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x69,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, +0x6c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0xbd,0x02,0x00,0x00,0x70,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, +0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x21,0x02,0x00,0x00,0x7a,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7d,0x01,0x00,0x00,0x78,0x01,0x00,0x00, 
+0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0xc7,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x81,0x01,0x00,0x00, +0x82,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x85,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x87,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0xfb,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa7,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x87,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x82,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xc7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaf,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbb,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xc8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0x1f,0x02,0x00,0x00,0xde,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdd,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe1,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x1d,0x02,0x00,0x00, +0xe6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xcd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe5,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe9,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, 
+0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe4,0x01,0x00,0x00,0x1b,0x02,0x00,0x00,0xee,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, +0xcf,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf1,0x01,0x00,0x00,0xec,0x01,0x00,0x00, +0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0x19,0x02,0x00,0x00,0xf4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xd1,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf9,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0x01,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00, +0x09,0x02,0x00,0x00,0x91,0x01,0x00,0x00,0x08,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xa6,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xbf,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x16,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x14,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0xd1,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0xcf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, 
+0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x28,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x34,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x35,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x41,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xaa,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0x44,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x43,0x02,0x00,0x00, +0x44,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x47,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x49,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x4b,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4f,0x02,0x00,0x00, +0x4a,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, +0x29,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x5c,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x63,0x02,0x00,0x00,0x64,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x67,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x6b,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6f,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x77,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x80,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00, +0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x8f,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00, 
+0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x5c,0x01,0x00,0x00,0x9f,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa1,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x64,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x61,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x4c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0xab,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x44,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x44,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x43,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_f16_f32_len = 10332; + +unsigned char matmul_f16_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x13,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x74,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, 
+0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x03,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3c,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x68,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x69,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x69,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x69,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6b,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6b,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x74,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xba,0x02,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xbb,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xbb,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xbb,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbd,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbd,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
+0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfd,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x66,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x67,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x6a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xdf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x74,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xba,0x02,0x00,0x00, +0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xbb,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xbc,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xbc,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xca,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, 
+0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xfa,0x02,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe2,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x5c,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, 
+0x1a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1e,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x33,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0xf2,0x02,0x00,0x00, +0x3f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x43,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x43,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0xf3,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x49,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x7e,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0xfa,0x02,0x00,0x00,0x52,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x5d,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6d,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6d,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x6d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6d,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6d,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x8d,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6d,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, 
+0x94,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6d,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6d,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa2,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0xf3,0x02,0x00,0x00,0xa5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x43,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x45,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xf6,0x02,0x00,0x00, +0xa9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0xfa,0x02,0x00,0x00,0xac,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x5a,0x02,0x00,0x00,0xb3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xfc,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb2,0x01,0x00,0x00, +0xb3,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb6,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb8,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, +0x12,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc6,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0xc2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0x00,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0x12,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0x00,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0x12,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0xfc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0xca,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe0,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x12,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x00,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xba,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x12,0x02,0x00,0x00, +0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x01,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xef,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf4,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0x01,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x01,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, +0x01,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x09,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0xfc,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0x63,0x01,0x00,0x00, +0x0b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xdf,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xf8,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x01,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0x58,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x02,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x16,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1a,0x02,0x00,0x00, +0x15,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x15,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x06,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x1e,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x22,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, +0x1e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x08,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x26,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2a,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x26,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x25,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0a,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x02,0x00,0x00,0x02,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00, 
+0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x06,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x0a,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00,0x42,0x02,0x00,0x00, +0xca,0x01,0x00,0x00,0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00, +0x49,0x02,0x00,0x00,0xf8,0x01,0x00,0x00,0x36,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc4,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x4e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x4d,0x02,0x00,0x00, +0x4f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x08,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x26,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00, +0x06,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x17,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x58,0x02,0x00,0x00,0x02,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x16,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xfc,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x61,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00, +0x96,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x02,0x00,0x00,0xa8,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x80,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x88,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x88,0x02,0x00,0x00,0x83,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x62,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x94,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x83,0x02,0x00,0x00,0xdc,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9c,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa0,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x9c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0xda,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa4,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
+0xa8,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xae,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xb0,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xae,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, +0xb5,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb0,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc2,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xa3,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0xb9,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb7,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0xb9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb8,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xce,0x02,0x00,0x00, +0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0xd0,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xd6,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, +0xd6,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xd8,0x02,0x00,0x00, +0xd7,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb9,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa4,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x85,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, +0xe3,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f16_f32_aligned_len = 11360; + +unsigned char matmul_f16_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xd6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x37,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x02,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x22,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x23,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4f,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x51,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x7e,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x80,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
+0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xfc,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xff,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 
+0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x48,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x49,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x4d,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x53,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x8e,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xbb,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xbc,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x37,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7d,0x02,0x00,0x00, +0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7e,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x92,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbc,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
+0xa4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0b,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 
+0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x06,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xff,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x06,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xff,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0a,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x21,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x26,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0xb6,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x30,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0xbd,0x02,0x00,0x00,0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x53,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
+0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x53,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x53,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x53,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, +0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0xb9,0x02,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, +0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x1d,0x02,0x00,0x00,0x79,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0xbf,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x78,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7c,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x80,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x84,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, 
+0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x90,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xd5,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x81,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xad,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb1,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0xc5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0xcd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0a,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0xc4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x1b,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdb,0x01,0x00,0x00, +0xdc,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdf,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xda,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x19,0x02,0x00,0x00, +0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0xec,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xef,0x01,0x00,0x00, +0xcb,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xea,0x01,0x00,0x00, +0x15,0x02,0x00,0x00,0xf2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xcd,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf3,0x01,0x00,0x00, +0xf2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, 
+0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, +0xfb,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0xcd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x90,0x01,0x00,0x00,0x06,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0xbd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x11,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0xcd,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x17,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x12,0x00,0x00,0x00, 
+0x2f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x32,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x40,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3f,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x45,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0xa1,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4b,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xc5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0x2b,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x63,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5f,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x63,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x6b,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, 
+0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6b,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x79,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x73,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0x86,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x87,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x92,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x53,0x01,0x00,0x00,0x9b,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x60,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, 
+0xa9,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x48,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa1,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x47,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f16_f32_aligned_fp32_len = 10240; + +unsigned char matmul_f16_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xd6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x37,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x20,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x21,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4f,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x51,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x7e,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x80,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
+0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x02,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x00,0x00, 
+0xa6,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x45,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4e,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5c,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xba,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xbc,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x7d,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x7f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x7e,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, 
+0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbc,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, 
+0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, 
+0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xdf,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, 
+0xfd,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, +0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x73,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xb5,0x02,0x00,0x00,0x24,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2a,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2e,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x39,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0xa5,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x41,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, 
+0xb6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x58,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x5c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x68,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x41,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x6b,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, +0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x72,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x1d,0x02,0x00,0x00,0x79,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0xbf,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x78,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7c,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x80,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x84,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x86,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
+0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x87,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0xbf,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0xd5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x86,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xc3,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xab,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0xd2,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb9,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0xc4,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc4,0x02,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc9,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00, +0xc7,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xca,0x01,0x00,0x00, +0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0xbf,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xd3,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xab,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x1b,0x02,0x00,0x00, +0xdc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdf,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xda,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0x19,0x02,0x00,0x00,0xe4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, +0xc9,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, +0x17,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xeb,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x15,0x02,0x00,0x00, +0xf2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf7,0x01,0x00,0x00,0xcd,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf3,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, +0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xf2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0xf9,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0xcd,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x90,0x01,0x00,0x00,0x06,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, +0xbd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x10,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0xcb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x24,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, 
+0x2a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x30,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x30,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x37,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x39,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xa3,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3f,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x4b,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00,0x48,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4b,0x02,0x00,0x00, +0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0x25,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x63,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5f,0x02,0x00,0x00,0x60,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x63,0x02,0x00,0x00, +0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x68,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, 
+0xab,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6b,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x79,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x7c,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7a,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x86,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x53,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x91,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x5c,0x01,0x00,0x00,0x9b,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x68,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x60,0x02,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x48,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x48,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x40,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa3,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_f16_f32_fp32_len = 10260; + +unsigned char matmul_f16_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xd7,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x37,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, 
+0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x20,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x21,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4f,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x51,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x7e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x7e,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x80,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 
+0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x02,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 
+0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x45,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4e,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x8e,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbb,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xbc,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x7d,0x02,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0x7f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x85,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x9b,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
+0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbc,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 
+0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbe,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xdf,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xa6,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0xba,0x02,0x00,0x00, +0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x73,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0xb6,0x02,0x00,0x00,0x24,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2a,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2e,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x39,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0xa6,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x41,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x3f,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x60,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0xbe,0x02,0x00,0x00,0x58,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x11,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0x68,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0xb7,0x02,0x00,0x00, +0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x28,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xbe,0x02,0x00,0x00, +0x72,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x1d,0x02,0x00,0x00,0x79,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0xc0,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x78,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7c,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x80,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x84,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, 
+0x3e,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x90,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x81,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xc4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xad,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb1,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb9,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0xc5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xd3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0xcd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0xc5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x1b,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdb,0x01,0x00,0x00, +0xdc,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdf,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xda,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x19,0x02,0x00,0x00, +0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0xca,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0xec,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xef,0x01,0x00,0x00, +0xcc,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xea,0x01,0x00,0x00, +0x15,0x02,0x00,0x00,0xf2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xce,0x02,0x00,0x00, 
+0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf3,0x01,0x00,0x00, +0xf2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0xcc,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, +0xfb,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0xca,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0xce,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x90,0x01,0x00,0x00,0x06,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0xbd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x11,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0xce,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x17,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0xca,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x95,0x00,0x00,0x00, 
+0x24,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x32,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x40,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3f,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x45,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4b,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0xc5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0x2b,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xaa,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x63,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5f,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x63,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x6b,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x67,0x02,0x00,0x00,0x68,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6b,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x73,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x79,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x73,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0x86,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x87,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x89,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x92,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9c,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x68,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x68,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x67,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x60,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x48,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x47,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f16_fp32_len = 10276; + +unsigned char matmul_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xda,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x3b,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x21,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x22,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x50,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x50,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x52,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3b,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x82,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x82,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x82,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x84,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x84,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
+0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, 
+0x84,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x46,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x46,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4f,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x51,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x8e,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x90,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xa6,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xbd,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x81,0x02,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x82,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0xaf,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x23,0x02,0x00,0x00,0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0xdf,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xbd,0x02,0x00,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x10,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0xfb,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x16,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x17,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x73,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0xfb,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x25,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x29,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2f,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0xa7,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x3a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0xa9,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, 
+0xc1,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x42,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x40,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x41,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, +0x59,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x60,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x68,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x69,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x29,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x70,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00, +0xc1,0x02,0x00,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x21,0x02,0x00,0x00, +0x7a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0xc3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x82,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0xc7,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
+0xfa,0x00,0x04,0x00,0x85,0x01,0x00,0x00,0x80,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x87,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8d,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xc7,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0xc7,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0xfb,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa7,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x87,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x82,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0xc7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0xc8,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xaf,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb7,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00,0xc8,0x02,0x00,0x00, +0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc9,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0xc3,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0xd6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd9,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x1f,0x02,0x00,0x00,0xde,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xc9,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe1,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0x1d,0x02,0x00,0x00,0xe6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xcd,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe5,0x01,0x00,0x00, +0xe6,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe9,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x1b,0x02,0x00,0x00, +0xee,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0xcf,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf1,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x19,0x02,0x00,0x00,0xf4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, +0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0xc9,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0xcd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0x01,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0xd1,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xa6,0x01,0x00,0x00,0x09,0x02,0x00,0x00,0x91,0x01,0x00,0x00, +0x08,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xf6,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xa6,0x01,0x00,0x00,0x10,0x02,0x00,0x00, +0xbf,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf6,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x11,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x14,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00, +0x16,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x15,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x14,0x02,0x00,0x00,0x16,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0xc9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x95,0x00,0x00,0x00, +0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x35,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x41,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x47,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x43,0x02,0x00,0x00,0x44,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x47,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x49,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x4b,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4f,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x53,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x54,0x02,0x00,0x00, +0x56,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xc7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, 
+0xa3,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x63,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x67,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x69,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x69,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x6f,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x6b,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6f,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x77,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x7e,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0x8a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x3f,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0xab,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xc3,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x11,0x01,0x00,0x00,0x9f,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x64,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x44,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x44,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x41,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x43,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f32_len = 10324; + +unsigned char matmul_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x18,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, +0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, +0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x79,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x03,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x43,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6e,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x6f,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x71,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbf,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xc0,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc2,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc2,0x02,0x00,0x00,0x21,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -32306,169 +17388,151 @@ unsigned char matmul_f32_aligned_l_data[] = { 0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, 0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, 0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0xfb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xfd,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, 0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x39,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x44,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x45,0x01,0x00,0x00, 0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x46,0x01,0x00,0x00, 0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
+0x62,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x86,0x01,0x00,0x00, -0xe6,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x87,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x92,0x01,0x00,0x00, -0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x94,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x69,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x6a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x6e,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x6f,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x70,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x70,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, 0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x03,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, 
-0xe6,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x20,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0xcc,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xcd,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, 0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x02,0x00,0x00, -0xe6,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0xe4,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfa,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xfc,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, 0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x71,0x02,0x00,0x00, 0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x11,0x03,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x11,0x03,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x3b,0x00,0x04,0x00,0x13,0x03,0x00,0x00,0x14,0x03,0x00,0x00, +0x79,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xbf,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0xc1,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc1,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, 0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0x84,0x00,0x00,0x00, 0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x20,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, 
+0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xce,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xfc,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, @@ -32527,672 +17591,628 @@ unsigned char matmul_f32_aligned_l_data[] = { 0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x51,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x39,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x49,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, 
+0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xe6,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xff,0x02,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfb,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb0,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xe7,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
+0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xf7,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0xf7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xfb,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, 0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, 
-0x17,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x73,0x00,0x04,0x00, 
-0xe6,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x49,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x4a,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x18,0x01,0x00,0x00, 
+0x17,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x2d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x32,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0x47,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x4b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x4b,0x01,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0xf8,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x7e,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, 0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x51,0x03,0x00,0x00,0x80,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xbe,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0xff,0x02,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7d,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xc4,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x82,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, +0x84,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x92,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x07,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x07,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa7,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xf8,0x02,0x00,0x00,0xaa,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4d,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xfb,0x02,0x00,0x00, +0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xff,0x02,0x00,0x00,0xb1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x01,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x5f,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x01,0x03,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb7,0x01,0x00,0x00, +0xb8,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x05,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0xc0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0x05,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0x17,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0x05,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0x17,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0x05,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x4a,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0x4d,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x51,0x03,0x00,0x00, -0x03,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x57,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x15,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1d,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x55,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x34,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x37,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x69,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, +0xd8,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0x17,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0x01,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe5,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0x17,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbd,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x17,0x02,0x00,0x00, +0xee,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0x06,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf1,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x02,0x00,0x00,0xf4,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, 
+0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0x06,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x03,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x06,0x03,0x00,0x00,0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x03,0x02,0x00,0x00, +0x06,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x09,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0x01,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x0b,0x01,0x00,0x00,0x11,0x02,0x00,0x00,0x6b,0x01,0x00,0x00, +0x10,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xe4,0x01,0x00,0x00,0x13,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, +0x01,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x13,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x14,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x06,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x5d,0x02,0x00,0x00, +0x1c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x1b,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, +0x1a,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x21,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0x0b,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x23,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x27,0x02,0x00,0x00,0x22,0x02,0x00,0x00, +0x23,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x29,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x29,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0d,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x59,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2b,0x02,0x00,0x00, +0x2c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2f,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x31,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x31,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x37,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x32,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, 0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x11,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x11,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x58,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3f,0x02,0x00,0x00, -0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x67,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4b,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x57,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x63,0x02,0x00,0x00, -0x88,0x01,0x00,0x00,0x62,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x65,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x66,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x59,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x89,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x5f,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x5d,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x61,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x36,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x73,0x00,0x04,0x00, 
-0xba,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa4,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x39,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x37,0x03,0x00,0x00,0xd4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb8,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd3,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd7,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xdc,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xdf,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xdb,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdf,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xda,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x3a,0x03,0x00,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0x33,0x03,0x00,0x00, -0xf4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf7,0x02,0x00,0x00,0x3d,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf3,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf7,0x02,0x00,0x00, -0xf2,0x02,0x00,0x00,0xf3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x31,0x03,0x00,0x00,0xfc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xff,0x02,0x00,0x00, -0x3f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfb,0x02,0x00,0x00,0xfc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xff,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xfa,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, -0xe7,0x02,0x00,0x00,0x3f,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x07,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x06,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0c,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0xfa,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x10,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0f,0x03,0x00,0x00,0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, +0x39,0x02,0x00,0x00,0x0d,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x3c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x02,0x00,0x00,0x0b,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x40,0x02,0x00,0x00, 0x0f,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x18,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0xcf,0x02,0x00,0x00, -0x1c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1d,0x03,0x00,0x00,0xe7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1f,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x23,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x27,0x03,0x00,0x00,0x29,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, -0x2a,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x2d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2e,0x03,0x00,0x00,0x2d,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0xfc,0x00,0x00,0x00,0x2f,0x03,0x00,0x00,0x14,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2f,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x10,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x03,0x00,0x00, -0x3b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x37,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x46,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xe4,0x01,0x00,0x00,0x47,0x02,0x00,0x00, +0xcf,0x01,0x00,0x00,0x46,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x47,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xe4,0x01,0x00,0x00, +0x4e,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0x3b,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, +0x4e,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0x42,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc4,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x52,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x31,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x33,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x29,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x0b,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x21,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x01,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x61,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x96,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0xa8,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x72,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7c,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, +0x74,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, +0x82,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x85,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x85,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x87,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x87,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x89,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x67,0x02,0x00,0x00, +0x91,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0x9c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0xeb,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa1,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa5,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0xa1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa7,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0xdf,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0xed,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa9,0x02,0x00,0x00, +0xaa,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xad,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x95,0x02,0x00,0x00, 
+0xed,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xb3,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xb5,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb3,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xb5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc2,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0xbe,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xbc,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0xbe,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xc9,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xca,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0xcb,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0xed,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, +0xd5,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, +0xed,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xdb,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0xdb,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00, +0xdd,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xdd,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbe,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xaa,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0xed,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x8a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x87,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x89,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t matmul_f32_aligned_l_len = 12168; +const uint64_t matmul_f32_aligned_len = 11432; -unsigned char matmul_f32_aligned_l_fp32_data[] = { +unsigned char matmul_f32_aligned_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xe9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xcf,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x4a,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x30,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -33231,28 +18251,18156 @@ unsigned char matmul_f32_aligned_l_fp32_data[] = { 0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf6,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x01,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x01,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x28,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x01,0x00,0x00, 
+0x1d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x1e,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x01,0x00,0x00, 0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x49,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x51,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4b,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x30,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x76,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x77,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x77,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x77,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x79,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xfb,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0xff,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x43,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x44,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x48,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x86,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x88,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xb4,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xb5,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x28,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x76,0x02,0x00,0x00,0xc4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x77,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x78,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x78,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0xc1,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x9d,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, 
+0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x18,0x02,0x00,0x00, +0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x8e,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0xde,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xae,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x05,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x09,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x05,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x05,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x05,0x01,0x00,0x00, +0x1a,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x34,0x00,0x00,0x00, 
+0xef,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0xae,0x02,0x00,0x00, +0x21,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x25,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x25,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0xaf,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2b,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x26,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0x7e,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x34,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x05,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x4d,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x05,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x05,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x05,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x60,0x01,0x00,0x00, 
+0x5f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0xaf,0x02,0x00,0x00,0x64,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x25,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x68,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0xb6,0x02,0x00,0x00,0x6b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x6f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x16,0x02,0x00,0x00, +0x72,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0xb8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x71,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x75,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x70,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0xbc,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7d,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x81,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x85,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0xb8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, 
+0x9e,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0xce,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x81,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0xbd,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa6,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xaa,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xae,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb2,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, +0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc0,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc5,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0xb8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xcb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xd2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0x14,0x02,0x00,0x00,0xd5,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xbe,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd4,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xda,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xda,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0x12,0x02,0x00,0x00,0xdd,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdc,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe0,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x10,0x02,0x00,0x00, +0xe5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe8,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe3,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xeb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0xc6,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf0,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, +0xbe,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0xc2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0xc6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x89,0x01,0x00,0x00, +0xff,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xb6,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc4,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x07,0x02,0x00,0x00, 
+0x0a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x09,0x02,0x00,0x00, +0x0b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0xc2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xda,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x14,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x67,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0x96,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0xa8,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x29,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0x31,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x32,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0x2b,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x36,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, +0x39,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x38,0x02,0x00,0x00,0x39,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3c,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, 
+0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x41,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x40,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x44,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, +0xbe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x51,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x56,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3f,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x5c,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x60,0x02,0x00,0x00, +0x61,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x64,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x60,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x6c,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc2,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x5f,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x7e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, +0x82,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x92,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x05,0x01,0x00,0x00, +0x94,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x94,0x02,0x00,0x00, +0x93,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x75,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x58,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x41,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x39,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x36,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f32_aligned_fp32_len = 10124; + +unsigned char matmul_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xd3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x05,0x01,0x00,0x00, 
+0x44,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x34,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x1e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x1f,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x4d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 
+0x48,0x00,0x05,0x00,0x4d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x4d,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x34,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x7a,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x7b,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7b,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x7b,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7d,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
+0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xf9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x02,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x10,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x42,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x43,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x43,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4c,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x4e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4e,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, 
+0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xb8,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xb9,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xb8,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc2,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x7a,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x7b,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x7c,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8c,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xb9,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xa1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0xdf,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xed,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xeb,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xed,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xed,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, +0x0c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x15,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1d,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0xb2,0x02,0x00,0x00, +0x22,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x26,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x29,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0xa6,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x28,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0xa7,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x12,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x37,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0xa2,0x02,0x00,0x00,0x78,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x37,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x3f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3d,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x10,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x13,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5c,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xb3,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x13,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x64,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x65,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x29,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x29,0x01,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0xb3,0x02,0x00,0x00,0x68,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x26,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x28,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0xb6,0x02,0x00,0x00,0x6c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0xba,0x02,0x00,0x00,0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x1a,0x02,0x00,0x00, +0x76,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0xbc,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x75,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x74,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7d,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x81,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x83,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x83,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x85,0x01,0x00,0x00, +0x84,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x89,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x85,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, 
+0xd2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x85,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0xab,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xc1,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xaa,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb2,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb6,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, +0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc4,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, +0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc9,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0xc9,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x13,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xcf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xab,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xab,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0xc1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0x18,0x02,0x00,0x00,0xd9,0x01,0x00,0x00, 
+0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0xc2,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0x16,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xc6,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe0,0x01,0x00,0x00, +0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x14,0x02,0x00,0x00, +0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0xef,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, +0xca,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf4,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0xc2,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0xc6,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0xca,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00, +0xfc,0x01,0x00,0x00,0xca,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x8d,0x01,0x00,0x00, +0x03,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xba,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x0d,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc3,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x0d,0x02,0x00,0x00, +0x0f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x95,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x35,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x3c,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x40,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3b,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x45,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x48,0x02,0x00,0x00, 
+0xa4,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x44,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x48,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0x44,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x43,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x22,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x54,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x60,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x64,0x02,0x00,0x00, +0x65,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x68,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x64,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x70,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6e,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, +0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, +0x63,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0x79,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x77,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x83,0x02,0x00,0x00,0x12,0x00,0x00,0x00, 
+0x82,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x84,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, +0x86,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0x8f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x10,0x01,0x00,0x00, +0x98,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x98,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x64,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x44,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0xa3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_f32_fp32_len = 10208; + +unsigned char matmul_q4_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x1c,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x50,0x02,0x00,0x00, 
+0x99,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x34,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x35,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x63,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x50,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x96,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x97,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x97,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x97,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x99,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x99,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x10,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x14,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x00,0x00,0x00,0x41,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x25,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x26,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x26,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x58,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x59,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x59,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x62,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x64,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, 
+0x64,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x70,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0xa3,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa5,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xbb,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xd2,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xd3,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x96,0x02,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x97,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x98,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0x98,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x14,0x01,0x00,0x00, +0x1b,0x03,0x00,0x00,0x1e,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa5,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, 
+0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, 
+0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, 
+0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xdd,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xa8,0x00,0x00,0x00, 
+0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x14,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x1b,0x03,0x00,0x00,0x8e,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x33,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0xd0,0x02,0x00,0x00,0x38,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x42,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0xa7,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x4d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x4d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x4c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0xc0,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x4d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x55,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x53,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x78,0x00,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x70,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x55,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7e,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x55,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x55,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0xd4,0x02,0x00,0x00,0x85,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x88,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8c,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x36,0x02,0x00,0x00, +0x8f,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x92,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x94,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, +0xde,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x96,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa2,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0xde,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xde,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0xda,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xbb,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xbc,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, +0xf0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x97,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc0,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x94,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x96,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0xdf,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc4,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xca,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xed,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd0,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xed,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00, 
+0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0xed,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0xda,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00, +0xe8,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0xe8,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xbb,0x01,0x00,0x00, +0xea,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0xed,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xca,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xc4,0x01,0x00,0x00,0x34,0x02,0x00,0x00,0xf3,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0xe0,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, +0xf2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, +0x32,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0xe4,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xfa,0x01,0x00,0x00, +0xfb,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xfe,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x00,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x30,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x02,0x02,0x00,0x00,0x03,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x06,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x01,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0e,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x10,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x14,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x14,0x02,0x00,0x00, +0x16,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x16,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xbb,0x01,0x00,0x00,0x1e,0x02,0x00,0x00,0xa6,0x01,0x00,0x00, +0x1d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xbb,0x01,0x00,0x00,0x25,0x02,0x00,0x00, +0xd4,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x26,0x02,0x00,0x00,0x25,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0x26,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x19,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0x29,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00, +0x2b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x20,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x29,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x02,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x95,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, 
+0xa7,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0x4f,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x56,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x5c,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x60,0x02,0x00,0x00, +0x61,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x64,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x60,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x68,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x69,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xdc,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x44,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x78,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7c,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, 
+0x84,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x80,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x84,0x02,0x00,0x00, +0x7f,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x87,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x8c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8a,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x92,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x93,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x95,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x93,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x9f,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa1,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa6,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00, +0xc2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x70,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x95,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x95,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x59,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x56,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q4_0_f32_len = 10564; + +unsigned char matmul_q4_0_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x37,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x73,0x02,0x00,0x00, +0xbc,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, 
+0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x35,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x36,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x63,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x73,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xba,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xba,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xba,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbc,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
+0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x00,0x00,0x00,0x41, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, 
+0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x26,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x27,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x27,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x5b,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x5c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x5c,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x60,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x18,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x62,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x64,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x64,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x67,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x01,0x00,0x00, 
+0x01,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xde,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xf5,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xb9,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbb,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2c,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x36,0x03,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf6,0x01,0x00,0x00, +0xf7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, 
+0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xe2,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf7,0x02,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe3,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x11,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0xa9,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x36,0x03,0x00,0x00, +0x8e,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x2e,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00,0x32,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0xf3,0x02,0x00,0x00, +0x39,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0xf4,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x43,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x7e,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0xfb,0x02,0x00,0x00,0x4c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x4d,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x67,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x67,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x67,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x67,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, 
+0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x67,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x84,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x67,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x67,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x67,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa1,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xf4,0x02,0x00,0x00,0xa4,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3f,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0xa8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0xfb,0x02,0x00,0x00,0xab,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xaf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaf,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x59,0x02,0x00,0x00,0xb2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xfd,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, 
+0xb2,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb5,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xbd,0x01,0x00,0x00,0x01,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb9,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbd,0x01,0x00,0x00, +0xb8,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x13,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb8,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0x13,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc5,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0x01,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x13,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcf,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0x01,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0x13,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0xfd,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xde,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0xcd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0x13,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xba,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x01,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x11,0x02,0x00,0x00, +0xe8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xeb,0x01,0x00,0x00,0x02,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xeb,0x01,0x00,0x00, 
+0xe6,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xed,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x10,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe6,0x01,0x00,0x00,0x0f,0x02,0x00,0x00,0xee,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, +0x10,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf3,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0x10,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x02,0x03,0x00,0x00,0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, +0x00,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x03,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x04,0x02,0x00,0x00, +0x10,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0xfd,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x0b,0x02,0x00,0x00,0x5d,0x01,0x00,0x00, +0x0a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xde,0x01,0x00,0x00,0x0d,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0xfb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0d,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0f,0x02,0x00,0x00,0x10,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xef,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x02,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x13,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x13,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x57,0x02,0x00,0x00, +0x16,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x15,0x02,0x00,0x00,0x16,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x19,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x14,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0x07,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x1d,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x21,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, +0x1d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x23,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x23,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x09,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x09,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x25,0x02,0x00,0x00, +0x26,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x29,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x25,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0x2c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x0b,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x31,0x02,0x00,0x00, +0x2c,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x09,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x35,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0x0b,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xde,0x01,0x00,0x00,0x41,0x02,0x00,0x00, +0xc9,0x01,0x00,0x00,0x40,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xde,0x01,0x00,0x00, +0x48,0x02,0x00,0x00,0xf7,0x01,0x00,0x00,0x35,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x49,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0xc4,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x4c,0x02,0x00,0x00, +0x4e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x51,0x02,0x00,0x00,0x0b,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x26,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x26,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x09,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x23,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x25,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x07,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x16,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x13,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x15,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0xfd,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xaf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x96,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0xa8,0x00,0x00,0x00,0x66,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x47,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0x74,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x7f,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7f,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0xdd,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x87,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x02,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x94,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x99,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0xe7,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x9f,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa3,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa7,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xaf,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xaf,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xaf,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc2,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xf7,0x00,0x03,0x00,0xb8,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb6,0x02,0x00,0x00,0xb7,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, 
+0xcb,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0xce,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xcf,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xd2,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0xd4,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xd5,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x67,0x01,0x00,0x00, +0xd7,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x34,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xd7,0x02,0x00,0x00, +0xd6,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xb8,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa4,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x99,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q4_0_f32_aligned_len = 11156; + +unsigned char matmul_q4_0_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x0d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x49,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x34,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5f,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x60,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x60,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x60,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x49,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x8f,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x90,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x90,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x92,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, 
+0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1e,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00, +0x15,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x00,0x00,0x00,0x41,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x26,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x27,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x27,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x59,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x5e,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x5f,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x60,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x61,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x64,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa0,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa1,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xcd,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xce,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x8f,0x02,0x00,0x00,0xc4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x91,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x91,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x0c,0x03,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa1,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, 
+0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcd,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, 
+0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x11,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x15,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1a,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x15,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x0c,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x15,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x2d,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x2f,0x01,0x00,0x00,0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00,0x31,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x32,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x37,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0xca,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x41,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0x4a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x64,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x64,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x64,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x73,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x64,0x01,0x00,0x00, +0x78,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xc4,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xca,0x02,0x00,0x00, +0x7d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x3b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0xcd,0x02,0x00,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0xd1,0x02,0x00,0x00, +0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x2f,0x02,0x00,0x00,0x8b,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0xd3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x8a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x92,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xa8,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, 
+0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xe9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, +0xd7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xbf,0x01,0x00,0x00, +0xc0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, +0xdc,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xe5,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe6,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0x2d,0x02,0x00,0x00,0xee,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xed,0x01,0x00,0x00, +0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf1,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x2b,0x02,0x00,0x00, +0xf6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xf4,0x01,0x00,0x00,0x29,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0xdf,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0x27,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x05,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x09,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0xa2,0x01,0x00,0x00,0x18,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x19,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0xcf,0x01,0x00,0x00,0x0d,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x20,0x02,0x00,0x00, +0x1f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x22,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0x22,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, 
+0x1a,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x23,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0xb9,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x55,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x62,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x71,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x75,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x83,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x85,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x8e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x72,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x52,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q4_0_f32_aligned_fp32_len = 10408; + +unsigned char matmul_q4_0_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x15,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 
+0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x49,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00, 
+0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x32,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x33,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x61,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x61,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x63,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x49,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8f,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x90,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x90,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x92,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x10,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x14,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x00,0x00,0x00,0x41, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x25,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x26,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x56,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x57,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x57,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
+0x60,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x62,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6e,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa0,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa1,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xcd,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xce,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x8f,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x91,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x91,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0x14,0x03,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa1,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, +0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcd,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x83,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xd6,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, 
+0x13,0x01,0x00,0x00,0xa8,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x14,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x14,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x14,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x2c,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2e,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x2b,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x31,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x36,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0xca,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3c,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x40,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0xa7,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0xca,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x4b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x4a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0xb9,0x02,0x00,0x00,0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x4b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0xc1,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x53,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x51,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x52,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0xd1,0x02,0x00,0x00, +0x6a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6e,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x53,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x7a,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x53,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x53,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0xca,0x02,0x00,0x00, +0x7d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x3a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3c,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0xcd,0x02,0x00,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0xd1,0x02,0x00,0x00, +0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x2f,0x02,0x00,0x00,0x8b,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0xd3,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x8a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8e,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x92,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, 
+0xa4,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xa8,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0xae,0x01,0x00,0x00,0xe9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2b,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xe9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, +0xd7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xbf,0x01,0x00,0x00, +0xc0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, 
+0xdc,0x01,0x00,0x00,0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0xd3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2b,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xe5,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0x2d,0x02,0x00,0x00,0xee,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xed,0x01,0x00,0x00, +0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf1,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x2b,0x02,0x00,0x00, +0xf6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xf4,0x01,0x00,0x00,0x29,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0xdf,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x01,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0x27,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x05,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x09,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0xa2,0x01,0x00,0x00,0x18,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x19,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0xcf,0x01,0x00,0x00,0x0d,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x20,0x02,0x00,0x00, +0x1f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0x22,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0x22,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x23,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfe,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8a,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0xb9,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x95,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x48,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x55,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x62,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x71,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x75,0x02,0x00,0x00,0x70,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, 
+0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x83,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x85,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0x8e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x65,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x6e,0x01,0x00,0x00,0xad,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x72,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x6f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xb3,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x52,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q4_0_f32_fp32_len = 10444; + +unsigned char matmul_q4_1_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xf6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x55,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x39,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3a,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x67,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x68,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x68,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6a,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9b,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9c,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x9c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9c,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x9e,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x00,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x19,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x29,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x2a,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x2b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x31,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x5d,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x5e,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x5e,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x67,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x69,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x75,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x82,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x86,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xa9,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xaa,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc0,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xd7,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 
+0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x9b,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x9d,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x9d,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xaa,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xc4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xdd,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0xdd,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x09,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0xa8,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x19,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x8e,0x00,0x05,0x00,0x19,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x19,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x19,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x3d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x43,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x47,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x43,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x42,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x4d,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x52,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x51,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x52,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x52,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x5a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x58,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x59,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xd6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x64,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0x71,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x75,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x44,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0x86,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x41,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x43,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, +0x8d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x3b,0x02,0x00,0x00,0x94,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0xdf,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x97,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0xe3,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9b,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9f,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0xf5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa7,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0xe3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xe3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbd,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xc0,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xc1,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xf5,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0xe3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc7,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0xf3,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xe4,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc9,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xcd,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xcf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc8,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd5,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd0,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x01,0x00,0x00,0xe4,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe4,0x02,0x00,0x00,0xe1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe6,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, +0xe6,0x01,0x00,0x00,0xf2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xc0,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0xf2,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xca,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, +0xe4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, +0x39,0x02,0x00,0x00,0xf8,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xfb,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0x37,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x03,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xff,0x01,0x00,0x00,0x00,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x03,0x02,0x00,0x00, +0xfe,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x05,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xfe,0x01,0x00,0x00,0x35,0x02,0x00,0x00,0x08,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0xeb,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x07,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0b,0x02,0x00,0x00,0x06,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0xed,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0f,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x13,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x17,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0x1c,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0xed,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc0,0x01,0x00,0x00, +0x23,0x02,0x00,0x00,0xab,0x01,0x00,0x00,0x22,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x24,0x02,0x00,0x00, +0x23,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xc0,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0xd9,0x01,0x00,0x00, +0x17,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x2c,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x2e,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0xed,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00, 
+0xeb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x05,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x94,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0xdf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, +0x4e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x54,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0x58,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5d,0x02,0x00,0x00, +0x5e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x61,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x65,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x69,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x64,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0x70,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x81,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x7c,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x86,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0xcb,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x89,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x91,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8f,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x90,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x91,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x8f,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x9a,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x98,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x75,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb9,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x86,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x66,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q4_1_f32_len = 10632; + +unsigned char matmul_q4_1_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x19,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x78,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 
+0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x67,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x68,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x68,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x68,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x68,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6a,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6a,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x78,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbe,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xbf,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xbf,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xbf,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc1,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc1,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
+0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
+0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x1a,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x2b,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x2c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x32,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x60,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00, 
+0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x69,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x6c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xcc,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xe3,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xfa,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0xf9,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xbe,0x02,0x00,0x00, +0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xbf,0x02,0x00,0x00, +0xbe,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xc0,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc0,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
+0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xcd,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
+0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, 
+0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xe7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xf8,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xfc,0x02,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, 
+0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x16,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xa9,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x1a,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x1a,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x1a,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x81,0x00,0x05,0x00, +0x1a,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x33,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x32,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0xf8,0x02,0x00,0x00,0x3e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0xf9,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x48,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x43,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0xf9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x00,0x03,0x00,0x00,0x51,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, 
+0x5c,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x32,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x6c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x85,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x32,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x86,0x01,0x00,0x00, +0x85,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x88,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x6c,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x32,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x6c,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xf9,0x02,0x00,0x00,0xa9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x42,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x44,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xfc,0x02,0x00,0x00,0xad,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, +0x00,0x03,0x00,0x00,0xb0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x5e,0x02,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x02,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xba,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x06,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x18,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, +0xe6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x18,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc6,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xca,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0x06,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x18,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, 
+0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x06,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x18,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, +0xdc,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0xe1,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0xe1,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xe3,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xe4,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, +0x18,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0x06,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0x16,0x02,0x00,0x00,0xed,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0x07,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf0,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf2,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x15,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, +0x14,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x15,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf4,0x01,0x00,0x00, +0xf3,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf8,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x07,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x15,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x07,0x03,0x00,0x00, +0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x06,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x15,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, 
+0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x02,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x32,0x01,0x00,0x00, +0x10,0x02,0x00,0x00,0x62,0x01,0x00,0x00,0x0f,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x11,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xe3,0x01,0x00,0x00, +0x12,0x02,0x00,0x00,0xfc,0x01,0x00,0x00,0x00,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x15,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xea,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x18,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x18,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x5c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x1a,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x1e,0x02,0x00,0x00,0x19,0x02,0x00,0x00, +0x1a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x20,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x20,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x19,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x23,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x22,0x02,0x00,0x00, +0x23,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x26,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x22,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x28,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x28,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x58,0x02,0x00,0x00, +0x2b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x0e,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2e,0x02,0x00,0x00, +0x29,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x29,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x10,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x31,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x10,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x31,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x31,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x0e,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, +0x3e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x10,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00, +0x3e,0x02,0x00,0x00,0x10,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0xe3,0x01,0x00,0x00,0x46,0x02,0x00,0x00,0xce,0x01,0x00,0x00, +0x45,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x47,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xe3,0x01,0x00,0x00,0x4d,0x02,0x00,0x00, +0xfc,0x01,0x00,0x00,0x3a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, +0x4e,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x51,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x51,0x02,0x00,0x00,0x53,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00, +0x10,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x30,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x58,0x02,0x00,0x00,0x0e,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x28,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x23,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x23,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x20,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x22,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x18,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x02,0x00,0x00,0x02,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x65,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x70,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x72,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, 
+0x72,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x72,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x79,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x84,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x84,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x86,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xea,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, +0xe2,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x88,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8c,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x87,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0x93,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0x04,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xec,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa0,0x02,0x00,0x00, +0xa1,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa4,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa6,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xac,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa7,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xaf,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0xee,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb4,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb2,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xbd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xbb,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xca,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xee,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd4,0x02,0x00,0x00, +0xd6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0xee,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x6c,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xdc,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbd,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa9,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xee,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa6,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x89,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x89,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q4_1_f32_aligned_len = 11224; + +unsigned char matmul_q4_1_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xef,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x4e,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x38,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x39,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x64,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x65,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x65,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x67,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x67,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x95,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x95,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x95,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x97,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, 
+0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x16,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00, +0x1a,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x31,0x01,0x00,0x00,0x04,0x00,0x00,0x00, 
+0xc4,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x5e,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x5f,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x5f,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x66,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x66,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x69,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x82,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xa5,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb6,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xd2,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xd3,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x94,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0x96,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0x96,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, 
+0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
+0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xbe,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x0a,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x16,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x1a,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, +0x1a,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x1a,0x01,0x00,0x00, +0x27,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x81,0x00,0x05,0x00,0x1a,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x31,0x01,0x00,0x00, +0x37,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0xce,0x02,0x00,0x00,0x3c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x40,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x42,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x46,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x4e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, +0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x69,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x6c,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x69,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x69,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x78,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x69,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0xcf,0x02,0x00,0x00,0x82,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x42,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x34,0x02,0x00,0x00,0x90,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, +0x90,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x93,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x8e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x97,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9b,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x96,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa3,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xdc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xdc,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbc,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0xee,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x95,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x97,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc4,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc8,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xd0,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, +0xdd,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdd,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe5,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x31,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xea,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0xeb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcc,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x32,0x02,0x00,0x00, +0xf3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf6,0x01,0x00,0x00, +0xf1,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0x30,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0xe2,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xfa,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xfe,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
+0xe4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x2e,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x02,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x06,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0e,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x09,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x13,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0xa7,0x01,0x00,0x00,0x1d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x24,0x02,0x00,0x00, +0xd4,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x27,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00,0x29,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0x25,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x01,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0xd8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x96,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0xa8,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x47,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x56,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0x5f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x62,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x3c,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x69,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x74,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7a,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x75,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x82,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x82,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8a,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x88,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x89,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x90,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x93,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x91,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x92,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xad,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0xaf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x69,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x74,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x56,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q4_1_f32_aligned_fp32_len = 10476; + +unsigned char matmul_q4_1_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xef,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x4e,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x38,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x66,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x66,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x66,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x68,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x68,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x95,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x95,0x02,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x95,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x97,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x03,0x01,0x00,0x00, 
+0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x19,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x2a,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x2b,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x5b,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x65,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x66,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x67,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x82,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x86,0x00,0x00,0x00, 
+0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xa5,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb6,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xd2,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xd3,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x94,0x02,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0x96,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0x96,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x83,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0xd6,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xbe,0x02,0x00,0x00,0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x09,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0xa8,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x19,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, +0x19,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x19,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x81,0x00,0x05,0x00,0x19,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x30,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x31,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00, 
+0xc3,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x30,0x01,0x00,0x00, +0x36,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0xce,0x02,0x00,0x00,0x3b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x41,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x45,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xcf,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x50,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4e,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0xbe,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x50,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x50,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x58,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x56,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x6f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x30,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, 
+0x7b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x30,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7f,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x58,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x42,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x42,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0xcf,0x02,0x00,0x00,0x82,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x41,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x89,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x8d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x34,0x02,0x00,0x00,0x90,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x8f,0x01,0x00,0x00, +0x90,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x93,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x97,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9b,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x96,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa3,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xdc,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xdc,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb2,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, 
+0xb7,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbc,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0xee,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0xdc,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x95,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x97,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc4,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc8,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xd0,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, +0xdd,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdd,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe5,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xea,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0xeb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcc,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xdd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x32,0x02,0x00,0x00, +0xf3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xde,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf6,0x01,0x00,0x00, +0xf1,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0x30,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0xe2,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xfa,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xfe,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0x2e,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x02,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x06,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x08,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x0e,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0e,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x09,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x13,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0xa7,0x01,0x00,0x00,0x1d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x24,0x02,0x00,0x00, +0xd4,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x27,0x02,0x00,0x00, 
+0xc9,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x29,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0x25,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x08,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x00,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0xd8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x47,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x56,0x02,0x00,0x00, 
+0x57,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0x5f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x62,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x3c,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0x69,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x74,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7a,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x75,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x82,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x7f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x82,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8a,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x88,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x89,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x90,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x91,0x02,0x00,0x00, 
+0x88,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x93,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x91,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x92,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xaf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x73,0x01,0x00,0x00,0xb2,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x74,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x56,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q4_1_f32_fp32_len = 10512; + +unsigned char matmul_q5_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x47,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 
+0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x79,0x02,0x00,0x00, +0xc2,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x07,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5e,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x8c,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8e,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8e,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbf,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xc0,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc2,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc2,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x34,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x00,0x00,0x80,0x41,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x4e,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x55,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, 
+0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x81,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x82,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0x83,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8b,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x8c,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8d,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x99,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xcd,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xce,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xe4,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xfb,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xbf,0x02,0x00,0x00, 
+0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xc0,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xc1,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc1,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x34,0x01,0x00,0x00, +0x46,0x03,0x00,0x00,0x47,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xfc,0x01,0x00,0x00,0xfd,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
+0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xea,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x03,0x03,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0xdd,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xfb,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xff,0x02,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x0b,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x11,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x11,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0xa8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0xa8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x34,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x83,0x00,0x05,0x00, +0x34,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x46,0x03,0x00,0x00,0x8e,0x00,0x05,0x00,0x34,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, 
+0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x56,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, +0xfb,0x02,0x00,0x00,0x61,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x65,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x65,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0xfc,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x67,0x01,0x00,0x00, +0x68,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6b,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0xfc,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x74,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x76,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x74,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0xeb,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7c,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xfc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x03,0x03,0x00,0x00,0x95,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, 
+0x34,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa1,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x68,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xfc,0x02,0x00,0x00, +0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x65,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x67,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0xff,0x02,0x00,0x00,0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x03,0x03,0x00,0x00, +0xb1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x5f,0x02,0x00,0x00,0xb8,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, +0x05,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb7,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xbb,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x09,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x09,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xbf,0x01,0x00,0x00, +0xc0,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0x1b,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x09,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0x1b,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xd7,0x01,0x00,0x00,0x09,0x03,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0x1b,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe5,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x1b,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, +0x09,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0x17,0x02,0x00,0x00,0xee,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x0a,0x03,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xed,0x01,0x00,0x00, +0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf1,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x15,0x02,0x00,0x00, +0xf4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0x18,0x03,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf9,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0x0a,0x03,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0xff,0x01,0x00,0x00,0x18,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x05,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00, +0x0a,0x02,0x00,0x00,0x18,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x10,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x05,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00,0x11,0x02,0x00,0x00, +0x83,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xe4,0x01,0x00,0x00,0x13,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0x01,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x13,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x18,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x19,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xed,0x01,0x00,0x00, +0x5d,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x1b,0x02,0x00,0x00, +0x1c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x1f,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x21,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x24,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x27,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x23,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x27,0x02,0x00,0x00, +0x22,0x02,0x00,0x00,0x23,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x22,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x29,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x22,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0x11,0x03,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x2b,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2f,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, +0x2b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x31,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x31,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x13,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x13,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x33,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x33,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x32,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x11,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0x3d,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0x13,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0x13,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0xe4,0x01,0x00,0x00, +0x47,0x02,0x00,0x00,0xcf,0x01,0x00,0x00,0x46,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x48,0x02,0x00,0x00, +0x47,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xe4,0x01,0x00,0x00,0x4e,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, +0x3b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x4f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x54,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x52,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x13,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x31,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x33,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, +0x11,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x29,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x21,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x23,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x19,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb8,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x05,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x66,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x6c,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x72,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, 
+0x47,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0xec,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x82,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x85,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x87,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x87,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xed,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x8a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x89,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00, +0x67,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x94,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xef,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa1,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa5,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa0,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa7,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xa0,0x02,0x00,0x00,0xdf,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xad,0x02,0x00,0x00, +0xf1,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa9,0x02,0x00,0x00,0xaa,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xad,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc1,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb5,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb3,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb5,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xbe,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbc,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0xca,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcd,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x95,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, +0xcd,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0xec,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0xd3,0x02,0x00,0x00,0xd4,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xed,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0xd8,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xda,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xdc,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x01,0x00,0x00,0xdd,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xdd,0x02,0x00,0x00,0xdc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbe,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xbe,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xaa,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, 
+0xed,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x87,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0xec,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x81,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_0_f32_len = 11076; + +unsigned char matmul_q5_0_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x62,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x9c,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x08,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5e,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5f,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x8c,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x8c,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8e,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8e,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9c,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe2,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xe3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xe3,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xe3,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe5,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe5,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x06,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x12,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x31,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, 
+0x04,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x35,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x00,0x00,0x80,0x41, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4f,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x84,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x83,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x85,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x85,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x89,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x18,0x00,0x04,0x00,0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8b,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x8c,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x8d,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8d,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x90,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 
+0xd4,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xef,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xf0,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xf1,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x07,0x02,0x00,0x00,0x07,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x1e,0x02,0x00,0x00, +0x01,0x01,0x00,0x00,0x1d,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0x1f,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x94,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xe2,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xe3,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0xe4,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0xe4,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xea,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2c,0x00,0x05,0x00, +0x35,0x01,0x00,0x00,0x61,0x03,0x00,0x00,0x48,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xf1,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, +0x20,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, 
+0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0d,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x0d,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0x0d,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0e,0x03,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x84,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, 
+0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x22,0x03,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0c,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x12,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x12,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0x28,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, 
+0x13,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x31,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x46,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x35,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x35,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x61,0x03,0x00,0x00,0x8e,0x00,0x05,0x00, +0x35,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x57,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5d,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x1e,0x03,0x00,0x00,0x62,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x1f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x1f,0x03,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6c,0x01,0x00,0x00,0x67,0x01,0x00,0x00, +0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0x1f,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x26,0x03,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x7e,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x94,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x90,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0xa5,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xaa,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0xb2,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb2,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x90,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x73,0x00,0x04,0x00, 
+0x01,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xba,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc2,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc5,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x90,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xca,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, +0x1f,0x03,0x00,0x00,0xcd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x66,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x68,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x01,0x00,0x00,0x22,0x03,0x00,0x00,0xd1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, +0x26,0x03,0x00,0x00,0xd4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd8,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x82,0x02,0x00,0x00, +0xdb,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xde,0x01,0x00,0x00,0x28,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xda,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xde,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd9,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0xe3,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, +0x2c,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe6,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x3e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0x0a,0x02,0x00,0x00,0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xea,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xee,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0x2c,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x2c,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x3e,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0x28,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0x05,0x02,0x00,0x00,0x51,0x01,0x00,0x00,0x04,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x06,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x07,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0xf2,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x08,0x02,0x00,0x00,0x06,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x3e,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0c,0x02,0x00,0x00,0x2c,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0e,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x2d,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x3a,0x02,0x00,0x00,0x11,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x2d,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x14,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x16,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x3b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x3b,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x18,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x1c,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0x18,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x2d,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x3b,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x2d,0x03,0x00,0x00, +0x28,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0x29,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, +0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x3b,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00, +0x28,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x01,0x00,0x00, +0x34,0x02,0x00,0x00,0x86,0x01,0x00,0x00,0x33,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x35,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x07,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x36,0x02,0x00,0x00,0x35,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x3b,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x18,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x11,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x2d,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x2e,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x44,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x44,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x32,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x32,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x46,0x02,0x00,0x00, +0x47,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4a,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x46,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x4c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x34,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0x4f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0x34,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x52,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x36,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x36,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0x2e,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x5c,0x02,0x00,0x00, 
+0x34,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00, +0x32,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x63,0x02,0x00,0x00,0x60,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x36,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x36,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x07,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xf2,0x01,0x00,0x00, +0x69,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x6b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x07,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x20,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x72,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x75,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x76,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6c,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, +0x36,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7c,0x02,0x00,0x00,0x34,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x32,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x44,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x46,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00, +0x2e,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0x28,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xda,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, +0x0e,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x15,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x96,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x9c,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0xa5,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0x0f,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa8,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xaa,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaa,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x10,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0x06,0x03,0x00,0x00,0xad,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x10,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xac,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb0,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xab,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x10,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0xb5,0x02,0x00,0x00, +0xb7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbc,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x28,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc0,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc2,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x12,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0x04,0x03,0x00,0x00,0xc5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x12,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc4,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc8,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xca,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x02,0x03,0x00,0x00, +0xcd,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0x14,0x03,0x00,0x00,0x62,0x00,0x00,0x00, 
+0xf6,0x00,0x04,0x00,0xcc,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x02,0x00,0x00, +0xcb,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcb,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0x14,0x03,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xd3,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd8,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd6,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, +0x12,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xdc,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd8,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd8,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, +0xde,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xe1,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdf,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, +0x12,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xeb,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00, +0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0xec,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00, +0xa0,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0xee,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf2,0x02,0x00,0x00,0xf0,0x02,0x00,0x00,0x14,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x0f,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, +0x12,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, +0x10,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x02,0x00,0x00,0xf8,0x02,0x00,0x00, +0xfa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfd,0x02,0x00,0x00,0xfb,0x02,0x00,0x00,0x14,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0xfe,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x90,0x01,0x00,0x00,0x00,0x03,0x00,0x00, +0xe5,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x00,0x03,0x00,0x00,0xff,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x14,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xcc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00, +0x12,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc4,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xad,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x03,0x00,0x00,0x10,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xaa,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa4,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q5_0_f32_aligned_len = 11668; + +unsigned char matmul_q5_0_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x38,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0x72,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
+0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5d,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x88,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x89,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x89,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x89,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8b,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x72,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb8,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xb9,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xb9,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xb9,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbb,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, 
+0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x07,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x08,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x31,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x17,0x00,0x04,0x00, +0x35,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x00,0x00,0x80,0x41,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x55,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x88,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x8a,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x8d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xca,0x01,0x00,0x00, 
+0x07,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xb8,0x02,0x00,0x00,0xc4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xb9,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0xba,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0xba,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x35,0x01,0x00,0x00,0x37,0x03,0x00,0x00, +0x48,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xca,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xe3,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x0c,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x12,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x12,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x29,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x31,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0xa9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x35,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x35,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x37,0x03,0x00,0x00, +0x8e,0x00,0x05,0x00,0x35,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x55,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0xf4,0x02,0x00,0x00,0x60,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x64,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x64,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0xf5,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x66,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6a,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x65,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xfc,0x02,0x00,0x00, +0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x8d,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x84,0x01,0x00,0x00, 
+0x7f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x90,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x8d,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x96,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x8d,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9c,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9f,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x8d,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa3,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xf5,0x02,0x00,0x00,0xa6,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x64,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x66,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xf8,0x02,0x00,0x00, +0xaa,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xfc,0x02,0x00,0x00,0xad,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfe,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x58,0x02,0x00,0x00,0xb4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xfe,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb7,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb9,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0x02,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xbb,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbf,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xba,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
+0xfa,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe0,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x14,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x02,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x12,0x02,0x00,0x00, +0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x03,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xef,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, +0x11,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf4,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0x03,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0x11,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x03,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, 
+0x01,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x11,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x09,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x55,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0x84,0x01,0x00,0x00, +0x0b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xf8,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x11,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x03,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0x56,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x16,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1a,0x02,0x00,0x00, +0x15,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x15,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x1e,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x22,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, +0x1e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x0a,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x26,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2a,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x26,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x25,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x34,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0xcb,0x01,0x00,0x00,0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0xf8,0x01,0x00,0x00,0x36,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x4b,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x04,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0xfe,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x96,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0xa8,0x00,0x00,0x00, +0x65,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, 
+0x6b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x74,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7a,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x82,0x02,0x00,0x00,0x83,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x86,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x81,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, +0x93,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x99,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
+0xa2,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa6,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xae,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xac,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xb7,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb5,0x02,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x8d,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd6,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_0_f32_aligned_fp32_len = 10920; + +unsigned char matmul_q5_0_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x40,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x72,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
+0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5c,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x89,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x8a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x8a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x8a,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8c,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x72,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb8,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xb9,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xb9,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xb9,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbb,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x05,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x30,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, 
+0x03,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x34,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x00,0x00,0x80,0x41, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4e,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4f,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x54,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x7f,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x80,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x7f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x80,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x89,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x8b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8b,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x97,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc9,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xca,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 
+0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x37,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xb8,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xb9,0x02,0x00,0x00,0xb8,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0xba,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0xba,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x34,0x01,0x00,0x00,0x3f,0x03,0x00,0x00, +0x47,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xca,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, +0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x83,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xd6,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00, 
+0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x0b,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x11,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x11,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0xa8,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x30,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0xfd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x3c,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0xa8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x34,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x34,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x3f,0x03,0x00,0x00, +0x8e,0x00,0x05,0x00,0x34,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x54,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x54,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0xf4,0x02,0x00,0x00,0x5f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x63,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x63,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0xf5,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x69,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0xf5,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x74,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0xe4,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x74,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x74,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x7b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x86,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x85,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0xfc,0x02,0x00,0x00,0x93,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x97,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x54,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa2,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x54,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xa3,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xf5,0x02,0x00,0x00,0xa6,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x63,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x65,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xf8,0x02,0x00,0x00, +0xaa,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xfc,0x02,0x00,0x00,0xad,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfe,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x65,0x01,0x00,0x00, +0x58,0x02,0x00,0x00,0xb4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xfe,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x01,0x00,0x00, +0xb4,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb7,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb9,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0x02,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xbb,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbf,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xba,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, 
+0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc7,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xcd,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdb,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x54,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe0,0x01,0x00,0x00, +0xdf,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x14,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0x02,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x12,0x02,0x00,0x00, +0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xec,0x01,0x00,0x00,0x03,0x03,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00, +0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0x10,0x02,0x00,0x00,0xef,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, +0x11,0x03,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf4,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0x03,0x03,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, +0x11,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfe,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x03,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, +0x01,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x11,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x09,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0x09,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x54,0x01,0x00,0x00,0x0c,0x02,0x00,0x00,0x81,0x01,0x00,0x00, +0x0b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0xf8,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x0e,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x11,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x03,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0x56,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x1a,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x16,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1a,0x02,0x00,0x00, +0x15,0x02,0x00,0x00,0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x15,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x1e,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x22,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, +0x1e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x0a,0x03,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x26,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2a,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x26,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x25,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, 
+0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x34,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0xcb,0x01,0x00,0x00,0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0xf8,0x01,0x00,0x00,0x36,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x49,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x4b,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x04,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0xfe,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, 
+0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x65,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x6b,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x74,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7a,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x7e,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x86,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x82,0x02,0x00,0x00,0x83,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x86,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x81,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, +0x93,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9e,0x02,0x00,0x00, +0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, 
+0x99,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa2,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa6,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xa9,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xae,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xac,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xad,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xb7,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb5,0x02,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0xbf,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x8e,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x97,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd6,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb7,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x80,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_0_f32_fp32_len = 10956; + +unsigned char matmul_q5_1_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x17,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x74,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x58,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x59,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x86,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x87,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x87,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x87,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x89,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x89,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xba,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xbb,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xbb,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xbb,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbd,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbd,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, 
+0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00, 
+0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x00,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x14,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x01,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x17,0x00,0x04,0x00, +0x2f,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x7c,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x7d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x7d,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x86,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x88,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x94,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, 
+0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xdf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xf6,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xba,0x02,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xbb,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0xbc,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbc,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xe5,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xfa,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x83,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0xd6,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xe6,0x02,0x00,0x00,0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00, 
+0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xf6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, +0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x09,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x41,0x00,0x07,0x00, +0x14,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0xa8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0xfe,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0xfd,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x37,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0xa8,0x00,0x00,0x00, 
+0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x70,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3f,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x2f,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x8e,0x00,0x05,0x00,0x2f,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x2f,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x2f,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0xf6,0x02,0x00,0x00,0x5c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x60,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x62,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x66,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, +0x6c,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x71,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6f,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x71,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xe6,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x71,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x76,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x79,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x77,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xf7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x82,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0x90,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x94,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x85,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xf7,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, +0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x79,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x79,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x60,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x62,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xfa,0x02,0x00,0x00,0xa9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xfe,0x02,0x00,0x00, +0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x5a,0x02,0x00,0x00,0xb3,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, +0x00,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb2,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb6,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, +0xb2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x04,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0x04,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xba,0x01,0x00,0x00, +0xbb,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xbe,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb9,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x16,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, 
+0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x16,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc6,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc1,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0x04,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0x16,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd2,0x01,0x00,0x00,0x04,0x03,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xd6,0x01,0x00,0x00,0x16,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0x00,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0x16,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbb,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, +0x04,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xba,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x05,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xba,0x01,0x00,0x00, +0x12,0x02,0x00,0x00,0xe9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0x05,0x03,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xec,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x13,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0x10,0x02,0x00,0x00, +0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf4,0x01,0x00,0x00,0x13,0x03,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf0,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf4,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0xf0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xef,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfa,0x01,0x00,0x00,0x05,0x03,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0x13,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x59,0x00,0x00,0x00, 
+0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x02,0x00,0x00,0x05,0x03,0x00,0x00,0x00,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0xfe,0x01,0x00,0x00,0x01,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x05,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x04,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0x13,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x00,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00,0x0c,0x02,0x00,0x00, +0x7e,0x01,0x00,0x00,0x0b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, +0xf8,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x13,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x05,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x06,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, +0x58,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x06,0x03,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x16,0x02,0x00,0x00, +0x17,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x1a,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x16,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x15,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0a,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x1f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x22,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x1e,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x22,0x02,0x00,0x00, +0x1d,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x26,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2a,0x02,0x00,0x00,0x25,0x02,0x00,0x00, +0x26,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x25,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x0e,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x2e,0x02,0x00,0x00, 
+0x2d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x32,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00,0x06,0x03,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x0a,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x3b,0x02,0x00,0x00,0x0e,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0x0e,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00, +0x42,0x02,0x00,0x00,0xca,0x01,0x00,0x00,0x41,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x43,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xdf,0x01,0x00,0x00,0x49,0x02,0x00,0x00,0xf8,0x01,0x00,0x00, +0x36,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x00,0x01,0x00,0x00, +0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x4f,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x4b,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x0e,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x06,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x16,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x00,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x55,0x00,0x00,0x00, 
+0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x6d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7c,0x02,0x00,0x00, +0x7d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x80,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0xde,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x88,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x83,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, +0xe7,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x68,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x83,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xa0,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9c,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa0,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x9b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0xec,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa8,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0xec,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0xab,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb0,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xae,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xaf,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0xaf,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xb9,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb7,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0xc8,0x02,0x00,0x00,0xec,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, +0xce,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0xd2,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0xd3,0x02,0x00,0x00,0xec,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x94,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb9,0x02,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xda,0x02,0x00,0x00,0xec,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0xe7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_1_f32_len = 10956; + +unsigned char matmul_q5_1_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x3a,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x97,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x59,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x86,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x87,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x87,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x87,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x87,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x89,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x89,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00, +0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xdd,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xde,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xde,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xde,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe0,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe0,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 
+0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
+0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x01,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x04,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, 
+0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x30,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x51,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x7f,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x80,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x84,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00,0x85,0x01,0x00,0x00, +0x84,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x86,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x88,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, 
+0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xec,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0xeb,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xfc,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x02,0x02,0x00,0x00, +0x07,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x19,0x02,0x00,0x00,0x01,0x01,0x00,0x00,0x18,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0x07,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x23,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x2b,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xdd,0x02,0x00,0x00,0xc4,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xde,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0xdf,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0xdf,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x05,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xec,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, 
+0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0xc1,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x08,0x03,0x00,0x00, +0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x08,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x21,0x03,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x9e,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, 
+0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x09,0x03,0x00,0x00, +0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, +0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x8e,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x5f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0x19,0x03,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x1d,0x03,0x00,0x00,0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x01,0x00,0x00, 
+0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x2c,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0xfe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0xa9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x3f,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x30,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x30,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x81,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00, +0x4f,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x50,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x51,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x58,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x19,0x03,0x00,0x00,0x5d,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x61,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x61,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x67,0x01,0x00,0x00, +0x1a,0x03,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x67,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x21,0x03,0x00,0x00,0x70,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x71,0x01,0x00,0x00, 
+0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x51,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x8f,0x01,0x00,0x00, +0x8e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x8b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x9f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x51,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xaa,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xad,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x8b,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, 
+0xb3,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x51,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x81,0x01,0x00,0x00, +0xb8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbd,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc0,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x8b,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00, +0x1a,0x03,0x00,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x61,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0x1d,0x03,0x00,0x00,0xcc,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0x21,0x03,0x00,0x00,0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x7d,0x02,0x00,0x00, +0xd6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xd9,0x01,0x00,0x00,0x23,0x03,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0xd5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0xde,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0x27,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdd,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe1,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x39,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0x05,0x02,0x00,0x00,0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x39,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe5,0x01,0x00,0x00, +0xe4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe9,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0x27,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x39,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x27,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x39,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, +0xfb,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0xfd,0x01,0x00,0x00, +0x23,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0x00,0x02,0x00,0x00,0x4c,0x01,0x00,0x00,0xff,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x01,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x02,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0xed,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x03,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x39,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x27,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x09,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0x35,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x28,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x0b,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x0f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x0b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x11,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x36,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x36,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x13,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x17,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x28,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0x1d,0x02,0x00,0x00,0x36,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x28,0x03,0x00,0x00, +0x23,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00, 
+0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x25,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x36,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, +0x2a,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x23,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x51,0x01,0x00,0x00, +0x2f,0x02,0x00,0x00,0x81,0x01,0x00,0x00,0x2e,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00,0x30,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x02,0x02,0x00,0x00, +0x31,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x36,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x13,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x0c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x28,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x0b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x37,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x37,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x3a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x29,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x39,0x02,0x00,0x00,0x3a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x3d,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x39,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x38,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x2d,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x38,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x2d,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x41,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x45,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x41,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x2f,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x77,0x02,0x00,0x00, +0x4a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x2f,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x49,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4d,0x02,0x00,0x00, +0x48,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x48,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x31,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x48,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x31,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x55,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0x29,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0x2f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, +0x2d,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x31,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x31,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x02,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0xed,0x01,0x00,0x00, +0x64,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x02,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x1b,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x6d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x70,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x70,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x72,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x70,0x02,0x00,0x00,0x72,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0x31,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x4f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x2f,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x49,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x42,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x42,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x2d,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x41,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3a,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x29,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x39,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0x23,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, +0x09,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x84,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, 
+0xa8,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x92,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0xa0,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9f,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa3,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa5,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0b,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, +0x01,0x03,0x00,0x00,0xa8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa7,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xab,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xa7,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x0b,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, +0xb2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x23,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbd,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbd,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x0d,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xff,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xbf,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbe,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0xfd,0x02,0x00,0x00, +0xc8,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, 
+0xcb,0x02,0x00,0x00,0x0f,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, +0xce,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd3,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd1,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd2,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x0d,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, +0xd7,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd3,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0xda,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xdc,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x0d,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xe6,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0x0d,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf3,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0xf2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x02,0x00,0x00, +0x0b,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0xf3,0x02,0x00,0x00, +0xf5,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xf8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xfa,0x02,0x00,0x00,0xf9,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x8b,0x01,0x00,0x00,0xfb,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xed,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xfb,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdc,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xc8,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc8,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0x0f,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc7,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xc0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x02,0x00,0x00, +0x0d,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbd,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xbf,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa8,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa7,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x0a,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q5_1_f32_aligned_len = 11548; + +unsigned char matmul_q5_1_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x10,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x6d,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x04,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x04,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x57,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x83,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x84,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x84,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x84,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x86,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x86,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6d,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb3,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xb4,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xb4,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xb4,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb6,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb6,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x01,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x04,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x30,0x01,0x00,0x00, 
+0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x4b,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x50,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x7d,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x7e,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x82,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x84,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x85,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x84,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x85,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x88,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x03,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc4,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xc5,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, 
+0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf1,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0xf0,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xb3,0x02,0x00,0x00, +0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xb4,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xb5,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xb5,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc5,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0xf3,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, 
+0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xb0,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0xec,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x0a,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x01,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x11,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x15,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0xa9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x2c,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x70,0x00,0x04,0x00, 
+0xc4,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x30,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x30,0x01,0x00,0x00, +0x46,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x12,0x01,0x00,0x00, +0x81,0x00,0x05,0x00,0x30,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x50,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0xef,0x02,0x00,0x00,0x5b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x61,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x65,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0xf7,0x02,0x00,0x00, +0x6e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x88,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x91,0x01,0x00,0x00, 
+0x90,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x88,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x93,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x88,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x9a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9e,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0xf0,0x02,0x00,0x00,0xa1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xf3,0x02,0x00,0x00, +0xa5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xf7,0x02,0x00,0x00,0xa8,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x53,0x02,0x00,0x00,0xaf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xf9,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xae,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb2,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xfd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xba,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0xfd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xfd,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xd6,0x01,0x00,0x00,0xf9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x50,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdb,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0x0f,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xfd,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x0d,0x02,0x00,0x00, +0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x0b,0x02,0x00,0x00,0xea,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xef,0x01,0x00,0x00, +0x0c,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0xfe,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x0c,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0xfe,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x03,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0xf9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x50,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0x7f,0x01,0x00,0x00, +0x06,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x09,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00,0xfe,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xff,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x51,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0xff,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x11,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x15,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x03,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x19,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x1d,0x02,0x00,0x00,0x18,0x02,0x00,0x00, +0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x18,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x05,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x18,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x05,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x21,0x02,0x00,0x00, +0x22,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x25,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x21,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x28,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x2d,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x02,0x00,0x00, +0x28,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0xff,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x05,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00, 
+0x07,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x07,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0xc6,0x01,0x00,0x00,0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0xf3,0x01,0x00,0x00,0x31,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0x44,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x46,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x07,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x22,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x05,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x19,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x12,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xff,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x11,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xaf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0xf9,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x96,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xa8,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x70,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x73,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x79,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x81,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x85,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0xd5,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x99,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x99,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x94,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9d,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa1,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xa9,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa7,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xad,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xb2,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb0,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0xbe,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x89,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x88,0x01,0x00,0x00,0xd1,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x96,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x95,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xd9,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_1_f32_aligned_fp32_len = 10800; + +unsigned char matmul_q5_1_f32_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x10,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x6d,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x03,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x03,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x05,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x84,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x85,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x85,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x85,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x87,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6d,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb3,0x02,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xb4,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xb4,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xb4,0x02,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb6,0x02,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb6,0x02,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, 
+0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x00,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x03,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x06,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x14,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x2f,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x49,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4f,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x78,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x7a,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7b,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x7b,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x84,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x85,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x86,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x86,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x92,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc4,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xc5,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xf1,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0xf0,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xb3,0x02,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xb4,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xb5,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0xb5,0x02,0x00,0x00,0xb6,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc5,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0xf3,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
+0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xde,0x02,0x00,0x00, +0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xaf,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0xeb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, 
+0xee,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x09,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x09,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x00,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x11,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0x14,0x01,0x00,0x00, +0x15,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0xa8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0xf9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x01,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0xa8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x26,0x01,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x2f,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x39,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, +0x2f,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x2f,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x81,0x00,0x05,0x00,0x2f,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x4f,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x50,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, +0xf6,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x4f,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, +0xef,0x02,0x00,0x00,0x5a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x60,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x64,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0xf0,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0x6a,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x6d,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x6e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x6f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x6f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x75,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x77,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x75,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0x96,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0xf7,0x02,0x00,0x00,0x8e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x4f,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, 
+0x7d,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x9a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x4f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x9e,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x61,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x61,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0xf0,0x02,0x00,0x00,0xa1,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xf3,0x02,0x00,0x00, +0xa5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0xf7,0x02,0x00,0x00,0xa8,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x53,0x02,0x00,0x00,0xaf,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xf9,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xae,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb2,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xad,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xfd,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb6,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xba,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc2,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0xfd,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xfd,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0xce,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd1,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xcf,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x0f,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xd6,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xd6,0x01,0x00,0x00,0xf9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x4f,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdb,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0x0f,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xfd,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x0d,0x02,0x00,0x00, +0xe4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe7,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0x0b,0x02,0x00,0x00,0xea,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xef,0x01,0x00,0x00, +0x0c,0x03,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xef,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xeb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0xfe,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x0c,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, +0xfe,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x03,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0xf9,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x4f,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0x7c,0x01,0x00,0x00, +0x06,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x09,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0b,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xeb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00,0xfe,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xff,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x51,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0xff,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x11,0x02,0x00,0x00,0x12,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x15,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x10,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x03,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x19,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x1d,0x02,0x00,0x00,0x18,0x02,0x00,0x00, +0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x18,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x05,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x18,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x05,0x03,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x21,0x02,0x00,0x00, +0x22,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x25,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x21,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x28,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x2d,0x02,0x00,0x00,0x07,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2d,0x02,0x00,0x00, +0x28,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x28,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x02,0x00,0x00,0xff,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x05,0x03,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00, +0x07,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x07,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0xc6,0x01,0x00,0x00,0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0xf3,0x01,0x00,0x00,0x31,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0xc3,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x48,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0x44,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x46,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x07,0x03,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x27,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x29,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x22,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x05,0x03,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x21,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x19,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x12,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xff,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x11,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xaf,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0xf9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xac,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5b,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x70,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x73,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, 
+0xc1,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x79,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, +0x7e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x81,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7c,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x85,0x02,0x00,0x00,0xe1,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x61,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x93,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0xd5,0x02,0x00,0x00, +0x96,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x99,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x99,0x02,0x00,0x00, +0x94,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x94,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9d,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa1,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xa9,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa7,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xad,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0xac,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xa9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0xb0,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0xa8,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0xb2,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb0,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x71,0x02,0x00,0x00, +0xbe,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x89,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0xc1,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc7,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x92,0x01,0x00,0x00,0xd1,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x96,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xe3,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x95,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, +0xe1,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x75,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q5_1_f32_fp32_len = 10836; + +unsigned char matmul_q8_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xf2,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 
+0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x51,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x06,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x08,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x35,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x36,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x63,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x64,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x64,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x64,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x66,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x66,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x51,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x97,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x98,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x98,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x9a,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9a,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
+0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xc3,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x02,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x05,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x1e,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x1e,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x17,0x00,0x04,0x00,0x10,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x26,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x27,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x27,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2d,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, 
+0x57,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x59,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x5a,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x64,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x65,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x71,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa5,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa6,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xbc,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0xd2,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xd4,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x97,0x02,0x00,0x00, +0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x98,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x99,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, +0x99,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xa6,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xd4,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
+0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xb1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, +0xc0,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc2,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xc0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0xaf,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, +0x9d,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x39,0x02,0x00,0x00,0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, +0x8d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0xdd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x37,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xeb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xff,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, +0x0c,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, 
+0x0d,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x15,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x15,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x50,0x00,0x05,0x00, +0x10,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x10,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x23,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x2e,0x01,0x00,0x00,0x2c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0xd1,0x02,0x00,0x00,0x39,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3f,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x43,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x4e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, 
+0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x54,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0x6d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x6e,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x71,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x75,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x62,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, +0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x56,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x56,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x40,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x82,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x3d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3f,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, +0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0xd5,0x02,0x00,0x00,0x86,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0x89,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x8d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x37,0x02,0x00,0x00,0x90,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0xdb,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x93,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x95,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, 
+0xc1,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0xdf,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x97,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9b,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x96,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0xf1,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x9f,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa3,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0xf1,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xdf,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0xb3,0x01,0x00,0x00,0xf1,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0xb6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xdb,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xbc,0x01,0x00,0x00,0xbd,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xbd,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xf1,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, +0xdf,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x95,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x97,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc5,0x01,0x00,0x00, +0xc6,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc9,0x01,0x00,0x00,0xc4,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc4,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xcb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0xed,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, 
+0xd1,0x01,0x00,0x00,0xee,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd1,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcc,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0xe0,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x59,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x01,0x00,0x00,0xe0,0x02,0x00,0x00,0xdd,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0x68,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xee,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0xdb,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0xbc,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xee,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xcd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00, +0xe0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe1,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0x35,0x02,0x00,0x00,0xf4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0xe1,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf3,0x01,0x00,0x00, +0xf4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf9,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0x33,0x02,0x00,0x00, +0xfc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xfb,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xff,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x01,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x01,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xfa,0x01,0x00,0x00,0x31,0x02,0x00,0x00,0x04,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0xe7,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
+0x03,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x07,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0x03,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x02,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x09,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0b,0x02,0x00,0x00, +0x0a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x0f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x13,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0xe7,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0x13,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x18,0x02,0x00,0x00,0xe9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xbc,0x01,0x00,0x00, +0x1f,0x02,0x00,0x00,0xa7,0x01,0x00,0x00,0x1e,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00,0x20,0x02,0x00,0x00, +0x1f,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xbc,0x01,0x00,0x00,0x26,0x02,0x00,0x00,0xd5,0x01,0x00,0x00, +0x13,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x02,0x01,0x00,0x00, +0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x21,0x02,0x00,0x00, +0x28,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x09,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x0b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0xe7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x01,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x03,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0xe1,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x90,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, 
+0xdb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x4a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0x4a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x50,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x54,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x57,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x59,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5d,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x65,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x65,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x3f,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0xc2,0x02,0x00,0x00,0xdd,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x45,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, 
+0x72,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x77,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x7d,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7d,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x78,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0xc7,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x85,0x02,0x00,0x00,0x80,0x02,0x00,0x00, +0x81,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x80,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00, +0x6d,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x88,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8d,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8b,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x91,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x94,0x02,0x00,0x00, +0x8b,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x96,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x94,0x02,0x00,0x00, +0x95,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x95,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x55,0x02,0x00,0x00, +0xa2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, +0xab,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0xb0,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x71,0x01,0x00,0x00,0xb5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x96,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x81,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x77,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x62,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x61,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t matmul_q8_0_f32_len = 10608; + +unsigned char matmul_q8_0_f32_aligned_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x15,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x74,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, 
+0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x07,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x09,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x36,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x63,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x64,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x64,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x64,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x64,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x64,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x66,0x01,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x66,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xba,0x02,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xbb,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 
+0x48,0x00,0x05,0x00,0xbb,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xbb,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbd,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbd,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
+0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x03,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x08,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x1e,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x08,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x11,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x27,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x28,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x28,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2e,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x38,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x5c,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x5d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x5d,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x18,0x00,0x04,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x63,0x01,0x00,0x00, +0x62,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x64,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x65,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x65,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x68,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x94,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc7,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xc8,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xc9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xdf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xf6,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0xf5,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xba,0x02,0x00,0x00,0xc4,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xbb,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0x20,0x00,0x04,0x00, +0xbc,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbc,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xc2,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xf7,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x07,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, 
+0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xe3,0x02,0x00,0x00,0xc1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfc,0x02,0x00,0x00, 
+0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0x9e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x84,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0xd7,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xe4,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, +0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdf,0x00,0x00,0x00, +0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0xf4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, +0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xef,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x0d,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x16,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x04,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x16,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, 
+0xc4,0x00,0x00,0x00,0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x50,0x00,0x05,0x00,0x11,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x1a,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, +0x11,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x51,0x00,0x05,0x00,0xc4,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2f,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x31,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x35,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0xf4,0x02,0x00,0x00,0x3a,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xf5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0xf5,0x02,0x00,0x00,0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x7e,0x00,0x00,0x00,0xf5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x49,0x01,0x00,0x00, +0xab,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0xfc,0x02,0x00,0x00,0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x6c,0x01,0x00,0x00, +0x6b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x72,0x01,0x00,0x00, 
+0x71,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x68,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x7c,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x7f,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x82,0x01,0x00,0x00, +0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x85,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x84,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x85,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x68,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x73,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00,0x92,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0xd0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x97,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2e,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9a,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x68,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x50,0x01,0x00,0x00, +0xd0,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, 
+0x73,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0xa0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xf5,0x02,0x00,0x00,0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x3e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0xf8,0x02,0x00,0x00,0xa9,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xfc,0x02,0x00,0x00,0xac,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x5a,0x02,0x00,0x00, +0xb3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xb6,0x01,0x00,0x00,0xfe,0x02,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb2,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb6,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xb8,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, +0x02,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xba,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xbe,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, +0xba,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xb9,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x14,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0x14,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc2,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc6,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0x02,0x03,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x14,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x02,0x03,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd3,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, +0xd5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x14,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, +0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0xde,0x01,0x00,0x00, 
+0xdd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00, +0xe0,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, +0x14,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe4,0x01,0x00,0x00,0x02,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xba,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x12,0x02,0x00,0x00,0xe9,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0x03,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xe8,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xec,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0xe8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x11,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, +0x10,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0x11,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xf0,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xf4,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x03,0x03,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfc,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x11,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x03,0x03,0x00,0x00, +0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00, +0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x02,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x11,0x03,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0xfe,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x2e,0x01,0x00,0x00, +0x0c,0x02,0x00,0x00,0x5e,0x01,0x00,0x00,0x0b,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x0d,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00, +0x0e,0x02,0x00,0x00,0xf8,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, +0x11,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf0,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x14,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x14,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0x58,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x04,0x03,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x16,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x1a,0x02,0x00,0x00,0x15,0x02,0x00,0x00, +0x16,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x15,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x08,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0x56,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x08,0x03,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x1e,0x02,0x00,0x00, +0x1f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x22,0x02,0x00,0x00,0x1d,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x0a,0x03,0x00,0x00, +0x3e,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x54,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x26,0x02,0x00,0x00,0x27,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2a,0x02,0x00,0x00, +0x25,0x02,0x00,0x00,0x26,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x25,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0x25,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x32,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, +0x2e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0x04,0x03,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x34,0x02,0x00,0x00, +0x0a,0x03,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, +0x08,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x3b,0x02,0x00,0x00,0x0c,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0xdf,0x01,0x00,0x00,0x42,0x02,0x00,0x00,0xca,0x01,0x00,0x00, +0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x73,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xdf,0x01,0x00,0x00,0x49,0x02,0x00,0x00, +0xf8,0x01,0x00,0x00,0x36,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0x4a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0xca,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, 
+0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, +0x0c,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x27,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x24,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x26,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x08,0x03,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, +0x04,0x03,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x14,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x16,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x96,0x00,0x00,0x00, +0x61,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00, +0xa8,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x12,0x00,0x00,0x00, +0x6c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, +0x6e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0x74,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, +0x73,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x80,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7c,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
+0xfa,0x00,0x04,0x00,0x80,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x82,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0xde,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x84,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x88,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0x84,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0x8f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x68,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x68,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x98,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9a,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x83,0x02,0x00,0x00, +0xdc,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0xe8,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9c,0x02,0x00,0x00, +0x9d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa0,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa8,0x02,0x00,0x00, +0xa3,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa3,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xae,0x02,0x00,0x00, +0xab,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xae,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xaf,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xb4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0xb5,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb0,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, 
+0xb6,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb7,0x02,0x00,0x00,0xb8,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0xc3,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, +0xe5,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0xcf,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xe6,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0xd2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0x68,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xca,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa5,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0xea,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa2,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x82,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x84,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0xe5,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t matmul_q8_0_f32_aligned_len = 11200; + +unsigned char matmul_q8_0_f32_aligned_fp32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xeb,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 
+0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0x29,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0x4a,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x09,0x01,0x00,0x00, 
+0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x09,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x34,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x35,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x61,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x61,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x61,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x63,0x01,0x00,0x00,0x21,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4a,0x02,0x00,0x00, 0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x90,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, @@ -33313,136 +36461,147 @@ unsigned char matmul_f32_aligned_l_fp32_data[] = { 0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x91,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, 0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00, 
-0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xe8,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 
-0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfd,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x50,0x01,0x00,0x00,0xf4,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xcd,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x03,0x01,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x20,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x1e,0x00,0x04,0x00,0x07,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x1e,0x00,0x03,0x00, +0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x17,0x00,0x04,0x00, +0x11,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x27,0x01,0x00,0x00, +0xc4,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x28,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x28,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x34,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x51,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x86,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x5a,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x59,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x5b,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x5b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x5f,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x01,0x00,0x00, +0x5f,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x65,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
+0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x08,0x01,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xa1,0x01,0x00,0x00,0xba,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa1,0x01,0x00,0x00,0xc4,0x00,0x00,0x00, 0xa0,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, 0x07,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0xbf,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0xc4,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, 0x20,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 0xce,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 0xe0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 0x0f,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 0x42,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 0x0a,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0xba,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0xc4,0x00,0x00,0x00, 0x1e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00, 0x20,0x00,0x04,0x00,0x92,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, 0x91,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x92,0x02,0x00,0x00, @@ -33452,8 +36611,8 @@ unsigned char matmul_f32_aligned_l_fp32_data[] = { 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, 0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, @@ -33512,1633 +36671,612 @@ unsigned char matmul_f32_aligned_l_fp32_data[] = { 0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, 0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, 
0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, 
-0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x87,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfe,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0x2b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
+0x15,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc1,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xce,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd2,0x02,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xd7,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0xd7,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x8e,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdb,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0xde,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0xec,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xef,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0d,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x03,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x10,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x16,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x04,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, +0x01,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x16,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0x1d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x04,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x20,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x11,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x21,0x01,0x00,0x00, +0x8e,0x00,0x05,0x00,0x11,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2e,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0xf8,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc4,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2d,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x30,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, 0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x74,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x71,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, -0x2b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x30,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0xd2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x8b,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x97,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9f,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, 
-0xa3,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xe8,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x94,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xd6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc0,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xc4,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcc,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe5,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xd7,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xec,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x2e,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xf2,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x2c,0x02,0x00,0x00, -0xf7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfa,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfe,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x02,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x28,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0a,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0xe0,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0xa3,0x01,0x00,0x00,0x19,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0xd0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0x23,0x02,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0xe0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xef,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0xd8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0xa7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x42,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x7e,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0xab,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, +0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x79,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x65,0x01,0x00,0x00,0x66,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x39,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x65,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, 
+0x6a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x65,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x74,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x65,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x76,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7b,0x01,0x00,0x00, +0x7a,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x7e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3e,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0x85,0x01,0x00,0x00, 0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0xb8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x8c,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
-0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00, -0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x86,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x86,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00, 
-0x8d,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xfa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_aligned_l_fp32_len = 10348; - -unsigned char matmul_f32_aligned_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x6a,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x14,0x03,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0xf8,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x69,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x92,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x95,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x95,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xcb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x11,0x03,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x03,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x14,0x03,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x86,0x01,0x00,0x00, -0xe6,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x87,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x92,0x01,0x00,0x00, -0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x94,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x03,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x20,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x02,0x00,0x00, -0xe6,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x11,0x03,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x11,0x03,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x3b,0x00,0x04,0x00,0x13,0x03,0x00,0x00,0x14,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x20,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, 
-0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, 
-0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x51,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x39,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x49,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x49,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x4a,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x51,0x03,0x00,0x00,0x80,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x4a,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, 
-0x4d,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x51,0x03,0x00,0x00, -0x03,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x57,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x15,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1d,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x61,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x30,0x02,0x00,0x00,0x8c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xd4,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x97,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x92,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x99,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
+0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x9f,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0xb3,0x01,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x29,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xb8,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xea,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x94,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbe,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xc4,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc0,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc4,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xcc,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, +0xe1,0x01,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2d,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe6,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0xe7,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x2e,0x02,0x00,0x00, +0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0xf2,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xed,0x01,0x00,0x00,0x2c,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0xde,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xfa,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0xf6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x2a,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xfe,0x01,0x00,0x00, +0xff,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x02,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x28,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x06,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0a,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x05,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0c,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0xe2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0xa3,0x01,0x00,0x00,0x19,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x20,0x02,0x00,0x00, +0xd0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcd,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0xca,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc4,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x23,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x04,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x01,0x00,0x00, 0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x34,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x37,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x69,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x11,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x11,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x58,0x03,0x00,0x00, 
-0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3f,0x02,0x00,0x00, -0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x67,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4b,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x57,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x63,0x02,0x00,0x00, -0x88,0x01,0x00,0x00,0x62,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x65,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x66,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x59,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x89,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x5f,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x5d,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x61,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x36,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa4,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfe,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8b,0x01,0x00,0x00, 0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x39,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x55,0x00,0x00,0x00, 0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x96,0x00,0x00,0x00,0x37,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0xa8,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x42,0x02,0x00,0x00, 
+0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x37,0x03,0x00,0x00,0xd4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd3,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd7,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xdc,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xdf,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xdb,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdf,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xda,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x52,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x5e,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5e,0x02,0x00,0x00, +0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x02,0x00,0x00, 
+0x38,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x64,0x00,0x00,0x00, 0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x3a,0x03,0x00,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0x33,0x03,0x00,0x00, -0xf4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf7,0x02,0x00,0x00,0x3d,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf3,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf7,0x02,0x00,0x00, -0xf2,0x02,0x00,0x00,0xf3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x31,0x03,0x00,0x00,0xfc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xff,0x02,0x00,0x00, -0x3f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfb,0x02,0x00,0x00,0xfc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xff,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xfa,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, -0xe7,0x02,0x00,0x00,0x3f,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x07,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x06,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0c,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0xfa,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x10,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0f,0x03,0x00,0x00,0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0f,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x18,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0xcf,0x02,0x00,0x00, -0x1c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1d,0x03,0x00,0x00,0xe7,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1f,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x23,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, +0x66,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x65,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0x73,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x76,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc2,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x86,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x84,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc2,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, 0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x27,0x03,0x00,0x00,0x29,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, -0x2a,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x2d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2e,0x03,0x00,0x00,0x2d,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0xfc,0x00,0x00,0x00,0x2f,0x03,0x00,0x00,0x14,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2f,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x10,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xdc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x03,0x00,0x00, -0x3b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0xa9,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcd,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xca,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x65,0x01,0x00,0x00,0xae,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0xbc,0x02,0x00,0x00,0xd0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xd0,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x52,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t matmul_f32_aligned_m_len = 12168; +const uint64_t matmul_q8_0_f32_aligned_fp32_len = 10452; -unsigned char matmul_f32_aligned_m_fp32_data[] = { +unsigned char matmul_q8_0_f32_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xe9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x4a,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, 
-0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf6,0x00,0x00,0x00, +0xeb,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x4a,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x07,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x08,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0a,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x28,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x34,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x61,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x51,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x64,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x64,0x01,0x00,0x00,0x21,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4a,0x02,0x00,0x00, 0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x90,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, @@ -35199,2022 +37337,145 @@ unsigned char matmul_f32_aligned_m_fp32_data[] = { 0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 
-0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xe8,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, 
-0x1e,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfd,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x50,0x01,0x00,0x00,0xf4,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xa1,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x84,0x00,0x00,0x00, 
-0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x0f,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0xba,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x92,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x92,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 
-0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, 
-0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x87,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, 
-0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfe,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0x2b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x35,0x01,0x00,0x00, 
-0xc9,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x74,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x71,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, -0x2b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x30,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0xd2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x8b,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x97,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9f,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xe8,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x94,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xd6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc0,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xc4,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcc,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe5,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0xd7,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xec,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, 
-0x2e,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xf2,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x2c,0x02,0x00,0x00, -0xf7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfa,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfe,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x02,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x28,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0a,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0xe0,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0xa3,0x01,0x00,0x00,0x19,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0xd0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x23,0x02,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, 
-0xe0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xef,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0xd8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0xb8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x8c,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb8,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00, -0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x86,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x86,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, 
-0x99,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xfa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_aligned_m_fp32_len = 10348; - -unsigned char matmul_f32_aligned_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x6a,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x95,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x14,0x03,0x00,0x00, 
-0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xf8,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfa,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x69,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6a,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x92,0x01,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x20,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x93,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x95,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x95,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xcb,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x11,0x03,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x03,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x14,0x03,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x96,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, 
-0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0xc3,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xe6,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xea,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0xf5,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x18,0x00,0x04,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x00,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xcc,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0xfa,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x02,0x01,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x08,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x05,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x1e,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x02,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x07,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x08,0x01,0x00,0x00, +0x07,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0x17,0x00,0x04,0x00,0x10,0x01,0x00,0x00, +0xc3,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x15,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, 0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x24,0x01,0x00,0x00, +0x1c,0x00,0x04,0x00,0x26,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x27,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x27,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x1c,0x00,0x04,0x00, +0x57,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x56,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0x58,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x57,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x58,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x6c,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x1c,0x00,0x04,0x00,0x86,0x01,0x00,0x00, -0xe6,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0x87,0x01,0x00,0x00,0x04,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x92,0x01,0x00,0x00, -0xf6,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x94,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x94,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, 
-0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x03,0x02,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x1f,0x02,0x00,0x00, -0xe6,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x20,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x36,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x02,0x00,0x00, -0xe6,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x20,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x57,0x02,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x11,0x03,0x00,0x00,0xba,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x12,0x03,0x00,0x00,0x11,0x03,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x3b,0x00,0x04,0x00,0x13,0x03,0x00,0x00,0x14,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x20,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00, 
-0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x0c,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x83,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x38,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x51,0x03,0x00,0x00,0xa6,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4d,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x39,0x03,0x00,0x00,0x7a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, 
-0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x49,0x03,0x00,0x00,0x37,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x4d,0x03,0x00,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x01,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, 
-0xeb,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xfa,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x49,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x4a,0x03,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x74,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x51,0x03,0x00,0x00,0x80,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x81,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, 
-0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb0,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xe4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0x95,0x01,0x00,0x00, 
-0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xee,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0xf3,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0xfc,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x73,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x4a,0x03,0x00,0x00, -0x6d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0x4d,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x51,0x03,0x00,0x00, -0x03,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x07,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x07,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0xb1,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x09,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x0d,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x08,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x3b,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x57,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x11,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x15,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x10,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x17,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x39,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1d,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x18,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x57,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x69,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x34,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x37,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x69,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x17,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x19,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x12,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x12,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x57,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x11,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x11,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x58,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3f,0x02,0x00,0x00, -0x40,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x43,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x45,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x67,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x4b,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x58,0x03,0x00,0x00,0x57,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x66,0x03,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x53,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x00,0x01,0x00,0x00,0x63,0x02,0x00,0x00, -0x88,0x01,0x00,0x00,0x62,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x65,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x66,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x45,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x47,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x40,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x40,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x58,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x59,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x6d,0x02,0x00,0x00, -0x6e,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x71,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x5d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x75,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x74,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x5f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x7d,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x81,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x83,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x89,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x84,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x5f,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x5d,0x03,0x00,0x00, 
-0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x61,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x91,0x02,0x00,0x00, -0x61,0x03,0x00,0x00,0x41,0x00,0x05,0x00,0x36,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x36,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xe6,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa4,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x61,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x83,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x7d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x76,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x5d,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x75,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x59,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0x53,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x07,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0x39,0x03,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x9e,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xc6,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcd,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x37,0x03,0x00,0x00,0xd4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd3,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd7,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xd9,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x35,0x03,0x00,0x00, -0xdc,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xdf,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xdb,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdf,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xda,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x3b,0x03,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0xe4,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x3a,0x03,0x00,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0xeb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x3d,0x03,0x00,0x00, -0x3e,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0x33,0x03,0x00,0x00, -0xf4,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xf7,0x02,0x00,0x00,0x3d,0x03,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf3,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf7,0x02,0x00,0x00, -0xf2,0x02,0x00,0x00,0xf3,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf2,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x3f,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x31,0x03,0x00,0x00,0xfc,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xff,0x02,0x00,0x00, -0x3f,0x03,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfb,0x02,0x00,0x00,0xfc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xff,0x02,0x00,0x00,0xfa,0x02,0x00,0x00, -0xfb,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xfa,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x03,0x00,0x00, -0xe7,0x02,0x00,0x00,0x3f,0x03,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x07,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x05,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x06,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0a,0x03,0x00,0x00,0x0c,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x07,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x07,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00,0x0e,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0xfa,0x02,0x00,0x00,0x0d,0x03,0x00,0x00, -0x06,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0x10,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, -0x0f,0x03,0x00,0x00,0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0f,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x03,0x00,0x00,0xef,0x02,0x00,0x00,0x3d,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x1a,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x1a,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x18,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0xcf,0x02,0x00,0x00, -0x1c,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1d,0x03,0x00,0x00,0xe7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1f,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x23,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x26,0x03,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x27,0x03,0x00,0x00,0x29,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, -0x2a,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0xc3,0x00,0x00,0x00,0x2d,0x03,0x00,0x00,0xc0,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x2e,0x03,0x00,0x00,0x2d,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0xfc,0x00,0x00,0x00,0x2f,0x03,0x00,0x00,0x14,0x03,0x00,0x00, -0x34,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2f,0x03,0x00,0x00,0x2e,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x10,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x10,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x31,0x03,0x00,0x00,0x3f,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfb,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x03,0x00,0x00,0x3d,0x03,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf3,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0xdc,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdc,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x03,0x00,0x00, -0x3b,0x03,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd9,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xdb,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd4,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd3,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f32_aligned_s_len = 12168; - -unsigned char matmul_f32_aligned_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xe9,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x4a,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf6,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf8,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x28,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x51,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x51,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x53,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4a,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x90,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x91,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x91,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x93,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x93,0x02,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, 
-0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0xb8,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xba,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x61,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x63,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x63,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6f,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xe8,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0xf4,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xf7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xf7,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfd,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, 
-0x51,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0xba,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x45,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x50,0x01,0x00,0x00,0xf4,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x52,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x52,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x51,0x00,0x00,0x00, +0x34,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x86,0x00,0x00,0x00, +0x7d,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x08,0x01,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xa1,0x01,0x00,0x00,0xba,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0xa1,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, 0xa0,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, 0x07,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, 0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xce,0x01,0x00,0x00,0xba,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0xce,0x01,0x00,0x00,0xc3,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, 0x20,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 0xce,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0xd8,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, 
+0xbe,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 0xe0,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, 0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, 0x0f,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 0x42,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 0x0a,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0xba,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x90,0x02,0x00,0x00,0xc3,0x00,0x00,0x00, 0x1e,0x00,0x03,0x00,0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00, 0x20,0x00,0x04,0x00,0x92,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, 0x91,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x92,0x02,0x00,0x00, @@ -37224,8 +37485,8 @@ unsigned char matmul_f32_aligned_s_fp32_data[] = { 0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc8,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 0xa2,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x07,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, 0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, @@ -37284,5951 +37545,1135 @@ unsigned char matmul_f32_aligned_s_fp32_data[] = { 0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, 0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, 0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, 
-0x91,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb7,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x87,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x7a,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcc,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd1,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x82,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x15,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0xc0,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb2,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xcd,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xb9,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, 0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 0xd3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
-0xc8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd9,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xc8,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfe,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xea,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0xf8,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, 
-0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xea,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0xc8,0x02,0x00,0x00,0x2b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0xc9,0x02,0x00,0x00,0x9d,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x35,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x74,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, -0xd0,0x02,0x00,0x00,0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x57,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0xd2,0x02,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0xd6,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, +0x83,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x32,0x02,0x00,0x00, +0xd6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd5,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xda,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0xdd,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xe2,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, 
+0xde,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xdd,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf4,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xff,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x0c,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x02,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x73,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x0e,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x15,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x34,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0x17,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x13,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x15,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x03,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0xc3,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x10,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x8e,0x00,0x05,0x00,0x10,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0xf7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0xf7,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0xc3,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0xca,0x02,0x00,0x00,0x37,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xdc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xde,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x3b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x3d,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x41,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x3c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0xcb,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x4c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0xba,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x8d,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x4c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x4c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00, +0x52,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x51,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x54,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x52,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x73,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x53,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x7d,0x00,0x00,0x00, +0xcb,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, 0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0xfa,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0xfd,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0x6d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x71,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x41,0x00,0x07,0x00,0xfa,0x00,0x00,0x00, 
-0x7c,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x34,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, -0x2b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x31,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0xcc,0x02,0x00,0x00,0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0xd0,0x02,0x00,0x00, -0x85,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x30,0x02,0x00,0x00,0x8c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0xd2,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x8b,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, -0xbc,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xd6,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x93,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x97,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x92,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9f,0x01,0x00,0x00, -0x9a,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9a,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0xd6,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xe8,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb3,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, 
-0xea,0x00,0x00,0x00,0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0xe8,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x94,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xd6,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc0,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xc4,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcc,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc7,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0xe5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0xd2,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xfd,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe5,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00, 
-0xd7,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xec,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xd8,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x2e,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xee,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xf2,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xf4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x2c,0x02,0x00,0x00, -0xf7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xdc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xfa,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x2a,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfe,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x02,0x02,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0x28,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x06,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x0a,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x06,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xd8,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x13,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0xe0,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0xa3,0x01,0x00,0x00,0x19,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0xd0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0xc0,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x24,0x02,0x00,0x00, 
-0x23,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xba,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x23,0x02,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0xe0,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xfe,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xef,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0xd8,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0x6b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x6c,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6f,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x34,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x54,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00, +0x7d,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, +0x59,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7b,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x54,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0xcb,0x02,0x00,0x00,0x7e,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x3b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x3d,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xce,0x02,0x00,0x00, +0x82,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0xd2,0x02,0x00,0x00,0x85,0x01,0x00,0x00, 0xf9,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8b,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, 
-0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0xb8,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x8c,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb5,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x52,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0x65,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x59,0x02,0x00,0x00, 
-0xb2,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb8,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb2,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x72,0x02,0x00,0x00, -0x73,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00, +0x89,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x30,0x02,0x00,0x00,0x8c,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xd4,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x8b,0x01,0x00,0x00, +0x8c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0xd8,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x93,0x01,0x00,0x00,0x94,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x97,0x01,0x00,0x00, +0x92,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x92,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x99,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x92,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x9f,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa5,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xd8,0x02,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xea,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0xb3,0x01,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x28,0x01,0x00,0x00, +0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa7,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xb8,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0xea,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x99,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
+0x9b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x94,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x93,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xbe,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xbe,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0xc1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xc4,0x01,0x00,0x00,0xd9,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc0,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xc4,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xe8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xcc,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xc7,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0xd9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdc,0x01,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, +0xda,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xe7,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe1,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, +0xe1,0x01,0x00,0x00,0xd4,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x2c,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x59,0x01,0x00,0x00, +0xe3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0xd4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xe6,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0xe7,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xc1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xd9,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0xec,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xda,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x2e,0x02,0x00,0x00, +0xef,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0xf2,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0xbe,0x00,0x00,0x00, 
+0xf6,0x00,0x04,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf2,0x01,0x00,0x00, +0xed,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xed,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0xed,0x01,0x00,0x00,0x2c,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0xde,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xfa,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0xf6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xfc,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xe0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x2a,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xfe,0x01,0x00,0x00, +0xff,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x02,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfd,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x04,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x04,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x28,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0xe2,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x06,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x0a,0x02,0x00,0x00, +0x05,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x05,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0c,0x02,0x00,0x00,0xda,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0f,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x62,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x10,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x13,0x02,0x00,0x00, +0xe2,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0xa3,0x01,0x00,0x00,0x19,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x20,0x02,0x00,0x00, +0xd0,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0xcc,0x00,0x00,0x00,0x23,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0xc3,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0xc3,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0x21,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x23,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0xe2,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x04,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x06,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xff,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xff,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0xe0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xfc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xfe,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf7,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf4,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf6,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xef,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xef,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xec,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xee,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x8b,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x81,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x32,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x02,0x00,0x00,0x95,0x00,0x00,0x00,0x37,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x59,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, +0x3d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, +0x43,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x47,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x50,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0x53,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0xbe,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x52,0x02,0x00,0x00, +0x53,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x56,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x52,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x5e,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x60,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5a,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, 
+0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5e,0x02,0x00,0x00, +0x59,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x61,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x02,0x00,0x00, +0x38,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x65,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x3e,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x6b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x02,0x00,0x00, 0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00, -0x79,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x86,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x84,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x8a,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb8,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x86,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb8,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x8d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0xb9,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc3,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0xc0,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xba,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xfa,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x70,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xc6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x52,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_aligned_s_fp32_len = 10348; - -unsigned char matmul_f32_l_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x18,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x46,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x46,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x46,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x75,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x77,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xf0,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, 
-0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x3c,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3c,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, 
-0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, 
-0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, 
-0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x21,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x9c,0x02,0x00,0x00, 
-0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x38,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x40,0x01,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x4f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x22,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0x66,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x6c,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, 
-0x9e,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x78,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, 
-0xae,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xb8,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x12,0x02,0x00,0x00,0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd4,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 
-0xe0,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x0c,0x02,0x00,0x00,0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xec,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x07,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, 
-0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x27,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, 
-0x4f,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x68,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f32_l_len = 10164; - -unsigned char matmul_f32_l_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x27,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf9,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x43,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x27,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6d,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6e,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00, 
-0x14,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x38,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x39,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x39,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x42,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x44,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x6d,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x6f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x6f,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
-0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, 
-0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x13,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x13,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0xa5,0x02,0x00,0x00,0x18,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x1e,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x22,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x95,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, 
-0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x4c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5b,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0xa6,0x02,0x00,0x00, -0x18,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x0d,0x02,0x00,0x00,0x69,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x68,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6c,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x70,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x74,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x01,0x00,0x00, 
-0x77,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x77,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9d,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa1,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa9,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xad,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x0b,0x02,0x00,0x00,0xcc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x09,0x02,0x00,0x00, -0xd4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0xdc,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdf,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xda,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0xe2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe7,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, 
-0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0xf7,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x14,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x12,0x00,0x00,0x00, 
-0x1f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x26,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x37,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x8f,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x4f,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x53,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x4e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x57,0x02,0x00,0x00,0x58,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x63,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x6c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x83,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0x73,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xbb,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x76,0x02,0x00,0x00, +0x71,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x71,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, +0x71,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, +0xc0,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x7a,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7e,0x02,0x00,0x00,0x79,0x02,0x00,0x00, +0x7a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00, 
+0x66,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, +0xc1,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x81,0x02,0x00,0x00, +0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x86,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x84,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x8b,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_l_fp32_len = 10048; - -unsigned char matmul_f32_m_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x18,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x46,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x46,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x46,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x75,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xf0,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x3c,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3c,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, 
-0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, 
-0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x21,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x9c,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, 
-0x35,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x38,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x40,0x01,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x4f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x22,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0x66,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x6c,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, 
-0xb7,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x78,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00, +0x89,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0xc1,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0xc1,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0x85,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8d,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x8e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x6e,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x12,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x66,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x9e,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xbb,0x02,0x00,0x00, +0xbb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, 0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xb8,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, 
-0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x12,0x02,0x00,0x00,0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd4,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe0,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x0c,0x02,0x00,0x00,0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xec,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x99,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x07,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x27,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x34,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x68,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0xa9,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0xa9,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0xcc,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0xab,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x6f,0x01,0x00,0x00,0xae,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x34,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0xae,0x02,0x00,0x00,0xad,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x7a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x73,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, +0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x70,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0x72,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5b,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5b,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0xbc,0x02,0x00,0x00,0xcf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5a,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x53,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0x52,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t matmul_f32_m_len = 10164; - -unsigned char matmul_f32_m_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x27,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf9,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x43,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x27,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6d,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6e,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00, 
-0x14,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x38,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x39,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x39,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x42,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x44,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x6d,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x6f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x6f,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, 
-0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, 
-0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, 
-0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x13,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x13,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0xa5,0x02,0x00,0x00,0x18,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x1e,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x22,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x95,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, 
-0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x4c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5b,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0xa6,0x02,0x00,0x00, -0x18,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x0d,0x02,0x00,0x00,0x69,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x68,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6c,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x70,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x74,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x01,0x00,0x00, 
-0x77,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x77,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9d,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa1,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa9,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xad,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x0b,0x02,0x00,0x00,0xcc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x09,0x02,0x00,0x00, -0xd4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0xdc,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdf,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xda,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0xe2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe7,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, 
-0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0xf7,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x14,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x12,0x00,0x00,0x00, 
-0x1f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x26,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x37,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x8f,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x4f,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x53,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x4e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x57,0x02,0x00,0x00,0x58,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x63,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x6c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x83,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x8b,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, 
-0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_m_fp32_len = 10048; - -unsigned char matmul_f32_s_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xcd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x2e,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xfa,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xfc,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x18,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x46,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x46,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x46,0x01,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x01,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x01,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2e,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x74,0x02,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x75,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x75,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x02,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xbd,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0xec,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xf0,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x3b,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x3c,0x01,0x00,0x00, 
-0x04,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x3c,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x45,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x47,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x47,0x01,0x00,0x00, -0x48,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x82,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x93,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0xb0,0x01,0x00,0x00,0xec,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x86,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x26,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x02,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbe,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x83,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xb1,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x17,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x79,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x68,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x93,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x79,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0xcc,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0xb0,0x02,0x00,0x00, -0x03,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x07,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x0c,0x01,0x00,0x00, -0x0a,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x0d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0b,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0xf1,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xac,0x02,0x00,0x00,0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x21,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x20,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x9d,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x9c,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, -0x38,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x57,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x37,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x73,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x40,0x01,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0x4f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x34,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, 
-0x73,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x22,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x1b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x21,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0xb0,0x02,0x00,0x00,0x63,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0x66,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x6a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6a,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x14,0x02,0x00,0x00,0x6d,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xb6,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x6c,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x70,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0xba,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x74,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x78,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x73,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x80,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0xba,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, 
-0x8a,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x8d,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xcc,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0xf1,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9a,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xcc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7a,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0xba,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x72,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x74,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xa2,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa6,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xae,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xae,0x01,0x00,0x00, -0xa9,0x01,0x00,0x00,0xaa,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa9,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0xbb,0x02,0x00,0x00,0xba,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xb8,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xc9,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0xb6,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0b,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, 
-0x3d,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xec,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xc9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xaa,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa2,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xce,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x12,0x02,0x00,0x00,0xd1,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd0,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd4,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcf,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd6,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x10,0x02,0x00,0x00, -0xd9,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd8,0x01,0x00,0x00,0xd9,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdc,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd7,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xde,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xde,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0x0e,0x02,0x00,0x00,0xe1,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xe0,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xe4,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe0,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xdf,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe6,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0x0c,0x02,0x00,0x00,0xe7,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xec,0x01,0x00,0x00,0xc4,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe8,0x01,0x00,0x00, -0xe7,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xec,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xc0,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0xc4,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0xc4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x99,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x73,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00, -0x99,0x01,0x00,0x00,0x03,0x02,0x00,0x00,0xb2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0xec,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x07,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe6,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe1,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0xc2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xde,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe0,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd6,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd8,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xce,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd0,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x6c,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x8b,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x9d,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x27,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x2d,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x31,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x37,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x36,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3a,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x3f,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x3e,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x42,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0xba,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x4e,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x68,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x54,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0x57,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5a,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x62,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x5e,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x62,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x5d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x6a,0x02,0x00,0x00, 
-0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x68,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6d,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6a,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00,0x71,0x02,0x00,0x00, -0x68,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0xf7,0x00,0x03,0x00,0x73,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x71,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x72,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x7f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x80,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0xc2,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x07,0x01,0x00,0x00,0x92,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x34,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x73,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x57,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x54,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x56,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x3c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x3e,0x02,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x37,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x34,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x36,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t matmul_f32_s_len = 10164; - -unsigned char matmul_f32_s_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0f,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x45,0x01,0x00,0x00,0x27,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x10,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf8,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf9,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf9,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfb,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x15,0x01,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x43,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x43,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x45,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x27,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6d,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6e,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6e,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x02,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1e,0x00,0x10,0x00,0x10,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 
-0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 
-0xb9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xbd,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xbe,0x00,0x00,0x00,0x07,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0xee,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xef,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xef,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xf8,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfa,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xfa,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x09,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x32,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x1c,0x00,0x04,0x00,0x38,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x39,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x39,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x04,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x42,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x44,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x44,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x86,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x7e,0x01,0x00,0x00,0xb9,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x80,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x84,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0xab,0x01,0x00,0x00,0xb9,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0xab,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x86,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x6d,0x02,0x00,0x00,0xb9,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0x6f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0x6f,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7f,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x07,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xac,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x27,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, 
-0x87,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0xb6,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xb8,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xad,0x02,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x93,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x79,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0xcc,0x00,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0x37,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd8,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xeb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xf3,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0xa9,0x02,0x00,0x00,0x02,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0xfb,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0a,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x13,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x13,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xeb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xeb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x1a,0x01,0x00,0x00,0xa5,0x02,0x00,0x00,0x18,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1c,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x9c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x1e,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x22,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x9d,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0xa6,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x29,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x2b,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2b,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x95,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0xb7,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x2b,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xf7,0x00,0x03,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x53,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, -0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, -0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x4c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x34,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x53,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x73,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5b,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x35,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0xa6,0x02,0x00,0x00, -0x18,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1e,0x01,0x00,0x00,0xe0,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0xa9,0x02,0x00,0x00,0x5f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0xad,0x02,0x00,0x00, -0x62,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x0d,0x02,0x00,0x00,0x69,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0xaf,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x68,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6c,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0xb3,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x70,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x74,0x01,0x00,0x00,0x6f,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6f,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x76,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7c,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x77,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0xb3,0x02,0x00,0x00,0x61,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x86,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8c,0x01,0x00,0x00,0xc5,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x95,0x01,0x00,0x00, 
-0x80,0x01,0x00,0x00,0x84,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0xc5,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x76,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x78,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x71,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x70,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xb4,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9d,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa1,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9c,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa9,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x59,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb4,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0xb9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xba,0x01,0x00,0x00,0xc2,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x09,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xad,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa5,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, -0xb4,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9b,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xc9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x0b,0x02,0x00,0x00,0xcc,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, -0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcb,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xcf,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xca,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xd1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x09,0x02,0x00,0x00, -0xd4,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xb9,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xd3,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd7,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xd2,0x01,0x00,0x00,0x07,0x02,0x00,0x00,0xdc,0x01,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, -0xbb,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xdb,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xdf,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xda,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0x05,0x02,0x00,0x00,0xe2,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xbd,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xe3,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe7,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xb5,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xbb,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xb9,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0xbd,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0xf7,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0xeb,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xbf,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0xb9,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xfe,0x01,0x00,0x00,0x01,0x02,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0x00,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe1,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xe3,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0xdc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdc,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0xdb,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xd4,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0xb9,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xd1,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd3,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0xb5,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x69,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x69,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x66,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x68,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x6c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x02,0x00,0x00,0x8b,0x00,0x00,0x00, -0x14,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, -0x9d,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x21,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x26,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x3e,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb4,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x2f,0x02,0x00,0x00,0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x33,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2f,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
-0x97,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x38,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x37,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x3b,0x02,0x00,0x00,0x36,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x36,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x02,0x00,0x00,0x15,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x64,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x40,0x02,0x00,0x00, -0x42,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x68,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x99,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x8f,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0xb0,0x00,0x05,0x00, -0xb7,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x4f,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x53,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x4e,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x55,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x3e,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x5b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x57,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5b,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x56,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x63,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x61,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0x63,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x62,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0xb7,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x68,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x63,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x63,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0xb7,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, -0x6c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6a,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0xb1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x81,0x02,0x00,0x00,0x82,0x02,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x83,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x88,0x02,0x00,0x00,0x86,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0xc2,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0xbf,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0xb9,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x06,0x01,0x00,0x00,0x8b,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x34,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6c,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6c,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x58,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x58,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x55,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x57,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x50,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x50,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x4d,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x4f,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x38,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x37,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x30,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x96,0x02,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t matmul_f32_s_fp32_len = 10048; +const uint64_t matmul_q8_0_f32_fp32_len = 10488; unsigned char mul_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x67,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 
-0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x1f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x07,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x58,0x00,0x00,0x00, 
+0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2e,0x01,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x3c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x3d,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x3d,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x20,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x22,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x22,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x3d,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x3f,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3f,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x25,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x49,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x49,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x49,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4b,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4b,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x54,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x2d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2f,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x54,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x56,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x12,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x1a,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x24,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x25,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x26,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x3c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x3d,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x3c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x17,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x29,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x38,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x3c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x3c,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x1e,0x00,0x1e,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x19,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x17,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, 
+0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x2c,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2d,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x2d,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x36,0x01,0x00,0x00,0x16,0x00,0x03,0x00,0x3b,0x01,0x00,0x00, +0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x3c,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x3d,0x01,0x00,0x00, +0x3c,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x3e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x19,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x48,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x49,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x4a,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4a,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x50,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x53,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x1e,0x00,0x03,0x00,0x54,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x55,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, +0x55,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x60,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x2c,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x62,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x2f,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x63,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x34,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0xae,0x00,0x05,0x00, +0x36,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x39,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x37,0x01,0x00,0x00, +0x38,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x62,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x39,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x16,0x00,0x00,0x00, 
+0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x72,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x76,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x01,0x00,0x00, +0x76,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x74,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x80,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8b,0x01,0x00,0x00,0x74,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x74,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x01,0x00,0x00, +0x86,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x98,0x01,0x00,0x00, +0x77,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0x9b,0x01,0x00,0x00,0x77,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0xa8,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xb2,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, +0xa5,0x01,0x00,0x00,0xb6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xc9,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xc3,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0xc5,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, +0x32,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0xc5,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0xdd,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xec,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf6,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0x16,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0xca,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x16,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xff,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x02,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0xff,0x01,0x00,0x00, +0x03,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x07,0x02,0x00,0x00, +0x06,0x02,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x08,0x02,0x00,0x00,0xf6,0x01,0x00,0x00,0x07,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x50,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, 
+0x33,0x01,0x00,0x00,0x09,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x3b,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x49,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0xca,0x01,0x00,0x00,0x4a,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x19,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x16,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x4d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x02,0x00,0x00,0xdd,0x01,0x00,0x00,0x51,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, +0x55,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x58,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x59,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5a,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0x59,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x02,0x00,0x00,0xf6,0x01,0x00,0x00,0x61,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x63,0x02,0x00,0x00, +0x16,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x62,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, +0x65,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x01,0x00,0x00, +0x5b,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x66,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x3b,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x3b,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x50,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x33,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5e,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x62,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x62,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t mul_f32_len = 1456; +const uint64_t mul_f32_len = 4276; unsigned char mul_mat_vec_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 
-0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xb4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa7,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x17,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x62,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xa7,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 
-0x6d,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xa6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x1e,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6d,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa6,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa5,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaf,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1b,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x25,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x2d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4b,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x08,0x01,0x00,0x00, 
+0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xa2,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x20,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x52,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, 
+0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x86,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xac,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x88,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8c,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x87,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x91,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8f,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x91,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x89,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00, +0xaa,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9f,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa0,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6c,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xad,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa1,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_f16_f32_len = 2372; +const uint64_t mul_mat_vec_f16_f32_len = 2776; unsigned char mul_mat_vec_nc_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -43707,7 +39152,7 @@ const uint64_t mul_mat_vec_p021_f16_f32_len = 2768; unsigned char mul_mat_vec_q2_K_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xdc,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, @@ -43715,34 +39160,676 @@ unsigned char mul_mat_vec_q2_K_f32_data[] = { 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xca,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, 
-0x47,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6a,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x6d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x68,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6f,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x6a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x6a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x88,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x89,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x89,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x89,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc7,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xc8,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xc8,0x02,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xc8,0x02,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xca,0x02,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xca,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd2,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x45,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x46,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x59,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x66,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x68,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x6b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x6b,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x88,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x8a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8a,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, +0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5e,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00, +0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0x70,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x08,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0xc7,0x02,0x00,0x00,0x44,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, +0xc8,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0xc9,0x02,0x00,0x00, +0xca,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xd2,0x02,0x00,0x00,0x36,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x52,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x52,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xd3,0x02,0x00,0x00,0x28,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xa8,0x02,0x00,0x00,0x55,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0x1a,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x53,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, +0x19,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0x41,0x00,0x08,0x00, +0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x71,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x66,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x81,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x44,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x4d,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x82,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x44,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, +0x4d,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, +0x82,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x14,0x00,0x00,0x00, +0xd6,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x82,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, +0x59,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0x70,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x87,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x82,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, 
+0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x95,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0xec,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x44,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x01,0x00,0x00, +0x00,0x01,0x00,0x00,0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x02,0x01,0x00,0x00,0xdd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x93,0x00,0x00,0x00, +0x0c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00, +0x0e,0x01,0x00,0x00,0x8b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x0d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x0f,0x01,0x00,0x00,0x0e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x30,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00, +0x16,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x17,0x01,0x00,0x00, +0x16,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, +0x19,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x0f,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xd5,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x27,0x01,0x00,0x00, +0x26,0x01,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x28,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xb3,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00, +0x2d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x1c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00, +0x93,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x95,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x8b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x44,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x30,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x42,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0xae,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x50,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x2d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x93,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x8b,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x61,0x01,0x00,0x00, +0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x30,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x44,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x78,0x01,0x00,0x00,0xd5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x93,0x00,0x00,0x00, +0x87,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00, 
+0x89,0x01,0x00,0x00,0x8b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x30,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x91,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x92,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x94,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xae,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0xa2,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, +0xa4,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0xb3,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x97,0x01,0x00,0x00,0xa7,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, +0x93,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x95,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0x8b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, +0x30,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, +0xbf,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0xc1,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, +0xd5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0xcc,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, +0xb3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0xa9,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x44,0x00,0x00,0x00, 
+0xd4,0x01,0x00,0x00,0xd7,0x02,0x00,0x00,0xd2,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xde,0x01,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xe5,0x01,0x00,0x00,0x9e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0xe6,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, +0xe8,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0xbf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0xfc,0x01,0x00,0x00,0xc6,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, +0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xff,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0x02,0x02,0x00,0x00, +0xf5,0x01,0x00,0x00,0x01,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xea,0x01,0x00,0x00, +0x02,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0xe6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0xed,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x15,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x15,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0x16,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x19,0x02,0x00,0x00, +0x18,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00, +0x1b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x03,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x0e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0x16,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x31,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x36,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x3e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00,0x4f,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00, +0x45,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
+0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x47,0x02,0x00,0x00, +0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x49,0x02,0x00,0x00, +0x33,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0x60,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x68,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, +0x5c,0x02,0x00,0x00,0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x5e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x61,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00, +0x63,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x89,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x74,0x02,0x00,0x00,0x91,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x62,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, +0x4f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x76,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0xb3,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0xbb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x8c,0x02,0x00,0x00,0x4f,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, +0xa2,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x44,0x00,0x00,0x00, +0x91,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x44,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x91,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0x95,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0x93,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0xd6,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x81,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, +0x7c,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x7f,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x44,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x44,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x4f,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x44,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, 
+0xa4,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, +0x4f,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0x55,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x55,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, +0xd3,0x02,0x00,0x00,0x23,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x52,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x54,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xa9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xab,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xab,0x02,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0x48,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xac,0x00,0x05,0x00,0x59,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xad,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb1,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xac,0x02,0x00,0x00, +0xb0,0x00,0x05,0x00,0x59,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0x24,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb4,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x24,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0xbb,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0xba,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xbc,0x02,0x00,0x00, +0xbb,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, +0xbd,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x44,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0xbd,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb6,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x02,0x00,0x00,0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xae,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x02,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0xd4,0x02,0x00,0x00,0x8c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xab,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xad,0x02,0x00,0x00, +0xaa,0x00,0x05,0x00,0x59,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0x24,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xc6,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc4,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x13,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0xcf,0x02,0x00,0x00,0x47,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x44,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, +0xcf,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x95,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xce,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xd1,0x02,0x00,0x00, +0xd0,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xc6,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc6,0x02,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t mul_mat_vec_q2_K_f32_len = 7612; + +unsigned char mul_mat_vec_q3_K_f32_data[] = { 
+0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x1d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x0e,0x03,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6f,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x73,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x74,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x75,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x8b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x48,0x00,0x04,0x00,0x8c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, @@ -43752,1923 +39839,1279 @@ unsigned char mul_mat_vec_q2_K_f32_data[] = { 0x47,0x00,0x04,0x00,0x8e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8e,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb1,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xb2,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xb2,0x02,0x00,0x00, +0x0b,0x03,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x0c,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x0c,0x03,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xb2,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb4,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb4,0x02,0x00,0x00, 
+0x47,0x00,0x03,0x00,0x0c,0x03,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0e,0x03,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0e,0x03,0x00,0x00, 0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbc,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x17,0x03,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x47,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x3a,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x5d,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x6a,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x6b,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x70,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x8b,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x8d,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x64,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x6f,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x72,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x73,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, 
+0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x8c,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x97,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x70,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x08,0x01,0x00,0x00, -0x1d,0x00,0x03,0x00,0xb1,0x02,0x00,0x00,0x47,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0x20,0x00,0x04,0x00,0xb3,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x3b,0x00,0x04,0x00,0xb3,0x02,0x00,0x00, -0xb4,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x48,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, 
+0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x70,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, +0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x0b,0x03,0x00,0x00,0x4c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0d,0x03,0x00,0x00,0x0e,0x03,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x41,0x00,0x00,0x00, +0x16,0x03,0x00,0x00,0x16,0x03,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x56,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x56,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x2b,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x91,0x02,0x00,0x00, 
-0x59,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x1b,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x57,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x1a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x75,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x6b,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x75,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6b,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x47,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x85,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x47,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0x51,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0x85,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x85,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x25,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8a,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x97,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, 
-0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0xec,0x00,0x00,0x00, 
-0xeb,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x95,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x97,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x8e,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x33,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x9f,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x0b,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x25,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0xb4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x47,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x01,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x95,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x8e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x33,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, 
-0x16,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x39,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0xaf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0x4a,0x01,0x00,0x00,0xb4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x95,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x8e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x33,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xb4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x73,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00, -0x75,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x65,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x95,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x97,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x8e,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x33,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x9f,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x71,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0x8a,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x8b,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0xaf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0xb4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x47,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0x95,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00,0xa5,0x01,0x00,0x00, -0x8e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xac,0x01,0x00,0x00,0x33,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x9f,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xb2,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0xb4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xa0,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, 
-0xd5,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xe4,0x01,0x00,0x00,0xbf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0xc6,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00, -0xed,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xe4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0xeb,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x03,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x36,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x05,0x02,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x47,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0x07,0x02,0x00,0x00, -0xf2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x0a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x11,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x02,0x00,0x00,0x1c,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x09,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x30,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x38,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0x37,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x02,0x00,0x00, -0x35,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x57,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x47,0x02,0x00,0x00, -0x5f,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x48,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x36,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x47,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x37,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x7e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x86,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x63,0x02,0x00,0x00, -0x62,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x63,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0xa5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0xad,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00, -0x36,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x47,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x86,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x8a,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, -0x7f,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x47,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x47,0x00,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x53,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x59,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x25,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x56,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x58,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x92,0x02,0x00,0x00, -0x92,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x02,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x4c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0xad,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x97,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9b,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x96,0x02,0x00,0x00,0xb1,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x26,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xf7,0x00,0x03,0x00,0xa0,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x9e,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0x9f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0x26,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x4b,0x00,0x00,0x00, -0xa4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x41,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0x4b,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x47,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, -0xa6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xa7,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x02,0x00,0x00,0xe0,0x00,0x04,0x00, -0x92,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x98,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x98,0x02,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xac,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x95,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0x97,0x02,0x00,0x00,0xaa,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0xae,0x02,0x00,0x00,0x26,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb0,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, -0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xaf,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x15,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x4b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x47,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x97,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbb,0x02,0x00,0x00,0xba,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb0,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x02,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 
+0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5d,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x18,0x03,0x00,0x00,0x28,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0xec,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x18,0x03,0x00,0x00, +0x1a,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x18,0x03,0x00,0x00, +0x19,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x41,0x00,0x07,0x00, +0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x4c,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, +0x55,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x14,0x00,0x00,0x00, 
+0x1b,0x03,0x00,0x00,0x15,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, +0x64,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, +0x89,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x85,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8a,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, 
+0x14,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, +0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0xfb,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xe3,0x00,0x00,0x00, +0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0xc2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, 
+0x32,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x89,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, +0x0c,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0xce,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x3c,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00, +0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0x1b,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00, +0x1d,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x00,0x01,0x00,0x00, +0x1f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x20,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00, +0x95,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x97,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x8e,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x32,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0xa4,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x38,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x41,0x01,0x00,0x00, +0x40,0x01,0x00,0x00,0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x35,0x01,0x00,0x00, +0x41,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x45,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x45,0x01,0x00,0x00, 
+0x85,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x2b,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x50,0x01,0x00,0x00,0xb2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x53,0x01,0x00,0x00, +0x52,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, +0xce,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x3c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x5d,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x64,0x01,0x00,0x00, +0x63,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x65,0x01,0x00,0x00, +0x54,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x66,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x01,0x00,0x00,0x95,0x00,0x00,0x00, +0x70,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x77,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x79,0x01,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7c,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x7d,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xf2,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x86,0x01,0x00,0x00, +0x83,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x88,0x01,0x00,0x00, +0x87,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x89,0x01,0x00,0x00,0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x8a,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00, +0x8c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x8c,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, 
+0x85,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0x73,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, +0x99,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, +0x9b,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, +0xce,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0xa8,0x01,0x00,0x00,0x3c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xaa,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0xad,0x01,0x00,0x00, +0xac,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0x9d,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x90,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0x95,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0xba,0x01,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xc4,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, +0xc4,0x01,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0xc9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xd1,0x01,0x00,0x00, +0xd0,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, +0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xd3,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, +0xd3,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x82,0x00,0x05,0x00, 
+0x14,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, +0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, +0xd7,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe0,0x01,0x00,0x00,0xc1,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xe1,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, +0xe2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xe4,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, +0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xed,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xee,0x01,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xee,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0xd1,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, +0xf3,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0xf5,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, +0xe5,0x01,0x00,0x00,0xf6,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0xb1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x95,0x00,0x00,0x00, +0x02,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x03,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x05,0x02,0x00,0x00,0x04,0x02,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x0e,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x13,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x15,0x02,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x19,0x02,0x00,0x00, 
+0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1b,0x02,0x00,0x00, +0x1a,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, +0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x1c,0x02,0x00,0x00, +0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00,0x1e,0x02,0x00,0x00, +0x1d,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, +0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x05,0x02,0x00,0x00, +0x21,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x89,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x30,0x02,0x00,0x00, +0x2f,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0xee,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x18,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, +0x3e,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x30,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00,0x45,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x43,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00,0x95,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x97,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x8e,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x41,0x00,0x08,0x00, +0x9d,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x77,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x54,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x59,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0xa4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0xca,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x63,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x3c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x63,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x65,0x02,0x00,0x00, +0x64,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, 
+0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x67,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x67,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x3a,0x00,0x00,0x00, +0x69,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, +0x6a,0x02,0x00,0x00,0xb7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x4c,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x6c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0xe1,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0x78,0x02,0x00,0x00, +0x77,0x02,0x00,0x00,0xb2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, +0x79,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0xee,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x5f,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0xab,0x00,0x05,0x00, +0x64,0x00,0x00,0x00,0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0x8b,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x8d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x8c,0x02,0x00,0x00, +0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x6d,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x45,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x95,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x41,0x00,0x06,0x00, +0x97,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x8e,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, +0x41,0x00,0x08,0x00,0x9d,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, +0x77,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, +0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, +0xa1,0x02,0x00,0x00,0x38,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xa6,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0xab,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0xae,0x02,0x00,0x00,0xab,0x02,0x00,0x00,0x3c,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, +0xae,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, +0x7b,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xb2,0x00,0x00,0x00, 
+0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x3a,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, +0xb4,0x02,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb6,0x02,0x00,0x00,0xb5,0x02,0x00,0x00,0xb7,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, +0xb6,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x4c,0x00,0x00,0x00, +0xb8,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0xb7,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, +0xe1,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x32,0x00,0x00,0x00, +0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x77,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, +0xc3,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0xee,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x32,0x00,0x00,0x00,0xd3,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0xa8,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0xd3,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xd4,0x02,0x00,0x00, +0xab,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, +0xd5,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0xc6,0x02,0x00,0x00, +0xd7,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0xd9,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4c,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, +0x90,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x4c,0x00,0x00,0x00, +0xdd,0x02,0x00,0x00,0x1c,0x03,0x00,0x00,0xdb,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, +0x1b,0x03,0x00,0x00,0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x83,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x85,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x57,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x4c,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0xe8,0x02,0x00,0x00, +0x3e,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0x60,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xec,0x02,0x00,0x00,0x18,0x03,0x00,0x00,0x23,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x5d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5f,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, +0xef,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xef,0x02,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x19,0x03,0x00,0x00, +0x50,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x06,0x03,0x00,0x00, +0xf2,0x02,0x00,0x00,0xac,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0xf5,0x02,0x00,0x00,0x19,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xf1,0x02,0x00,0x00,0xf2,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf5,0x02,0x00,0x00, +0xf0,0x02,0x00,0x00,0xf1,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf0,0x02,0x00,0x00,0xb0,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0x24,0x00,0x00,0x00,0x19,0x03,0x00,0x00, 
+0xf7,0x00,0x03,0x00,0xfa,0x02,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xf8,0x02,0x00,0x00,0xf9,0x02,0x00,0x00, +0xfa,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xf9,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x24,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0x4f,0x00,0x00,0x00, +0xfe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x00,0x03,0x00,0x00,0xff,0x02,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x01,0x03,0x00,0x00,0x4f,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x02,0x03,0x00,0x00,0x01,0x03,0x00,0x00,0x81,0x00,0x05,0x00, +0x4c,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00, +0x00,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x01,0x03,0x00,0x00, +0x03,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0xfa,0x02,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfa,0x02,0x00,0x00,0xe0,0x00,0x04,0x00, +0x23,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xed,0x02,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf2,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf2,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0x35,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xef,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf1,0x02,0x00,0x00,0xaa,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0x08,0x03,0x00,0x00,0x24,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x0a,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x08,0x03,0x00,0x00,0x09,0x03,0x00,0x00, +0x0a,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x09,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, +0x13,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x10,0x03,0x00,0x00,0x0f,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00, +0x10,0x03,0x00,0x00,0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x13,0x03,0x00,0x00,0x4f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x14,0x03,0x00,0x00,0x13,0x03,0x00,0x00,0x41,0x00,0x06,0x00, +0x97,0x00,0x00,0x00,0x15,0x03,0x00,0x00,0x0e,0x03,0x00,0x00, +0x15,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, +0x15,0x03,0x00,0x00,0x14,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, +0x0a,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x03,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_q2_K_f32_len = 7628; - -unsigned char mul_mat_vec_q3_K_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x08,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0xf9,0x02,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x90,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x90,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf6,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf7,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf7,0x02,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf9,0x02,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf9,0x02,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x02,0x03,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x6a,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x79,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x8f,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x90,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x91,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x91,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x70,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x06,0x02,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x50,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x70,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xf6,0x02,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xf7,0x02,0x00,0x00, -0xf6,0x02,0x00,0x00,0x20,0x00,0x04,0x00,0xf8,0x02,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0x3b,0x00,0x04,0x00, -0xf8,0x02,0x00,0x00,0xf9,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x02,0x03,0x00,0x00,0x50,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, 
-0x4a,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x5b,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x63,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x63,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x03,0x03,0x00,0x00, -0x2b,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x1b,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6b,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x64,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x03,0x03,0x00,0x00,0x41,0x00,0x07,0x00,0x82,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x4f,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0x59,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x89,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0x89,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x25,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x8e,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, 
-0xa5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x06,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x06,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0xcf,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xab,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, 
-0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0x25,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x07,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x81,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0xcc,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x3f,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x11,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00, -0x15,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0xfd,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4f,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x98,0x00,0x00,0x00,0x23,0x01,0x00,0x00, 
-0x41,0x00,0x06,0x00,0x9a,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x92,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x2d,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0xa7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x38,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x38,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x39,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, -0x81,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x3f,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x44,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x42,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x35,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x3f,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x5f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x4f,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x98,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0x71,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, -0xef,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x7f,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x38,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x7f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00, -0x81,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x01,0x00,0x00,0x82,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00, -0x77,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x84,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x86,0x01,0x00,0x00,0x44,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0xcc,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x35,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x3f,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x9f,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x4f,0x00,0x00,0x00, -0xa8,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x89,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x62,0x01,0x00,0x00, 
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, -0x98,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb6,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xa7,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x81,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xcb,0x01,0x00,0x00,0x44,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0xb1,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xc0,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0xd6,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0xd5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd6,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0xd9,0x01,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0xe3,0x01,0x00,0x00, -0xe2,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0xe6,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0xe6,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, 
-0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x01,0x00,0x00,0xda,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0xec,0x01,0x00,0x00, -0xeb,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x4f,0x00,0x00,0x00, -0xee,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0xa8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, -0x98,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x01,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x81,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x39,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x0f,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x44,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x15,0x02,0x00,0x00, -0xf8,0x01,0x00,0x00,0x14,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0xd6,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x1e,0x02,0x00,0x00,0x25,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x81,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0xe2,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x2e,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x13,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0xab,0x00,0x05,0x00, 
-0x6a,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x32,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x34,0x02,0x00,0x00, -0x33,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x4f,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x34,0x02,0x00,0x00,0xee,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x02,0x00,0x00, -0x98,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x9a,0x00,0x00,0x00,0x3f,0x02,0x00,0x00,0x92,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x46,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x48,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0xa7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0xc0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0x38,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0x81,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x39,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, -0x4b,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x3d,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, -0x59,0x02,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x02,0x00,0x00,0x5a,0x02,0x00,0x00,0x44,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x5b,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x40,0x02,0x00,0x00,0x5c,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0xd6,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0x66,0x02,0x00,0x00,0x39,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0xe2,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x35,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x59,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x76,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0xab,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, 
-0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x6a,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4f,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x98,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x9a,0x00,0x00,0x00,0x87,0x02,0x00,0x00, -0x92,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x87,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0xa0,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x7d,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x8d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x35,0x00,0x00,0x00, -0x90,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x92,0x02,0x00,0x00,0x91,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0xa7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x35,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x38,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x81,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x39,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x44,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0xa4,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x4f,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0x88,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0xae,0x02,0x00,0x00,0xd6,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x35,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xae,0x02,0x00,0x00, -0x70,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xb1,0x02,0x00,0x00,0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0xe2,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x35,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0xbb,0x02,0x00,0x00,0x9f,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc1,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0x39,0x00,0x00,0x00, 
-0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0xb2,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4f,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x4f,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x07,0x03,0x00,0x00, -0xc6,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x06,0x03,0x00,0x00,0x38,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0xd3,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4f,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x07,0x03,0x00,0x00, -0xd3,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x5b,0x00,0x00,0x00, -0xd4,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x66,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0x03,0x03,0x00,0x00, -0x25,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x65,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x37,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0xd8,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xda,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xda,0x02,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x04,0x03,0x00,0x00,0x54,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0xf1,0x02,0x00,0x00,0xdd,0x02,0x00,0x00,0xad,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x04,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xdc,0x02,0x00,0x00, -0xdd,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe0,0x02,0x00,0x00,0xdb,0x02,0x00,0x00,0xdc,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdb,0x02,0x00,0x00,0xb1,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0x26,0x00,0x00,0x00, -0x04,0x03,0x00,0x00,0xf7,0x00,0x03,0x00,0xe5,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xe3,0x02,0x00,0x00, -0xe4,0x02,0x00,0x00,0xe5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe4,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x26,0x00,0x00,0x00,0x04,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0xe9,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0xea,0x02,0x00,0x00, -0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xec,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0xec,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0xed,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xec,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00,0xe5,0x02,0x00,0x00, -0xe0,0x00,0x04,0x00,0x37,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0xd8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0xdd,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdd,0x02,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0x04,0x03,0x00,0x00, -0x38,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xda,0x02,0x00,0x00, -0xf8,0x00,0x02,0x00,0xdc,0x02,0x00,0x00,0xaa,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0x26,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf5,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xf3,0x02,0x00,0x00, -0xf4,0x02,0x00,0x00,0xf5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf4,0x02,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, 
-0xfa,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfb,0x02,0x00,0x00, -0xfa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfd,0x02,0x00,0x00,0xfb,0x02,0x00,0x00,0x11,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0xfe,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x9a,0x00,0x00,0x00,0x00,0x03,0x00,0x00, -0xf9,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x00,0x03,0x00,0x00,0xff,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0xf5,0x02,0x00,0x00,0xf8,0x00,0x02,0x00, -0xf5,0x02,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t mul_mat_vec_q3_K_f32_len = 9252; +const uint64_t mul_mat_vec_q3_K_f32_len = 9236; unsigned char mul_mat_vec_q4_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xd1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xcb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x28,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x79,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x79,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xc0,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xc0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xc0,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc2,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xca,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x76,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x78,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb7,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xb8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xb8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xb8,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xba,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xba,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc4,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00, 
-0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x00,0x00,0x00,0x41,0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, -0xbf,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
-0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x24,0x00,0x00,0x00, 
+0x3b,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x2d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x00,0x00,0x00,0x41, +0x1d,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x77,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xb7,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xb9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, 
+0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x60,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x79,0x00,0x00,0x00, 
+0x26,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x80,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x71,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x17,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa3,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xad,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xa4,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xae,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xad,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x1e,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xae,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xae,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa6,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa6,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa5,0x00,0x00,0x00,0xaa,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xbe,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, 
-0xbd,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbd,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xbe,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x8d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x80,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x14,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xac,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xa1,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9c,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xa6,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa6,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9e,0x00,0x00,0x00, 
+0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x00,0x00,0x00, +0xaa,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb5,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x80,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xc2,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb6,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb6,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_q4_0_f32_len = 3180; +const uint64_t mul_mat_vec_q4_0_f32_len = 3184; unsigned char mul_mat_vec_q4_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xd4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xce,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x28,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc4,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xc5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xc5,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xc5,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc7,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xcf,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x17,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x50,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x7e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xbe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xbe,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc0,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc0,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xc8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1b,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x25,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x2d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x86,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xc4,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xc6,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x68,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x68,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x68,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x17,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x88,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x00,0x00, 
-0x21,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xad,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0xa5,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xab,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xab,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc2,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x1e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x88,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x00,0x00,0x00, 
-0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x9e,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xbd,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xbf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xbf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, 
+0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x66,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x8e,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x66,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x66,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x86,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x86,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x14,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xac,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, 
+0xa7,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa7,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xac,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x1b,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xac,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xac,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa3,0x00,0x00,0x00,0xaa,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xbc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xba,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbb,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x86,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xbc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xbc,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + }; -const uint64_t mul_mat_vec_q4_1_f32_len = 3248; +const uint64_t mul_mat_vec_q4_1_f32_len = 3252; unsigned char mul_mat_vec_q4_K_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xa2,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x9e,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, 0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, @@ -45676,3575 +41119,3566 @@ unsigned char mul_mat_vec_q4_K_f32_data[] = { 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xef,0x01,0x00,0x00, 
-0x94,0x03,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xec,0x01,0x00,0x00, +0x90,0x03,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x72,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x78,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xec,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xed,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xed,0x01,0x00,0x00, +0xe9,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xea,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xea,0x01,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xed,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xef,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xef,0x01,0x00,0x00, 
+0x47,0x00,0x03,0x00,0xea,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xec,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xec,0x01,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x91,0x03,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x92,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x92,0x03,0x00,0x00, +0x8d,0x03,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x8e,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8e,0x03,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x92,0x03,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x94,0x03,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x03,0x00,0x00, +0x47,0x00,0x03,0x00,0x8e,0x03,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x90,0x03,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x90,0x03,0x00,0x00, 0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9c,0x03,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x98,0x03,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
+0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x63,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x74,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x77,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x5f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x70,0x00,0x00,0x00, 
+0x71,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x73,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x74,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x75,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x93,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x61,0x01,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00, 0x42,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xec,0x01,0x00,0x00,0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xee,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0xee,0x01,0x00,0x00,0xef,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf4,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0xbb,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
+0xe9,0x01,0x00,0x00,0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xeb,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xea,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0xeb,0x01,0x00,0x00,0xec,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xf1,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x21,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x02,0x00,0x00, 0x22,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x72,0x03,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x73,0x03,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x91,0x03,0x00,0x00, -0x4d,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x92,0x03,0x00,0x00, -0x91,0x03,0x00,0x00,0x20,0x00,0x04,0x00,0x93,0x03,0x00,0x00, -0x0c,0x00,0x00,0x00,0x92,0x03,0x00,0x00,0x3b,0x00,0x04,0x00, -0x93,0x03,0x00,0x00,0x94,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x9c,0x03,0x00,0x00, -0x4e,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x59,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x5c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x5c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9d,0x03,0x00,0x00, -0x2b,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x71,0x03,0x00,0x00, -0x5d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x9d,0x03,0x00,0x00,0x1b,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x9d,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x9d,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x70,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, 
-0x98,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x93,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcf,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x93,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0xd0,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x0a,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x0b,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x15,0x01,0x00,0x00, -0xb2,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0xdf,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x25,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x1a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0xed,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x29,0x01,0x00,0x00,0x28,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0xc1,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0xdf,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0x25,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x93,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, 
-0x09,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x46,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, -0x4b,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x46,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00,0x58,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00, -0x46,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x93,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x65,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0x6b,0x01,0x00,0x00, -0x6a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x74,0x01,0x00,0x00, -0x73,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x85,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x85,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x46,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0x46,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xd4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0xa8,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00, -0x46,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x93,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0xb1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0xd4,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x9a,0x00,0x00,0x00,0xb7,0x01,0x00,0x00, -0xb6,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x46,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x93,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, -0xc3,0x01,0x00,0x00,0xd4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x9a,0x00,0x00,0x00,0xc5,0x01,0x00,0x00,0xc4,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0xc5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0x96,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, 
-0xe0,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xe9,0x01,0x00,0x00,0xc0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x01,0x00,0x00, -0xf0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf3,0x01,0x00,0x00,0xf1,0x01,0x00,0x00,0x69,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00, -0xff,0x01,0x00,0x00,0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0xfe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x51,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x02,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x03,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x09,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0x25,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x0a,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0x0a,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0d,0x02,0x00,0x00,0x5e,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x0b,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x14,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0x64,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x15,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x14,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x15,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x0f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0x42,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x21,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x22,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x24,0x02,0x00,0x00,0x74,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0xf3,0x01,0x00,0x00, -0x2a,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00, -0x2c,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x7d,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x30,0x02,0x00,0x00, 
-0x2d,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0x36,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x38,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3b,0x02,0x00,0x00,0x86,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x39,0x02,0x00,0x00,0x3b,0x02,0x00,0x00, -0x31,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x02,0x00,0x00,0xf3,0x01,0x00,0x00,0x42,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x44,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x43,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x44,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x8f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x3d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0xf1,0x01,0x00,0x00,0x6d,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x4f,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x52,0x02,0x00,0x00,0x9c,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00, -0x59,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0xaa,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x5d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x25,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x64,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x63,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x65,0x02,0x00,0x00, -0x64,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x67,0x02,0x00,0x00,0xb8,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x69,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x65,0x02,0x00,0x00,0x67,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x64,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x6f,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x72,0x02,0x00,0x00,0xc6,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x70,0x02,0x00,0x00,0x72,0x02,0x00,0x00, -0x69,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x7a,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x42,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x7b,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0xcf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x2a,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00, -0x85,0x02,0x00,0x00,0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0xd8,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x36,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x90,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x91,0x02,0x00,0x00, -0x90,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x93,0x02,0x00,0x00,0xe1,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0x42,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xf4,0x01,0x00,0x00,0x9b,0x02,0x00,0x00, -0xef,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0xea,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xf5,0x01,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xb8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xc7,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0xb4,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xbd,0x02,0x00,0x00,0x1c,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, -0xb5,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x38,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0xff,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0xd5,0x02,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xdc,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, -0xbd,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x85,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, -0xc8,0x02,0x00,0x00,0xeb,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xfd,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x01,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x38,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x0c,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0xb3,0x02,0x00,0x00,0x01,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x13,0x03,0x00,0x00,0x64,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x17,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x13,0x03,0x00,0x00, -0xbd,0x02,0x00,0x00,0x0c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x90,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0xc8,0x02,0x00,0x00,0x17,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x15,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x2d,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x03,0x00,0x00, -0xa9,0x02,0x00,0x00,0x22,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x44,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x38,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x34,0x03,0x00,0x00, -0xb3,0x02,0x00,0x00,0x2d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x3f,0x03,0x00,0x00,0x6f,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x43,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3f,0x03,0x00,0x00, -0xbd,0x02,0x00,0x00,0x38,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4a,0x03,0x00,0x00,0x9b,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x4e,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x4a,0x03,0x00,0x00, -0xc8,0x02,0x00,0x00,0x43,0x03,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x56,0x03,0x00,0x00,0x9c,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x5a,0x03,0x00,0x00, -0xaa,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x5b,0x03,0x00,0x00,0x49,0x02,0x00,0x00,0x5a,0x03,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x5c,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x56,0x03,0x00,0x00,0x5b,0x03,0x00,0x00,0x70,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x5f,0x03,0x00,0x00,0xe4,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x61,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x5f,0x03,0x00,0x00,0x5c,0x03,0x00,0x00,0x70,0x00,0x04,0x00, 
-0x4d,0x00,0x00,0x00,0x64,0x03,0x00,0x00,0x00,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x4d,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x64,0x03,0x00,0x00,0x61,0x03,0x00,0x00,0x85,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6a,0x03,0x00,0x00,0x8a,0x00,0x00,0x00, -0x4e,0x03,0x00,0x00,0x7f,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xa1,0x03,0x00,0x00,0x6a,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, -0x4d,0x00,0x00,0x00,0x6b,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x66,0x03,0x00,0x00, -0xa1,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x6d,0x03,0x00,0x00,0x59,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6e,0x03,0x00,0x00,0x6d,0x03,0x00,0x00, -0x6b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x59,0x00,0x00,0x00, -0x6e,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x71,0x03,0x00,0x00,0x9d,0x03,0x00,0x00,0x25,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x5c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x5e,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x72,0x03,0x00,0x00, -0x72,0x03,0x00,0x00,0x73,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x03,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9e,0x03,0x00,0x00, -0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x8c,0x03,0x00,0x00, -0x78,0x03,0x00,0x00,0xad,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x7b,0x03,0x00,0x00,0x9e,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x77,0x03,0x00,0x00,0x78,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x7b,0x03,0x00,0x00, -0x76,0x03,0x00,0x00,0x77,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x03,0x00,0x00,0xb1,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x7e,0x03,0x00,0x00,0x26,0x00,0x00,0x00,0x9e,0x03,0x00,0x00, -0xf7,0x00,0x03,0x00,0x80,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x7e,0x03,0x00,0x00,0x7f,0x03,0x00,0x00, -0x80,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x03,0x00,0x00, -0x26,0x00,0x00,0x00,0x9e,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x85,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x84,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x86,0x03,0x00,0x00,0x85,0x03,0x00,0x00,0x41,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x87,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x88,0x03,0x00,0x00,0x87,0x03,0x00,0x00,0x81,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x89,0x03,0x00,0x00,0x88,0x03,0x00,0x00, -0x86,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x87,0x03,0x00,0x00, -0x89,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0x80,0x03,0x00,0x00, -0xf8,0x00,0x02,0x00,0x80,0x03,0x00,0x00,0xe0,0x00,0x04,0x00, -0x72,0x03,0x00,0x00,0x72,0x03,0x00,0x00,0x73,0x03,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x03,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x03,0x00,0x00,0x9e,0x03,0x00,0x00,0x90,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x75,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, -0x77,0x03,0x00,0x00,0xaa,0x00,0x05,0x00,0x63,0x00,0x00,0x00, -0x8e,0x03,0x00,0x00,0x26,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x90,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x8e,0x03,0x00,0x00,0x8f,0x03,0x00,0x00, -0x90,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x03,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x95,0x03,0x00,0x00, -0x15,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x96,0x03,0x00,0x00,0x95,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x03,0x00,0x00, 
-0x96,0x03,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x99,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x9a,0x03,0x00,0x00,0x99,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0xf4,0x01,0x00,0x00,0x9b,0x03,0x00,0x00,0x94,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0x98,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9b,0x03,0x00,0x00,0x9a,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x90,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x03,0x00,0x00, +0x3f,0x02,0x00,0x00,0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6f,0x03,0x00,0x00,0x08,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0x8d,0x03,0x00,0x00,0x4a,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x8e,0x03,0x00,0x00,0x8d,0x03,0x00,0x00, +0x20,0x00,0x04,0x00,0x8f,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, +0x8e,0x03,0x00,0x00,0x3b,0x00,0x04,0x00,0x8f,0x03,0x00,0x00, +0x90,0x03,0x00,0x00,0x0c,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x98,0x03,0x00,0x00,0x3f,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x58,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x58,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x99,0x03,0x00,0x00, +0x28,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x6e,0x03,0x00,0x00, +0x59,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x99,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x59,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x99,0x03,0x00,0x00,0x19,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x99,0x03,0x00,0x00, +0x41,0x00,0x08,0x00,0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7a,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x83,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x96,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
+0x8e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xce,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0xc3,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x8e,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x0a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0xad,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x14,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, +0xdc,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x17,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0x17,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0xe9,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x23,0x01,0x00,0x00, +0x22,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x25,0x01,0x00,0x00, +0x24,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0xbc,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x31,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0xdc,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x32,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x33,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x8e,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x76,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x40,0x01,0x00,0x00, 
+0x3f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x4c,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x4d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00,0x55,0x01,0x00,0x00, +0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0xcf,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x43,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x8e,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x76,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0x62,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x66,0x01,0x00,0x00, +0x65,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0x68,0x01,0x00,0x00, +0x67,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, +0x79,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x82,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0x82,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x8b,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00, +0x93,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x92,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x94,0x01,0x00,0x00, 
+0x93,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, +0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xdc,0x00,0x00,0x00,0xa0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, +0xa2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, +0xcf,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00, +0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0x43,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x8e,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x76,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0xae,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, +0xb1,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb3,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x95,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, +0xb3,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x8e,0x00,0x00,0x00, +0xbd,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, +0xbd,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, +0xc0,0x01,0x00,0x00,0xcf,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x95,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xcb,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xd4,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xdd,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xe6,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, 
+0x06,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00, +0xed,0x01,0x00,0x00,0x13,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xee,0x01,0x00,0x00, +0xed,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf0,0x01,0x00,0x00,0xee,0x01,0x00,0x00,0x65,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0xf2,0x01,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf3,0x01,0x00,0x00, +0xf2,0x01,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xf5,0x01,0x00,0x00,0x41,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xfb,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00, +0xfc,0x01,0x00,0x00,0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xfd,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xff,0x01,0x00,0x00,0x4e,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0xfd,0x01,0x00,0x00,0xff,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xf3,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, +0x00,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x06,0x02,0x00,0x00,0xf0,0x01,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x07,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x08,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x0a,0x02,0x00,0x00,0x5b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x11,0x02,0x00,0x00,0xf0,0x01,0x00,0x00,0x61,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x12,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x11,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x12,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x15,0x02,0x00,0x00,0x69,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x15,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1d,0x02,0x00,0x00,0xf0,0x01,0x00,0x00,0x3f,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x1e,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, +0x1e,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0x71,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0xf0,0x01,0x00,0x00, +0x27,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00, +0x29,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x28,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x7a,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, +0x2a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x21,0x02,0x00,0x00, +0x2d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x34,0x02,0x00,0x00,0xf0,0x01,0x00,0x00,0x33,0x02,0x00,0x00, 
+0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x35,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x34,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x35,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x38,0x02,0x00,0x00,0x83,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0x38,0x02,0x00,0x00, +0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x02,0x00,0x00,0xf0,0x01,0x00,0x00,0x3f,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x41,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x40,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x42,0x02,0x00,0x00, +0x41,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x8c,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0x3a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4b,0x02,0x00,0x00,0xee,0x01,0x00,0x00,0x69,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x4c,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x4d,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x4f,0x02,0x00,0x00,0x99,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x82,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00, +0x56,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x55,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0xa7,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x61,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x60,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x62,0x02,0x00,0x00, +0x61,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x64,0x02,0x00,0x00,0xb5,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x64,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x61,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x6c,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x6f,0x02,0x00,0x00,0xc3,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x6f,0x02,0x00,0x00, +0x66,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x77,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x3f,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x78,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x77,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, +0x78,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
+0x7b,0x02,0x00,0x00,0xcc,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, +0x27,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00, +0x82,0x02,0x00,0x00,0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x81,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x83,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0xd5,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x86,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0x86,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x33,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x8d,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, +0x8d,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x90,0x02,0x00,0x00,0xde,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x92,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x90,0x02,0x00,0x00, +0x87,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xf1,0x01,0x00,0x00,0x98,0x02,0x00,0x00, +0xec,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x97,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x99,0x02,0x00,0x00, +0x98,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x9b,0x02,0x00,0x00,0xe7,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x9b,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xf2,0x01,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0xb3,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xae,0x02,0x00,0x00, +0x1e,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xc2,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xae,0x02,0x00,0x00, +0xb0,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00, +0xb2,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xb1,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x4c,0x02,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xba,0x02,0x00,0x00,0x19,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0xba,0x02,0x00,0x00, +0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xc3,0x02,0x00,0x00,0x78,0x02,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x35,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xc7,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x29,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, 
+0xb0,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x56,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xe4,0x02,0x00,0x00, +0xba,0x02,0x00,0x00,0xdd,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xf3,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0xc5,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xfa,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfa,0x02,0x00,0x00, +0xa6,0x02,0x00,0x00,0xf3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x35,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x09,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x05,0x03,0x00,0x00, +0xb0,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x10,0x03,0x00,0x00,0x61,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x14,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x10,0x03,0x00,0x00, +0xba,0x02,0x00,0x00,0x09,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x8d,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x1f,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, +0xc5,0x02,0x00,0x00,0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x12,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x26,0x03,0x00,0x00, +0xa6,0x02,0x00,0x00,0x1f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x31,0x03,0x00,0x00,0x41,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x35,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x03,0x00,0x00, +0xb0,0x02,0x00,0x00,0x2a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x3c,0x03,0x00,0x00,0x6c,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x40,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3c,0x03,0x00,0x00, +0xba,0x02,0x00,0x00,0x35,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x47,0x03,0x00,0x00,0x98,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x4b,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x47,0x03,0x00,0x00, +0xc5,0x02,0x00,0x00,0x40,0x03,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x97,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x57,0x03,0x00,0x00, +0xa5,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x58,0x03,0x00,0x00,0x46,0x02,0x00,0x00,0x57,0x03,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x59,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x17,0x02,0x00,0x00, +0x53,0x03,0x00,0x00,0x58,0x03,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x5c,0x03,0x00,0x00,0xe0,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x5e,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x71,0x02,0x00,0x00, +0x5c,0x03,0x00,0x00,0x59,0x03,0x00,0x00,0x70,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x61,0x03,0x00,0x00,0xfc,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x4a,0x00,0x00,0x00,0x63,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, +0x61,0x03,0x00,0x00,0x5e,0x03,0x00,0x00,0x85,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x67,0x03,0x00,0x00,0x85,0x00,0x00,0x00, 
+0x4b,0x03,0x00,0x00,0x7f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x9d,0x03,0x00,0x00,0x67,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, +0x4a,0x00,0x00,0x00,0x68,0x03,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x63,0x03,0x00,0x00, +0x9d,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x6a,0x03,0x00,0x00,0x55,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x6b,0x03,0x00,0x00,0x6a,0x03,0x00,0x00, +0x68,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x00,0x00,0x00, +0x6b,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6e,0x03,0x00,0x00,0x99,0x03,0x00,0x00,0x23,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x58,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x5a,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x6f,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, +0x71,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x71,0x03,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x9a,0x03,0x00,0x00, +0x4e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x88,0x03,0x00,0x00, +0x74,0x03,0x00,0x00,0xac,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, +0x77,0x03,0x00,0x00,0x9a,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x73,0x03,0x00,0x00,0x74,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x77,0x03,0x00,0x00, +0x72,0x03,0x00,0x00,0x73,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, +0x72,0x03,0x00,0x00,0xb0,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, +0x7a,0x03,0x00,0x00,0x24,0x00,0x00,0x00,0x9a,0x03,0x00,0x00, +0xf7,0x00,0x03,0x00,0x7c,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x7a,0x03,0x00,0x00,0x7b,0x03,0x00,0x00, +0x7c,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x03,0x00,0x00, +0x24,0x00,0x00,0x00,0x9a,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x54,0x00,0x00,0x00,0x81,0x03,0x00,0x00,0x4d,0x00,0x00,0x00, +0x80,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x82,0x03,0x00,0x00,0x81,0x03,0x00,0x00,0x41,0x00,0x05,0x00, +0x54,0x00,0x00,0x00,0x83,0x03,0x00,0x00,0x4d,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x84,0x03,0x00,0x00,0x83,0x03,0x00,0x00,0x81,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x85,0x03,0x00,0x00,0x84,0x03,0x00,0x00, +0x82,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x83,0x03,0x00,0x00, +0x85,0x03,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x03,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7c,0x03,0x00,0x00,0xe0,0x00,0x04,0x00, +0x23,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x6f,0x03,0x00,0x00, +0xf9,0x00,0x02,0x00,0x74,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, +0x74,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x03,0x00,0x00,0x9a,0x03,0x00,0x00,0x8b,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x71,0x03,0x00,0x00,0xf8,0x00,0x02,0x00, +0x73,0x03,0x00,0x00,0xaa,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, +0x8a,0x03,0x00,0x00,0x24,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x8c,0x03,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8a,0x03,0x00,0x00,0x8b,0x03,0x00,0x00, +0x8c,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x8b,0x03,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x91,0x03,0x00,0x00, +0x13,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x03,0x00,0x00,0x91,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x03,0x00,0x00, +0x92,0x03,0x00,0x00,0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x54,0x00,0x00,0x00,0x95,0x03,0x00,0x00,0x4d,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x96,0x03,0x00,0x00,0x95,0x03,0x00,0x00,0x41,0x00,0x06,0x00, +0xf1,0x01,0x00,0x00,0x97,0x03,0x00,0x00,0x90,0x03,0x00,0x00, 
+0x15,0x00,0x00,0x00,0x94,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, +0x97,0x03,0x00,0x00,0x96,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, +0x8c,0x03,0x00,0x00,0xf8,0x00,0x02,0x00,0x8c,0x03,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_q4_K_f32_len = 9176; +const uint64_t mul_mat_vec_q4_K_f32_len = 9128; unsigned char mul_mat_vec_q5_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xfa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x28,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa5,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xe7,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xe7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xe7,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe9,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xe9,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xf1,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, 
-0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x85,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x00,0x00,0x80,0x41, -0x1d,0x00,0x03,0x00,0xa2,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xe6,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe7,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe8,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xe8,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x85,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, 
-0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x85,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x85,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x85,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xad,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xad,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0xca,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xad,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xcc,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xd0,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd3,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd5,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xcd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcd,0x00,0x00,0x00, -0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe3,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xad,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe5,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t mul_mat_vec_q5_0_f32_len = 3676; - -unsigned char mul_mat_vec_q5_1_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x54,0x00,0x00,0x00, 
-0xa0,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x28,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00, 0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa3,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe1,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0xe4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xe4,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xe4,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xe6,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xe6,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xee,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe4,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xec,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xed,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x19,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x28,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x2d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, 0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x50,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xe3,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe5,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xe5,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, 
+0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00, +0xa0,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xa2,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xa2,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xab,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x08,0x01,0x00,0x00, +0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0xe1,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xe3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x33,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xec,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x98,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00, 0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, 
-0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x80,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x80,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0xc4,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc7,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xad,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc9,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xcd,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc8,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd2,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xd2,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0xc4,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xca,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xc7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xe0,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe1,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xe2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe2,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t mul_mat_vec_q5_1_f32_len = 3604; - -unsigned char mul_mat_vec_q5_K_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x8d,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0x7f,0x04,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x83,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x85,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x86,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x86,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x88,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xf7,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xf8,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf8,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xf8,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xfa,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfa,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7c,0x04,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7d,0x04,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7d,0x04,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7d,0x04,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7f,0x04,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7f,0x04,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x87,0x04,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x40,0x00,0x00,0x00, 
-0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x70,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x7d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x84,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x86,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x87,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x8c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc9,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0xf7,0x01,0x00,0x00,0x5a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x20,0x00,0x04,0x00, -0xf9,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0x3b,0x00,0x04,0x00,0xf9,0x01,0x00,0x00,0xfa,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xff,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x21,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x31,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x5d,0x04,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5e,0x04,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x7c,0x04,0x00,0x00, -0x5a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7d,0x04,0x00,0x00, -0x7c,0x04,0x00,0x00,0x20,0x00,0x04,0x00,0x7e,0x04,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7d,0x04,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7e,0x04,0x00,0x00,0x7f,0x04,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x87,0x04,0x00,0x00, -0x5b,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x52,0x00,0x00,0x00, 
-0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x66,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x69,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x88,0x04,0x00,0x00,0x2b,0x00,0x00,0x00, -0x05,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0x6a,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x88,0x04,0x00,0x00,0x1b,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x88,0x04,0x00,0x00,0x1a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x88,0x04,0x00,0x00,0x41,0x00,0x08,0x00, -0x8c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x8c,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, 
-0xb0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, 
-0xf7,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0xae,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x25,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0xfb,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x16,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0xbc,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x25,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x17,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0xf7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x3b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3c,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x25,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x42,0x01,0x00,0x00, -0x41,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00, -0x49,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x46,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x49,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x46,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x58,0x01,0x00,0x00, -0x57,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x46,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x65,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x67,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x46,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00, -0x71,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x77,0x01,0x00,0x00, -0x76,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7e,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x56,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x90,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x4d,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00, -0x2e,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x46,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, -0x46,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0xb1,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb3,0x01,0x00,0x00,0xb2,0x01,0x00,0x00,0xde,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0xb4,0x01,0x00,0x00, -0xb3,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xb4,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x46,0x00,0x00,0x00, -0xbb,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xbe,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x01,0x00,0x00, -0xc0,0x01,0x00,0x00,0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0xc1,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xc3,0x01,0x00,0x00, -0xc2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xca,0x01,0x00,0x00,0x46,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0xca,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, -0xcc,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0xde,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xa1,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xe2,0x01,0x00,0x00, -0xaf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, 
-0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0xcb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x2e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0x15,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfc,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0x00,0x02,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x4f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x04,0x02,0x00,0x00, -0x10,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0xfe,0x01,0x00,0x00, -0x50,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00, -0x19,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x1a,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x5c,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x40,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x9e,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x88,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x24,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, 
-0x85,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x2d,0x02,0x00,0x00, -0x1a,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x12,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x5f,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x34,0x02,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x33,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x35,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x37,0x02,0x00,0x00,0x69,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x02,0x00,0x00, -0x40,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x41,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x42,0x02,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x38,0x02,0x00,0x00, -0x45,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x4f,0x02,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x4f,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x52,0x02,0x00,0x00,0x77,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00, -0x40,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x9e,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, -0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x5d,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x61,0x02,0x00,0x00,0x53,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x61,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
-0x32,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x62,0x02,0x00,0x00, -0x49,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0x42,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x6b,0x02,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x6b,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x7f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0xc4,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x78,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x79,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x7a,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x7b,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0xfe,0x01,0x00,0x00,0x84,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0x86,0x02,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x86,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x88,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x97,0x02,0x00,0x00,0x96,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x97,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x8a,0x02,0x00,0x00,0x98,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, -0x87,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x9c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0xfe,0x01,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0xa3,0x02,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa3,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0x91,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x3e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, 
-0xb1,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0x77,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xb1,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0xab,0x00,0x05,0x00, -0x70,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0x5f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0xa7,0x02,0x00,0x00,0xb5,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0xb6,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0xb9,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xa4,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0xfe,0x01,0x00,0x00,0xbe,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0xc0,0x02,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xc1,0x02,0x00,0x00,0xc0,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x9a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xce,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, -0xd1,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xd4,0x02,0x00,0x00,0xd3,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xd4,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xfc,0x01,0x00,0x00, -0x7a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00, -0xdc,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0xdb,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0xdd,0x02,0x00,0x00,0xdc,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xa7,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xe6,0x02,0x00,0x00,0x09,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xe8,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x59,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0xe9,0x02,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0xea,0x02,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, 
-0x50,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00, -0xf5,0x02,0x00,0x00,0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0xf4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0xf6,0x02,0x00,0x00,0xf5,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf8,0x02,0x00,0x00,0xb5,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x00,0x03,0x00,0x00,0x23,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x00,0x03,0x00,0x00, -0x59,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x04,0x03,0x00,0x00,0x03,0x03,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x05,0x03,0x00,0x00, -0x04,0x03,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x05,0x03,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x07,0x03,0x00,0x00,0xf9,0x02,0x00,0x00, -0x06,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x07,0x03,0x00,0x00,0x85,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0xf6,0x02,0x00,0x00, -0x08,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xdd,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x09,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0xdb,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0x10,0x03,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x10,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x13,0x03,0x00,0x00, -0xc3,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x03,0x00,0x00,0x13,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x1b,0x03,0x00,0x00,0x3e,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x1d,0x03,0x00,0x00, -0x1b,0x03,0x00,0x00,0x59,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x1d,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x03,0x00,0x00, -0x1e,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x1f,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x20,0x03,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x14,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x22,0x03,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0x25,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x11,0x03,0x00,0x00, -0x23,0x03,0x00,0x00,0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2a,0x03,0x00,0x00,0xdb,0x02,0x00,0x00, -0x6f,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00, -0x2b,0x03,0x00,0x00,0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x2c,0x03,0x00,0x00,0x2b,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0xd1,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2f,0x03,0x00,0x00, -0x2e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x36,0x03,0x00,0x00,0x59,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x59,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x39,0x03,0x00,0x00,0x38,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3a,0x03,0x00,0x00,0x39,0x03,0x00,0x00, -0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, -0x3a,0x03,0x00,0x00,0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x3c,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3d,0x03,0x00,0x00,0x2f,0x03,0x00,0x00, -0x3c,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x3e,0x03,0x00,0x00,0x3d,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x3e,0x03,0x00,0x00, -0x25,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x03,0x00,0x00,0xdb,0x02,0x00,0x00,0x42,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x47,0x03,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x46,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x48,0x03,0x00,0x00, -0x47,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4a,0x03,0x00,0x00,0xda,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4b,0x03,0x00,0x00,0x4a,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x51,0x03,0x00,0x00, -0x09,0x02,0x00,0x00,0xc4,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x53,0x03,0x00,0x00,0x59,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x54,0x03,0x00,0x00, -0x51,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x54,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x55,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x56,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x58,0x03,0x00,0x00, -0x57,0x03,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x03,0x00,0x00, -0x4b,0x03,0x00,0x00,0x58,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x59,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x03,0x00,0x00, -0xdb,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0x61,0x03,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x62,0x03,0x00,0x00,0x61,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x64,0x03,0x00,0x00, -0xe3,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x65,0x03,0x00,0x00,0x64,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x6c,0x03,0x00,0x00,0x23,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x6f,0x03,0x00,0x00, -0x6c,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x70,0x03,0x00,0x00,0x6f,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x71,0x03,0x00,0x00, -0x70,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0x72,0x03,0x00,0x00,0x71,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x73,0x03,0x00,0x00, -0x72,0x03,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x03,0x00,0x00, -0x65,0x03,0x00,0x00,0x73,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x75,0x03,0x00,0x00,0x74,0x03,0x00,0x00, -0x85,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x76,0x03,0x00,0x00, -0x62,0x03,0x00,0x00,0x75,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0x77,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x5a,0x03,0x00,0x00, 
-0x76,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x03,0x00,0x00,0xdb,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x7d,0x03,0x00,0x00, -0xfa,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x7c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x7e,0x03,0x00,0x00, -0x7d,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x80,0x03,0x00,0x00,0xec,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x81,0x03,0x00,0x00,0x80,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x88,0x03,0x00,0x00, -0x3e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x8b,0x03,0x00,0x00,0x88,0x03,0x00,0x00,0x53,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8c,0x03,0x00,0x00, -0x8b,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8d,0x03,0x00,0x00,0x8c,0x03,0x00,0x00,0xab,0x00,0x05,0x00, -0x70,0x00,0x00,0x00,0x8e,0x03,0x00,0x00,0x8d,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x8f,0x03,0x00,0x00,0x8e,0x03,0x00,0x00,0x5f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x03,0x00,0x00,0x81,0x03,0x00,0x00,0x8f,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x91,0x03,0x00,0x00, -0x90,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x93,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x7e,0x03,0x00,0x00,0x91,0x03,0x00,0x00,0x77,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x98,0x03,0x00,0x00, -0xdb,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0xff,0x01,0x00,0x00,0x99,0x03,0x00,0x00,0xfa,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0x98,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x9a,0x03,0x00,0x00,0x99,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x9c,0x03,0x00,0x00, -0xf5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x9d,0x03,0x00,0x00,0x9c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xa4,0x03,0x00,0x00,0x59,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xa7,0x03,0x00,0x00, -0xa4,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xa8,0x03,0x00,0x00,0xa7,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa9,0x03,0x00,0x00, -0xa8,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x70,0x00,0x00,0x00, -0xaa,0x03,0x00,0x00,0xa9,0x03,0x00,0x00,0x16,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0xab,0x03,0x00,0x00, -0xaa,0x03,0x00,0x00,0x5f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x03,0x00,0x00, -0x9d,0x03,0x00,0x00,0xab,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xad,0x03,0x00,0x00,0xac,0x03,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0xaf,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9a,0x03,0x00,0x00, -0xad,0x03,0x00,0x00,0x93,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0x00,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xbd,0x03,0x00,0x00, -0x19,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xbe,0x03,0x00,0x00,0xb6,0x03,0x00,0x00,0xbd,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xc5,0x03,0x00,0x00, -0x34,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xc6,0x03,0x00,0x00,0xbe,0x03,0x00,0x00,0xc5,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xcd,0x03,0x00,0x00, -0x4f,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0xce,0x03,0x00,0x00,0xc6,0x03,0x00,0x00,0xcd,0x03,0x00,0x00, -0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0xd0,0x03,0x00,0x00, 
-0xc2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0xd8,0x03,0x00,0x00,0x6b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xdf,0x03,0x00,0x00,0x86,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xe0,0x03,0x00,0x00, -0xd8,0x03,0x00,0x00,0xdf,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xe7,0x03,0x00,0x00,0xa3,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xe8,0x03,0x00,0x00, -0xe0,0x03,0x00,0x00,0xe7,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xef,0x03,0x00,0x00,0xc0,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xf0,0x03,0x00,0x00, -0xe8,0x03,0x00,0x00,0xef,0x03,0x00,0x00,0x70,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xf2,0x03,0x00,0x00,0xd1,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0xf3,0x03,0x00,0x00, -0xf0,0x03,0x00,0x00,0xf2,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5a,0x00,0x00,0x00,0xf4,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xd0,0x03,0x00,0x00, -0xf3,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0xfa,0x03,0x00,0x00,0xdc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x01,0x04,0x00,0x00,0xf5,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x02,0x04,0x00,0x00, -0xfa,0x03,0x00,0x00,0x01,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x10,0x03,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x0a,0x04,0x00,0x00, -0x02,0x04,0x00,0x00,0x09,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x11,0x04,0x00,0x00,0x2b,0x03,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x12,0x04,0x00,0x00, -0x0a,0x04,0x00,0x00,0x11,0x04,0x00,0x00,0x70,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x14,0x04,0x00,0x00,0x26,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0x16,0x04,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x12,0x04,0x00,0x00, -0x14,0x04,0x00,0x00,0xf4,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x1d,0x04,0x00,0x00,0x47,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x24,0x04,0x00,0x00, -0x61,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x25,0x04,0x00,0x00,0x1d,0x04,0x00,0x00,0x24,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x2c,0x04,0x00,0x00, -0x7d,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x2d,0x04,0x00,0x00,0x25,0x04,0x00,0x00,0x2c,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x34,0x04,0x00,0x00, -0x99,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, -0x35,0x04,0x00,0x00,0x2d,0x04,0x00,0x00,0x34,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x37,0x04,0x00,0x00, -0x42,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x39,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x35,0x04,0x00,0x00,0x37,0x04,0x00,0x00,0x16,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x41,0x04,0x00,0x00, -0xa6,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x45,0x04,0x00,0x00,0xb4,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x46,0x04,0x00,0x00,0xd6,0x02,0x00,0x00, -0x45,0x04,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x47,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x64,0x02,0x00,0x00,0x41,0x04,0x00,0x00,0x46,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x4a,0x04,0x00,0x00, -0xee,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x4c,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x40,0x03,0x00,0x00,0x4a,0x04,0x00,0x00,0x47,0x04,0x00,0x00, -0x70,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x4f,0x04,0x00,0x00, 
-0x0a,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00, -0x51,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xaf,0x03,0x00,0x00,0x4f,0x04,0x00,0x00,0x4c,0x04,0x00,0x00, -0x85,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x55,0x04,0x00,0x00, -0x97,0x00,0x00,0x00,0x39,0x04,0x00,0x00,0x7f,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x8c,0x04,0x00,0x00,0x55,0x04,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5a,0x00,0x00,0x00,0x56,0x04,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x51,0x04,0x00,0x00,0x8c,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x58,0x04,0x00,0x00,0x66,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x59,0x04,0x00,0x00, -0x58,0x04,0x00,0x00,0x56,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x66,0x00,0x00,0x00,0x59,0x04,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0x88,0x04,0x00,0x00, -0x25,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x69,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6b,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x5d,0x04,0x00,0x00,0x5d,0x04,0x00,0x00,0x5e,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0x60,0x04,0x00,0x00,0xf8,0x00,0x02,0x00, -0x60,0x04,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x89,0x04,0x00,0x00,0x5f,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x77,0x04,0x00,0x00,0x63,0x04,0x00,0x00,0xad,0x00,0x05,0x00, -0x70,0x00,0x00,0x00,0x66,0x04,0x00,0x00,0x89,0x04,0x00,0x00, -0x16,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x62,0x04,0x00,0x00, -0x63,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x66,0x04,0x00,0x00,0x61,0x04,0x00,0x00,0x62,0x04,0x00,0x00, -0xf8,0x00,0x02,0x00,0x61,0x04,0x00,0x00,0xb1,0x00,0x05,0x00, -0x70,0x00,0x00,0x00,0x69,0x04,0x00,0x00,0x26,0x00,0x00,0x00, -0x89,0x04,0x00,0x00,0xf7,0x00,0x03,0x00,0x6b,0x04,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x69,0x04,0x00,0x00, -0x6a,0x04,0x00,0x00,0x6b,0x04,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6a,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x04,0x00,0x00,0x26,0x00,0x00,0x00,0x89,0x04,0x00,0x00, -0x41,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x70,0x04,0x00,0x00, -0x5e,0x00,0x00,0x00,0x6f,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x71,0x04,0x00,0x00,0x70,0x04,0x00,0x00, -0x41,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x72,0x04,0x00,0x00, -0x5e,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x73,0x04,0x00,0x00,0x72,0x04,0x00,0x00, -0x81,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x74,0x04,0x00,0x00, -0x73,0x04,0x00,0x00,0x71,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x72,0x04,0x00,0x00,0x74,0x04,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6b,0x04,0x00,0x00,0xf8,0x00,0x02,0x00,0x6b,0x04,0x00,0x00, -0xe0,0x00,0x04,0x00,0x5d,0x04,0x00,0x00,0x5d,0x04,0x00,0x00, -0x5e,0x04,0x00,0x00,0xf9,0x00,0x02,0x00,0x63,0x04,0x00,0x00, -0xf8,0x00,0x02,0x00,0x63,0x04,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x89,0x04,0x00,0x00, -0x50,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x60,0x04,0x00,0x00, -0xf8,0x00,0x02,0x00,0x62,0x04,0x00,0x00,0xaa,0x00,0x05,0x00, -0x70,0x00,0x00,0x00,0x79,0x04,0x00,0x00,0x26,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7b,0x04,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x79,0x04,0x00,0x00, -0x7a,0x04,0x00,0x00,0x7b,0x04,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7a,0x04,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x80,0x04,0x00,0x00,0x15,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x81,0x04,0x00,0x00, -0x80,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x83,0x04,0x00,0x00,0x81,0x04,0x00,0x00,0x11,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x84,0x04,0x00,0x00, -0x5e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x85,0x04,0x00,0x00,0x84,0x04,0x00,0x00, -0x41,0x00,0x06,0x00,0xff,0x01,0x00,0x00,0x86,0x04,0x00,0x00, -0x7f,0x04,0x00,0x00,0x16,0x00,0x00,0x00,0x83,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x86,0x04,0x00,0x00,0x85,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7b,0x04,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x04,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x14,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4b,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x7e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x83,0x00,0x05,0x00, +0x82,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, 
+0x9c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xab,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xab,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x14,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0xac,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xc7,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xcb,0x00,0x00,0x00, +0xc6,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc6,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xd0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xce,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xcf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x1b,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd0,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc8,0x00,0x00,0x00, 
+0xf8,0x00,0x02,0x00,0xc8,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc7,0x00,0x00,0x00,0xaa,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xe0,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xde,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xdf,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xe6,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xab,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xe0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xe0,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_q5_K_f32_len = 12048; +const uint64_t mul_mat_vec_q5_0_f32_len = 3696; -unsigned char mul_mat_vec_q6_K_f32_data[] = { +unsigned char mul_mat_vec_q5_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x97,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, -0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, -0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, -0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x13,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x7b,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x7c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x91,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x92,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x92,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x92,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x94,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x99,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9a,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9a,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x9a,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9c,0x01,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9c,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa5,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x13,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, 
-0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x69,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x74,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x7a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x7b,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7d,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x84,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x91,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x93,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x93,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x9d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xb3,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x99,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x9b,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9a,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x9b,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x03,0x00,0x00,0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x03,0x00,0x00,0x41,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x90,0x03,0x00,0x00, 
-0x61,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x03,0x00,0x00,0x22,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x92,0x03,0x00,0x00,0x42,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x03,0x00,0x00, -0x62,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x94,0x03,0x00,0x00,0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x95,0x03,0x00,0x00,0x43,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x96,0x03,0x00,0x00, -0x63,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x62,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x62,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, 
-0xa6,0x01,0x00,0x00,0x2b,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0x1b,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6a,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x63,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0x1a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x41,0x00,0x07,0x00, -0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xa5,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xb3,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xa5,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xb3,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x99,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xa5,0x00,0x00,0x00, 
-0x0f,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x0f,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x10,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x09,0x01,0x00,0x00, -0x11,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0xb4,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x36,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0xc0,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x20,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x99,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x4c,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0xa5,0x00,0x00,0x00, -0x45,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x4a,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0xe8,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x36,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0xc0,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x5f,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x71,0x00,0x04,0x00, 
-0x09,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x5f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x60,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x63,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x01,0x00,0x00, -0x66,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x68,0x01,0x00,0x00,0x67,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x68,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x99,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xa6,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xbe,0x01,0x00,0x00, -0xbd,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xb9,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0xbf,0x01,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0xc7,0x01,0x00,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x47,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0xca,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, -0xcb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00,0x16,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x36,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0xd1,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd4,0x01,0x00,0x00, -0xd3,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x43,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, 
-0xd5,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0x99,0x00,0x00,0x00,0x8e,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00,0xdd,0x01,0x00,0x00, -0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xde,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0xe2,0x01,0x00,0x00,0xdb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xde,0x01,0x00,0x00,0xe3,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0x41,0x00,0x00,0x00,0x8e,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xe8,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xf1,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, -0x25,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf3,0x01,0x00,0x00,0xf2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xf3,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0xf5,0x01,0x00,0x00, -0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf7,0x01,0x00,0x00,0xed,0x01,0x00,0x00,0xf6,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, -0xf7,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, -0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xe5,0x01,0x00,0x00, -0xfb,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xfc,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00, -0x99,0x00,0x00,0x00,0x8f,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x9d,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x0f,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x04,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0xc4,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x12,0x02,0x00,0x00, 
-0x11,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x13,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0xcb,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x18,0x02,0x00,0x00, -0x17,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x1c,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x1e,0x02,0x00,0x00,0x1d,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1f,0x02,0x00,0x00,0x1e,0x02,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00, -0x1f,0x02,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x21,0x02,0x00,0x00,0x20,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0x23,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0xfd,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x02,0x00,0x00,0x99,0x00,0x00,0x00, -0x90,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x29,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x2e,0x02,0x00,0x00,0x45,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x2e,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x36,0x02,0x00,0x00,0xe9,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x36,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x37,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0xcb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0x3e,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, -0x43,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x3e,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x40,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x02,0x00,0x00,0x39,0x02,0x00,0x00,0x42,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x44,0x02,0x00,0x00, -0x43,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00, -0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x47,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x53,0x00,0x00,0x00,0x49,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x47,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x53,0x00,0x00,0x00, 
-0x4a,0x02,0x00,0x00,0x6a,0x01,0x00,0x00,0x49,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x99,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x9d,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x59,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, -0xa6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x59,0x02,0x00,0x00, -0x5e,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00,0x87,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x02,0x00,0x00, -0x41,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xb3,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x62,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x02,0x00,0x00, -0x66,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x67,0x02,0x00,0x00,0xb8,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x47,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0xb3,0x00,0x00,0x00,0x6b,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x6a,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x6e,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x6e,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x70,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x72,0x02,0x00,0x00,0x68,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x73,0x02,0x00,0x00, -0x72,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x99,0x00,0x00,0x00, -0x91,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0xdb,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0x82,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x84,0x02,0x00,0x00,0x7e,0x02,0x00,0x00,0x83,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x41,0x00,0x00,0x00, -0x91,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00, -0x89,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
-0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x88,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x8a,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x8c,0x02,0x00,0x00,0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x91,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x25,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x93,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00, -0x93,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x96,0x02,0x00,0x00, -0x95,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x96,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, -0x53,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0x99,0x00,0x00,0x00,0x92,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, -0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xa4,0x02,0x00,0x00, -0xa3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x0f,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa8,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0xaa,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0xa9,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xab,0x02,0x00,0x00,0xaa,0x02,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, -0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x6b,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0xb8,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0xb9,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00,0xba,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0xbb,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0xb3,0x02,0x00,0x00,0xbc,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0xbd,0x02,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, 
-0xbe,0x02,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x43,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0xc1,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0x99,0x00,0x00,0x00,0x93,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x9d,0x00,0x00,0x00,0xc9,0x02,0x00,0x00,0x94,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0xce,0x02,0x00,0x00, -0x45,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0x87,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0x89,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x36,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0xd7,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd9,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0xdd,0x02,0x00,0x00,0x6b,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0xde,0x02,0x00,0x00, -0xdd,0x02,0x00,0x00,0x43,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0xde,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe0,0x02,0x00,0x00, -0xdf,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe1,0x02,0x00,0x00,0xe0,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0xe1,0x02,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe3,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, -0xe2,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0xe3,0x02,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0xe4,0x02,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0xe5,0x02,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0xe7,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0xe9,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x02,0x00,0x00,0x99,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00,0xf8,0x02,0x00,0x00, -0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0xf8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0xfd,0x02,0x00,0x00,0xa6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0xfd,0x02,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0xff,0x02,0x00,0x00, -0xf9,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xff,0x02,0x00,0x00, -0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x02,0x03,0x00,0x00,0x41,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00,0x04,0x03,0x00,0x00, 
-0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x04,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x03,0x00,0x00, -0x05,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x07,0x03,0x00,0x00, -0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0a,0x03,0x00,0x00,0x47,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0xb3,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x7f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x16,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x0d,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x0e,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x83,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00, -0x10,0x03,0x00,0x00,0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x11,0x03,0x00,0x00,0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x12,0x03,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x13,0x03,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x14,0x03,0x00,0x00,0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x16,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x99,0x00,0x00,0x00,0x94,0x03,0x00,0x00,0x41,0x00,0x06,0x00, -0x9d,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x94,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x1c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x1d,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0xdb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x23,0x03,0x00,0x00,0x22,0x03,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x1e,0x03,0x00,0x00, -0x23,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x24,0x03,0x00,0x00,0x87,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, -0x41,0x00,0x00,0x00,0x94,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0xb3,0x00,0x00,0x00,0x29,0x03,0x00,0x00,0x7f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x28,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x2b,0x03,0x00,0x00,0x2a,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, -0x2b,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0xb8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x31,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x32,0x03,0x00,0x00,0x31,0x03,0x00,0x00,0x25,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x33,0x03,0x00,0x00, -0x32,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x34,0x03,0x00,0x00,0x33,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x35,0x03,0x00,0x00,0x34,0x03,0x00,0x00, -0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x36,0x03,0x00,0x00,0x35,0x03,0x00,0x00,0x36,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x03,0x00,0x00, -0x2d,0x03,0x00,0x00,0x36,0x03,0x00,0x00,0x72,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x37,0x03,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x38,0x03,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x39,0x03,0x00,0x00,0x43,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, -0x3a,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x3c,0x03,0x00,0x00,0x25,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, -0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00,0x3d,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x00,0x03,0x00,0x00, -0x16,0x03,0x00,0x00,0x3c,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x42,0x03,0x00,0x00,0x99,0x00,0x00,0x00, -0x95,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00, -0x43,0x03,0x00,0x00,0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x44,0x03,0x00,0x00,0x43,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x0f,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x48,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x4a,0x03,0x00,0x00,0x44,0x03,0x00,0x00,0x49,0x03,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x4b,0x03,0x00,0x00, -0x4a,0x03,0x00,0x00,0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x50,0x03,0x00,0x00,0x04,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x51,0x03,0x00,0x00, -0x50,0x03,0x00,0x00,0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x52,0x03,0x00,0x00,0x51,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00, -0x52,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x57,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0x58,0x03,0x00,0x00,0x57,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x59,0x03,0x00,0x00,0x58,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5a,0x03,0x00,0x00,0x59,0x03,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5b,0x03,0x00,0x00, -0x5a,0x03,0x00,0x00,0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x03,0x00,0x00,0x5b,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0x5c,0x03,0x00,0x00, -0x72,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x5e,0x03,0x00,0x00, -0x5d,0x03,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5f,0x03,0x00,0x00,0x5e,0x03,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x5f,0x03,0x00,0x00, -0x43,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0x60,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, -0x53,0x00,0x00,0x00,0x63,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x4b,0x03,0x00,0x00,0x61,0x03,0x00,0x00, -0x3d,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x03,0x00,0x00,0x99,0x00,0x00,0x00,0x96,0x03,0x00,0x00, -0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x94,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x68,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6a,0x03,0x00,0x00, -0x69,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x6e,0x03,0x00,0x00,0x45,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x6f,0x03,0x00,0x00,0x6e,0x03,0x00,0x00, -0x85,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x70,0x03,0x00,0x00, 
-0x6a,0x03,0x00,0x00,0x6f,0x03,0x00,0x00,0x85,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x71,0x03,0x00,0x00,0x70,0x03,0x00,0x00, -0x87,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x76,0x03,0x00,0x00,0x29,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x72,0x00,0x00,0x00,0x77,0x03,0x00,0x00,0x76,0x03,0x00,0x00, -0x36,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x78,0x03,0x00,0x00,0x77,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x03,0x00,0x00,0x78,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x7d,0x03,0x00,0x00, -0x0b,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x72,0x00,0x00,0x00, -0x7e,0x03,0x00,0x00,0x7d,0x03,0x00,0x00,0x43,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7f,0x03,0x00,0x00, -0x7e,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x80,0x03,0x00,0x00,0x7f,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x03,0x00,0x00,0x80,0x03,0x00,0x00, -0x83,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x03,0x00,0x00,0x81,0x03,0x00,0x00,0x36,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x03,0x00,0x00, -0x79,0x03,0x00,0x00,0x82,0x03,0x00,0x00,0x72,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x84,0x03,0x00,0x00,0x83,0x03,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x85,0x03,0x00,0x00, -0x84,0x03,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x03,0x00,0x00,0x85,0x03,0x00,0x00,0x43,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x87,0x03,0x00,0x00, -0x86,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x53,0x00,0x00,0x00, -0x89,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x71,0x03,0x00,0x00,0x87,0x03,0x00,0x00,0x63,0x03,0x00,0x00, -0x81,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x8a,0x03,0x00,0x00, -0xea,0x02,0x00,0x00,0x89,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x5f,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x76,0x01,0x00,0x00, -0x75,0x01,0x00,0x00,0x8a,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5f,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0xa6,0x01,0x00,0x00, -0x25,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x62,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x64,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x7a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7d,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7d,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0xad,0x00,0x05,0x00, -0x69,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x16,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x7f,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x83,0x01,0x00,0x00,0x7e,0x01,0x00,0x00,0x7f,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7e,0x01,0x00,0x00,0xb1,0x00,0x05,0x00, -0x69,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x26,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xf7,0x00,0x03,0x00,0x88,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x86,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x26,0x00,0x00,0x00,0xa7,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0x41,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x53,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00, -0xe0,0x00,0x04,0x00,0x7a,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x80,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x80,0x01,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x95,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7f,0x01,0x00,0x00,0xaa,0x00,0x05,0x00, -0x69,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x26,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x96,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x97,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x9d,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x11,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x9d,0x00,0x00,0x00,0xa3,0x01,0x00,0x00, -0x9c,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x98,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x98,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t mul_mat_vec_q6_K_f32_len = 10992; - -unsigned char mul_mat_vec_q8_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, 0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x28,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x28,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x73,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x75,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xba,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbc,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xc4,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x9c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9d,0x00,0x00,0x00, 
+0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x9d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xdd,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xde,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xde,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe0,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe0,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe9,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x17,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x28,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x72,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x74,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xb9,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xbb,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xbb,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x14,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x25,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x2d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, 
+0x1e,0x00,0x06,0x00,0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9c,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9d,0x00,0x00,0x00, +0x9c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x9e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xa7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00, +0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0xdd,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xdf,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xde,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xdf,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x2c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xee,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7a,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x83,0x00,0x00,0x00, 
+0x87,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x7e,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x7e,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x7e,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x7e,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa7,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xa7,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x14,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xba,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xac,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xc3,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xcc,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xca,0x00,0x00,0x00, 
+0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcb,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd0,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xcc,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc4,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x00,0x00,0x00, +0xaa,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xdc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xda,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdb,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0xe5,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa7,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xe7,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xdc,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xdc,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t mul_mat_vec_q5_1_f32_len = 3592; + +unsigned char mul_mat_vec_q5_K_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x8a,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0xf7,0x01,0x00,0x00, +0x7c,0x04,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
+0x20,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x7f,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x80,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x80,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x80,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x81,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x82,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x82,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x84,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x84,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xf4,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xf5,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xf5,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xf5,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xf7,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xf7,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x79,0x04,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x7a,0x04,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7a,0x04,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x7a,0x04,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7c,0x04,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7c,0x04,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x84,0x04,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, 
+0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x58,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x6d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x7a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x7f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x80,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x83,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x9a,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6c,0x01,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0x41,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, +0x50,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
+0xf4,0x01,0x00,0x00,0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0xf6,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0xf6,0x01,0x00,0x00,0xf7,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xfc,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x0d,0x02,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, +0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9f,0x02,0x00,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x31,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x5b,0x04,0x00,0x00, +0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0x79,0x04,0x00,0x00, +0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7a,0x04,0x00,0x00, +0x79,0x04,0x00,0x00,0x20,0x00,0x04,0x00,0x7b,0x04,0x00,0x00, +0x0c,0x00,0x00,0x00,0x7a,0x04,0x00,0x00,0x3b,0x00,0x04,0x00, +0x7b,0x04,0x00,0x00,0x7c,0x04,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x84,0x04,0x00,0x00, +0x3f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x90,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, 
-0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5a,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x17,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x35,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x99,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0xad,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xa3,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x9e,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xa6,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa7,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x1e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xa8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0x99,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa0,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa0,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x9f,0x00,0x00,0x00,0xaa,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xb6,0x00,0x00,0x00, -0xb7,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb8,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 
+0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x24,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x66,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x85,0x04,0x00,0x00, +0x28,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x5a,0x04,0x00,0x00, +0x67,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x85,0x04,0x00,0x00,0x1a,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x67,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x85,0x04,0x00,0x00,0x19,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x85,0x04,0x00,0x00, 
+0x41,0x00,0x08,0x00,0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x7a,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x88,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9a,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, 
+0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xd5,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0xc3,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, +0xe6,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0x9a,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xf3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0xf6,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xda,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x00,0x01,0x00,0x00, +0xaa,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x01,0x00,0x00, +0x02,0x01,0x00,0x00,0xe5,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0xe7,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x05,0x01,0x00,0x00,0xf8,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x06,0x01,0x00,0x00, +0x05,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x10,0x01,0x00,0x00,0xd6,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x11,0x01,0x00,0x00, +0x10,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x13,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xda,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x1c,0x01,0x00,0x00, +0xb8,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
+0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0xe5,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00, +0xe7,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x20,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x21,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0xf4,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x2c,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, +0x2e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00,0xda,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0xc7,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x39,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0xe5,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x3b,0x01,0x00,0x00, +0xe7,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x3c,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9a,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x48,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0xda,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0x53,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x52,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x53,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x57,0x01,0x00,0x00, +0x56,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x59,0x01,0x00,0x00, +0x58,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5f,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0x5f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, 
+0x4a,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, +0x61,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x63,0x01,0x00,0x00,0x62,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00, +0xda,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x65,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, +0x43,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9a,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x6d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x71,0x01,0x00,0x00, +0x70,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x72,0x01,0x00,0x00,0x71,0x01,0x00,0x00,0xda,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x72,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x7b,0x01,0x00,0x00,0x46,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x53,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x85,0x01,0x00,0x00, +0x84,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x8d,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, +0x8d,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0x9e,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x9e,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, +0xa1,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, +0xa3,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xab,0x01,0x00,0x00,0x43,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0xac,0x01,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x44,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xac,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0xad,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0xae,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xaf,0x01,0x00,0x00, +0xda,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0xb1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, 
+0x4a,0x00,0x00,0x00,0xb2,0x01,0x00,0x00,0xb1,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, +0x43,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0x9a,0x00,0x00,0x00,0xba,0x01,0x00,0x00,0x84,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0xb9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, +0xbc,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xda,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0xbf,0x01,0x00,0x00, +0xbe,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xc0,0x01,0x00,0x00,0xbf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x43,0x00,0x00,0x00, +0xc6,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0xc7,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xcb,0x01,0x00,0x00,0xca,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xcc,0x01,0x00,0x00, +0xcb,0x01,0x00,0x00,0xda,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xce,0x01,0x00,0x00, +0xcd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xdf,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xe8,0x01,0x00,0x00,0xba,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0xc8,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xf1,0x01,0x00,0x00, +0x56,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00, +0xf8,0x01,0x00,0x00,0x13,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf9,0x01,0x00,0x00, +0xf8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xfb,0x01,0x00,0x00,0xf9,0x01,0x00,0x00,0x73,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0xfd,0x01,0x00,0x00, +0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0xfd,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x4c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0x06,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x07,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, 
+0x0a,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, +0x0c,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x0f,0x02,0x00,0x00, +0x01,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x0f,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00, +0xfb,0x01,0x00,0x00,0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xfc,0x01,0x00,0x00,0x17,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0x16,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x59,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x3d,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x20,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x22,0x02,0x00,0x00, +0x21,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x24,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x53,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x25,0x02,0x00,0x00, +0x24,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x26,0x02,0x00,0x00,0x25,0x02,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x29,0x02,0x00,0x00,0x1b,0x02,0x00,0x00,0x28,0x02,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, +0x29,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x2b,0x02,0x00,0x00,0x18,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, +0x10,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0x5c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x32,0x02,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x31,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x35,0x02,0x00,0x00,0x66,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x36,0x02,0x00,0x00, +0x35,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3b,0x02,0x00,0x00,0x3d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0x3c,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x3f,0x02,0x00,0x00, +0x3d,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3f,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x40,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x43,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, 
+0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x36,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x47,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x45,0x02,0x00,0x00,0x2c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0x6c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x4d,0x02,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x4c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x74,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x51,0x02,0x00,0x00, +0x50,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x02,0x00,0x00,0x3d,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, +0x41,0x00,0x08,0x00,0x9a,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0x84,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x57,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5a,0x02,0x00,0x00, +0x58,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x5b,0x02,0x00,0x00,0x5a,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, +0x5b,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x5d,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, +0x5d,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, +0x51,0x02,0x00,0x00,0x5e,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5f,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x62,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, +0x60,0x02,0x00,0x00,0x47,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0x3f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x69,0x02,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x68,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00,0x7c,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, +0x6c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x53,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x76,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00, +0x76,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x78,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x78,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x7b,0x02,0x00,0x00,0x6d,0x02,0x00,0x00,0x7a,0x02,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, +0x7b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x83,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0x82,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0x84,0x02,0x00,0x00, 
+0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x83,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x87,0x02,0x00,0x00,0x85,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x88,0x02,0x00,0x00,0x87,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, +0x21,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x92,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x02,0x00,0x00, +0x92,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x94,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x96,0x02,0x00,0x00,0x95,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x97,0x02,0x00,0x00,0x88,0x02,0x00,0x00,0x96,0x02,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x98,0x02,0x00,0x00, +0x97,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x99,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0x98,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0x9f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0xa1,0x02,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xa0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0x8e,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa5,0x02,0x00,0x00, +0xa4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0xac,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0xaf,0x02,0x00,0x00,0xac,0x02,0x00,0x00, +0x75,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb0,0x02,0x00,0x00,0xaf,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00, +0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0xb1,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xb5,0x02,0x00,0x00, +0x9a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xbd,0x02,0x00,0x00,0xfb,0x01,0x00,0x00,0xbc,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0xbe,0x02,0x00,0x00, +0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, +0xbe,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc1,0x02,0x00,0x00,0x97,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, +0xcc,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
+0xce,0x02,0x00,0x00,0xcd,0x02,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xd1,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xd0,0x02,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xd2,0x02,0x00,0x00, +0xd1,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00, +0xd4,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0xd2,0x02,0x00,0x00,0xb7,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0xf9,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xfc,0x01,0x00,0x00,0xda,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, +0xa4,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xde,0x02,0x00,0x00,0xdd,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x06,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, +0xe4,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xe6,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0xe7,0x02,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0xe9,0x02,0x00,0x00,0xe8,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0xea,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, +0xde,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0xec,0x02,0x00,0x00,0xeb,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, +0xd9,0x02,0x00,0x00,0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xfc,0x01,0x00,0x00,0xf3,0x02,0x00,0x00,0xf7,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0xf2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0xf4,0x02,0x00,0x00,0xf3,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, +0xb2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xf7,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0xfe,0x02,0x00,0x00,0x21,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x00,0x03,0x00,0x00, +0xfe,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x01,0x03,0x00,0x00,0x00,0x03,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x02,0x03,0x00,0x00, +0x01,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x04,0x03,0x00,0x00, +0x03,0x03,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x05,0x03,0x00,0x00, +0xf7,0x02,0x00,0x00,0x04,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x06,0x03,0x00,0x00,0x05,0x03,0x00,0x00, +0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x07,0x03,0x00,0x00, +0xf4,0x02,0x00,0x00,0x06,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xdb,0x02,0x00,0x00,0xec,0x02,0x00,0x00, +0x07,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0d,0x03,0x00,0x00,0xd9,0x02,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0x0e,0x03,0x00,0x00, 
+0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, +0x0e,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x11,0x03,0x00,0x00,0xc0,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x11,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x19,0x03,0x00,0x00, +0x3c,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x1b,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0x57,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, +0x1b,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1d,0x03,0x00,0x00,0x1c,0x03,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x1d,0x03,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x1f,0x03,0x00,0x00,0x1e,0x03,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x20,0x03,0x00,0x00,0x12,0x03,0x00,0x00,0x1f,0x03,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x21,0x03,0x00,0x00, +0x20,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00, +0x23,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x0f,0x03,0x00,0x00,0x21,0x03,0x00,0x00,0x08,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x28,0x03,0x00,0x00, +0xd9,0x02,0x00,0x00,0x6c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xfc,0x01,0x00,0x00,0x29,0x03,0x00,0x00,0xf7,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0x28,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2c,0x03,0x00,0x00, +0xce,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x2d,0x03,0x00,0x00,0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4a,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x57,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x36,0x03,0x00,0x00, +0x34,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x37,0x03,0x00,0x00,0x36,0x03,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x38,0x03,0x00,0x00, +0x37,0x03,0x00,0x00,0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00, +0x39,0x03,0x00,0x00,0x38,0x03,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, +0x39,0x03,0x00,0x00,0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x3b,0x03,0x00,0x00, +0x2d,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x3c,0x03,0x00,0x00,0x3b,0x03,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x3e,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x2a,0x03,0x00,0x00, +0x3c,0x03,0x00,0x00,0x23,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0xd9,0x02,0x00,0x00, +0x3f,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x45,0x03,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x44,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x46,0x03,0x00,0x00,0x45,0x03,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0xd7,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x49,0x03,0x00,0x00, +0x48,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x4f,0x03,0x00,0x00,0x06,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x51,0x03,0x00,0x00,0x57,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x52,0x03,0x00,0x00,0x4f,0x03,0x00,0x00,0x51,0x03,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x53,0x03,0x00,0x00, +0x52,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
+0x54,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x54,0x03,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x56,0x03,0x00,0x00,0x55,0x03,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x57,0x03,0x00,0x00,0x49,0x03,0x00,0x00,0x56,0x03,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x58,0x03,0x00,0x00, +0x57,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5e,0x03,0x00,0x00,0xd9,0x02,0x00,0x00,0x82,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0x5f,0x03,0x00,0x00, +0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x5e,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x60,0x03,0x00,0x00, +0x5f,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x62,0x03,0x00,0x00,0xe0,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x63,0x03,0x00,0x00,0x62,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6a,0x03,0x00,0x00, +0x21,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0x6d,0x03,0x00,0x00,0x6a,0x03,0x00,0x00,0x51,0x03,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x03,0x00,0x00, +0x6d,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x6f,0x03,0x00,0x00,0x6e,0x03,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0x70,0x03,0x00,0x00,0x6f,0x03,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0x71,0x03,0x00,0x00,0x70,0x03,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x72,0x03,0x00,0x00,0x63,0x03,0x00,0x00,0x71,0x03,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x73,0x03,0x00,0x00, +0x72,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x74,0x03,0x00,0x00,0x60,0x03,0x00,0x00,0x73,0x03,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0x75,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x46,0x03,0x00,0x00, +0x58,0x03,0x00,0x00,0x74,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7a,0x03,0x00,0x00,0xd9,0x02,0x00,0x00, +0x9f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x7b,0x03,0x00,0x00,0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0x7a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x7c,0x03,0x00,0x00,0x7b,0x03,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x7e,0x03,0x00,0x00,0xe9,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7f,0x03,0x00,0x00, +0x7e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, +0x86,0x03,0x00,0x00,0x3c,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x4a,0x00,0x00,0x00,0x89,0x03,0x00,0x00,0x86,0x03,0x00,0x00, +0x51,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8a,0x03,0x00,0x00,0x89,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8b,0x03,0x00,0x00,0x8a,0x03,0x00,0x00, +0xab,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x8c,0x03,0x00,0x00, +0x8b,0x03,0x00,0x00,0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, +0x14,0x00,0x00,0x00,0x8d,0x03,0x00,0x00,0x8c,0x03,0x00,0x00, +0x0d,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x8e,0x03,0x00,0x00,0x7f,0x03,0x00,0x00, +0x8d,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x8f,0x03,0x00,0x00,0x8e,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x91,0x03,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x7c,0x03,0x00,0x00,0x8f,0x03,0x00,0x00, +0x75,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x96,0x03,0x00,0x00,0xd9,0x02,0x00,0x00,0xbc,0x02,0x00,0x00, +0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00,0x97,0x03,0x00,0x00, 
+0xf7,0x01,0x00,0x00,0x15,0x00,0x00,0x00,0x96,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x98,0x03,0x00,0x00, +0x97,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x9a,0x03,0x00,0x00,0xf2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x9b,0x03,0x00,0x00,0x9a,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa2,0x03,0x00,0x00, +0x57,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, +0xa5,0x03,0x00,0x00,0xa2,0x03,0x00,0x00,0x51,0x03,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xa6,0x03,0x00,0x00, +0xa5,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xa7,0x03,0x00,0x00,0xa6,0x03,0x00,0x00,0xab,0x00,0x05,0x00, +0x6d,0x00,0x00,0x00,0xa8,0x03,0x00,0x00,0xa7,0x03,0x00,0x00, +0x15,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x14,0x00,0x00,0x00, +0xa9,0x03,0x00,0x00,0xa8,0x03,0x00,0x00,0x0d,0x02,0x00,0x00, +0x15,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xaa,0x03,0x00,0x00,0x9b,0x03,0x00,0x00,0xa9,0x03,0x00,0x00, +0x6f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xab,0x03,0x00,0x00, +0xaa,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00, +0xad,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x98,0x03,0x00,0x00,0xab,0x03,0x00,0x00,0x91,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xb4,0x03,0x00,0x00, +0xfd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xbb,0x03,0x00,0x00,0x17,0x02,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0xbc,0x03,0x00,0x00,0xb4,0x03,0x00,0x00, +0xbb,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xc3,0x03,0x00,0x00,0x32,0x02,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0xc4,0x03,0x00,0x00,0xbc,0x03,0x00,0x00, +0xc3,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xcb,0x03,0x00,0x00,0x4d,0x02,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0xcc,0x03,0x00,0x00,0xc4,0x03,0x00,0x00, +0xcb,0x03,0x00,0x00,0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0xce,0x03,0x00,0x00,0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0xd6,0x03,0x00,0x00,0x69,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xdd,0x03,0x00,0x00, +0x84,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0xde,0x03,0x00,0x00,0xd6,0x03,0x00,0x00,0xdd,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xe5,0x03,0x00,0x00, +0xa1,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0xe6,0x03,0x00,0x00,0xde,0x03,0x00,0x00,0xe5,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xed,0x03,0x00,0x00, +0xbe,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0xee,0x03,0x00,0x00,0xe6,0x03,0x00,0x00,0xed,0x03,0x00,0x00, +0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xf0,0x03,0x00,0x00, +0xcd,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0xf1,0x03,0x00,0x00,0xee,0x03,0x00,0x00,0xf0,0x03,0x00,0x00, +0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00,0xf2,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xcc,0x03,0x00,0x00, +0xce,0x03,0x00,0x00,0xf1,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0xf8,0x03,0x00,0x00,0xda,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0xff,0x03,0x00,0x00, +0xf3,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x00,0x04,0x00,0x00,0xf8,0x03,0x00,0x00,0xff,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x07,0x04,0x00,0x00, +0x0e,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x08,0x04,0x00,0x00,0x00,0x04,0x00,0x00,0x07,0x04,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0f,0x04,0x00,0x00, +0x29,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, 
+0x10,0x04,0x00,0x00,0x08,0x04,0x00,0x00,0x0f,0x04,0x00,0x00, +0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x12,0x04,0x00,0x00, +0x23,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00, +0x14,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x10,0x04,0x00,0x00,0x12,0x04,0x00,0x00,0xf2,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x1b,0x04,0x00,0x00, +0x45,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x22,0x04,0x00,0x00,0x5f,0x03,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x23,0x04,0x00,0x00,0x1b,0x04,0x00,0x00, +0x22,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x2a,0x04,0x00,0x00,0x7b,0x03,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x2b,0x04,0x00,0x00,0x23,0x04,0x00,0x00, +0x2a,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x32,0x04,0x00,0x00,0x97,0x03,0x00,0x00,0x81,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x33,0x04,0x00,0x00,0x2b,0x04,0x00,0x00, +0x32,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x35,0x04,0x00,0x00,0x3f,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x37,0x04,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x33,0x04,0x00,0x00,0x35,0x04,0x00,0x00, +0x14,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x3f,0x04,0x00,0x00,0xa2,0x00,0x00,0x00,0x70,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x43,0x04,0x00,0x00,0xb0,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x44,0x04,0x00,0x00, +0xd4,0x02,0x00,0x00,0x43,0x04,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x45,0x04,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x3f,0x04,0x00,0x00, +0x44,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x48,0x04,0x00,0x00,0xeb,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x4a,0x04,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0x48,0x04,0x00,0x00, +0x45,0x04,0x00,0x00,0x70,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x4d,0x04,0x00,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, +0x58,0x00,0x00,0x00,0x4f,0x04,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xad,0x03,0x00,0x00,0x4d,0x04,0x00,0x00, +0x4a,0x04,0x00,0x00,0x85,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x53,0x04,0x00,0x00,0x93,0x00,0x00,0x00,0x37,0x04,0x00,0x00, +0x7f,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x89,0x04,0x00,0x00, +0x53,0x04,0x00,0x00,0x0c,0x00,0x08,0x00,0x58,0x00,0x00,0x00, +0x54,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x4f,0x04,0x00,0x00,0x89,0x04,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x56,0x04,0x00,0x00, +0x63,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x57,0x04,0x00,0x00,0x56,0x04,0x00,0x00,0x54,0x04,0x00,0x00, +0x3e,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x57,0x04,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x04,0x00,0x00, +0x85,0x04,0x00,0x00,0x23,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x66,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x68,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x5b,0x04,0x00,0x00,0xf9,0x00,0x02,0x00,0x5d,0x04,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5d,0x04,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x86,0x04,0x00,0x00,0x5c,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x74,0x04,0x00,0x00,0x60,0x04,0x00,0x00, +0xac,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x63,0x04,0x00,0x00, +0x86,0x04,0x00,0x00,0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x5f,0x04,0x00,0x00,0x60,0x04,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x63,0x04,0x00,0x00,0x5e,0x04,0x00,0x00, +0x5f,0x04,0x00,0x00,0xf8,0x00,0x02,0x00,0x5e,0x04,0x00,0x00, 
+0xb0,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x66,0x04,0x00,0x00, +0x24,0x00,0x00,0x00,0x86,0x04,0x00,0x00,0xf7,0x00,0x03,0x00, +0x68,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x66,0x04,0x00,0x00,0x67,0x04,0x00,0x00,0x68,0x04,0x00,0x00, +0xf8,0x00,0x02,0x00,0x67,0x04,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x04,0x00,0x00,0x24,0x00,0x00,0x00, +0x86,0x04,0x00,0x00,0x41,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x6d,0x04,0x00,0x00,0x5b,0x00,0x00,0x00,0x6c,0x04,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x6e,0x04,0x00,0x00, +0x6d,0x04,0x00,0x00,0x41,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x6f,0x04,0x00,0x00,0x5b,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x70,0x04,0x00,0x00, +0x6f,0x04,0x00,0x00,0x81,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x71,0x04,0x00,0x00,0x70,0x04,0x00,0x00,0x6e,0x04,0x00,0x00, +0x3e,0x00,0x03,0x00,0x6f,0x04,0x00,0x00,0x71,0x04,0x00,0x00, +0xf9,0x00,0x02,0x00,0x68,0x04,0x00,0x00,0xf8,0x00,0x02,0x00, +0x68,0x04,0x00,0x00,0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x5b,0x04,0x00,0x00,0xf9,0x00,0x02,0x00, +0x60,0x04,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x04,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x04,0x00,0x00, +0x86,0x04,0x00,0x00,0x4d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5d,0x04,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x04,0x00,0x00, +0xaa,0x00,0x05,0x00,0x6d,0x00,0x00,0x00,0x76,0x04,0x00,0x00, +0x24,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x78,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x76,0x04,0x00,0x00,0x77,0x04,0x00,0x00,0x78,0x04,0x00,0x00, +0xf8,0x00,0x02,0x00,0x77,0x04,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0x7d,0x04,0x00,0x00,0x13,0x00,0x00,0x00, +0xe7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x7e,0x04,0x00,0x00,0x7d,0x04,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x80,0x04,0x00,0x00,0x7e,0x04,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x62,0x00,0x00,0x00, +0x81,0x04,0x00,0x00,0x5b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x82,0x04,0x00,0x00, +0x81,0x04,0x00,0x00,0x41,0x00,0x06,0x00,0xfc,0x01,0x00,0x00, +0x83,0x04,0x00,0x00,0x7c,0x04,0x00,0x00,0x15,0x00,0x00,0x00, +0x80,0x04,0x00,0x00,0x3e,0x00,0x03,0x00,0x83,0x04,0x00,0x00, +0x82,0x04,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x04,0x00,0x00, +0xf8,0x00,0x02,0x00,0x78,0x04,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_q8_0_f32_len = 3120; +const uint64_t mul_mat_vec_q5_K_f32_len = 12016; + +unsigned char mul_mat_vec_q6_K_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xc5,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, +0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, +0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, +0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0xa5,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x11,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x74,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x75,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x76,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x8d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8f,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa2,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa3,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa3,0x01,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xa3,0x01,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa5,0x01,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x01,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xae,0x01,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
+0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x50,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x65,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x74,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x77,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x8c,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x8e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x8e,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x99,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xb1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x60,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x68,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0x08,0x01,0x00,0x00, +0x1d,0x00,0x03,0x00,0xa2,0x01,0x00,0x00,0x50,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0xa3,0x01,0x00,0x00,0xa2,0x01,0x00,0x00, +0x20,0x00,0x04,0x00,0xa4,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, +0xa3,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0xa4,0x01,0x00,0x00, +0xa5,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xae,0x01,0x00,0x00, +0x40,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0xad,0x01,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x03,0x00,0x00, +0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbc,0x03,0x00,0x00,0x41,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xbd,0x03,0x00,0x00,0x61,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbe,0x03,0x00,0x00, +0x22,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbf,0x03,0x00,0x00,0x42,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc0,0x03,0x00,0x00,0x62,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x03,0x00,0x00, +0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xc2,0x03,0x00,0x00,0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc3,0x03,0x00,0x00,0x43,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x03,0x00,0x00, +0x63,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00, 
+0x40,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x5b,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x5e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x5e,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xaf,0x01,0x00,0x00,0x28,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x5f,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x1a,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0xaf,0x01,0x00,0x00,0x19,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0xaf,0x01,0x00,0x00, +0x41,0x00,0x07,0x00,0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0xa2,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0xb1,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0xb4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb7,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, 
+0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0xc0,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, +0xc4,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, +0x94,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0xa2,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0xdb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe9,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xfd,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
+0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0xff,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0xe1,0x00,0x00,0x00, +0x01,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x03,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0x02,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0x94,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x49,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x41,0x00,0x08,0x00, +0xa2,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x0d,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x16,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x22,0x01,0x00,0x00,0xb2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, +0xbf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x8a,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x01,0x00,0x00, +0x2f,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x34,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x36,0x01,0x00,0x00, +0x35,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x38,0x01,0x00,0x00, +0x37,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x18,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00, +0x94,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4b,0x01,0x00,0x00, +0x49,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0xa2,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, 
+0x4b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x4d,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x4d,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x4e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x51,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0xea,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00, +0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x67,0x01,0x00,0x00, +0xbf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x69,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x68,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, +0x69,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, +0x5e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x70,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x72,0x01,0x00,0x00, +0x71,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x74,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x51,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x3a,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, +0x94,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0xc2,0x01,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xc7,0x01,0x00,0x00, +0xa3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0xc9,0x01,0x00,0x00,0xc3,0x01,0x00,0x00, +0xc8,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xca,0x01,0x00,0x00,0xc9,0x01,0x00,0x00,0x80,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xcd,0x01,0x00,0x00, +0x3e,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0xb1,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xcd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, +0xd1,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0xb6,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd6,0x01,0x00,0x00, +0x44,0x00,0x00,0x00,0xad,0x01,0x00,0x00,0x41,0x00,0x08,0x00, +0xb1,0x00,0x00,0x00,0xd7,0x01,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0xd6,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, 
+0xd8,0x01,0x00,0x00,0xd7,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd8,0x01,0x00,0x00, +0x15,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, +0xdb,0x01,0x00,0x00,0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, +0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xde,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, +0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xdf,0x01,0x00,0x00, +0xde,0x01,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, +0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xe2,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x94,0x00,0x00,0x00, +0xbb,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00, +0xea,0x01,0x00,0x00,0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xe9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0xef,0x01,0x00,0x00,0xdc,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, +0xef,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xf1,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0xf0,0x01,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, +0xf1,0x01,0x00,0x00,0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf6,0x01,0x00,0x00,0x3e,0x00,0x00,0x00, +0xbb,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00, +0xf7,0x01,0x00,0x00,0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xf6,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xf8,0x01,0x00,0x00, +0xf7,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0xf9,0x01,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, +0xfa,0x01,0x00,0x00,0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x01,0x02,0x00,0x00, +0x00,0x02,0x00,0x00,0x9f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x03,0x02,0x00,0x00, +0x02,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x05,0x02,0x00,0x00, +0x04,0x02,0x00,0x00,0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x06,0x02,0x00,0x00,0xfb,0x01,0x00,0x00, +0x05,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x07,0x02,0x00,0x00,0x06,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x09,0x02,0x00,0x00, +0x08,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x0b,0x02,0x00,0x00, +0xf2,0x01,0x00,0x00,0x0a,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x50,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0xe2,0x01,0x00,0x00, 
+0x0b,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xbc,0x03,0x00,0x00, +0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00,0x13,0x02,0x00,0x00, +0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x12,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x14,0x02,0x00,0x00, +0x13,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x18,0x02,0x00,0x00,0x13,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, +0x14,0x02,0x00,0x00,0x19,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x21,0x02,0x00,0x00,0xcf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00, +0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x23,0x02,0x00,0x00,0x22,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x29,0x02,0x00,0x00, +0xd7,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x2a,0x02,0x00,0x00,0x29,0x02,0x00,0x00,0x8a,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, +0x2a,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x2c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x2e,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, +0x24,0x02,0x00,0x00,0x2e,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x31,0x02,0x00,0x00, +0x30,0x02,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x33,0x02,0x00,0x00, +0x32,0x02,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x35,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x1b,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0x0c,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, +0x94,0x00,0x00,0x00,0xbd,0x03,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x3b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x41,0x02,0x00,0x00, +0x4c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x43,0x02,0x00,0x00,0x3d,0x02,0x00,0x00, +0x42,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x44,0x02,0x00,0x00,0x43,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x4a,0x02,0x00,0x00, +0xf7,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x4b,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x8a,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, +0x4b,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0xd7,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x53,0x02,0x00,0x00, +0x52,0x02,0x00,0x00,0x68,0x01,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x53,0x02,0x00,0x00, 
+0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x55,0x02,0x00,0x00, +0x54,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x57,0x02,0x00,0x00, +0x56,0x02,0x00,0x00,0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, +0x57,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x59,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x59,0x02,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, +0x5a,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x44,0x02,0x00,0x00, +0x5c,0x02,0x00,0x00,0x35,0x02,0x00,0x00,0x81,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x5f,0x02,0x00,0x00,0x74,0x01,0x00,0x00, +0x5e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6c,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, +0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, +0x6e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x73,0x02,0x00,0x00,0xa3,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x75,0x02,0x00,0x00, +0x6f,0x02,0x00,0x00,0x74,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x02,0x00,0x00,0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x79,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00, +0x7c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x7e,0x02,0x00,0x00,0x7d,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x7f,0x02,0x00,0x00,0x7e,0x02,0x00,0x00, +0xb6,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x82,0x02,0x00,0x00,0x44,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00,0x83,0x02,0x00,0x00, +0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x90,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x83,0x02,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x85,0x02,0x00,0x00, +0x84,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x85,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x87,0x02,0x00,0x00, +0x86,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x88,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x89,0x02,0x00,0x00, +0x88,0x02,0x00,0x00,0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, +0x89,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x8b,0x02,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, +0x8c,0x02,0x00,0x00,0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, 
+0x50,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, +0x94,0x00,0x00,0x00,0xbe,0x03,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x96,0x02,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x95,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x96,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x9b,0x02,0x00,0x00, +0xdc,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x9c,0x02,0x00,0x00,0x9b,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x97,0x02,0x00,0x00, +0x9c,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x80,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x02,0x00,0x00, +0x3e,0x00,0x00,0x00,0xbe,0x03,0x00,0x00,0x41,0x00,0x08,0x00, +0xb1,0x00,0x00,0x00,0xa3,0x02,0x00,0x00,0x78,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xa2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xa4,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa5,0x02,0x00,0x00,0xa4,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, +0xa5,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xa7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0xb6,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xac,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0xad,0x02,0x00,0x00,0xac,0x02,0x00,0x00,0x9f,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00, +0xad,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xaf,0x02,0x00,0x00,0xae,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0xaf,0x02,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb1,0x02,0x00,0x00,0xb0,0x02,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, +0xa7,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xb4,0x02,0x00,0x00, +0xb3,0x02,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb5,0x02,0x00,0x00,0xb4,0x02,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, +0xb5,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xb7,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0xb6,0x02,0x00,0x00, +0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x76,0x02,0x00,0x00, +0x8e,0x02,0x00,0x00,0xb7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x94,0x00,0x00,0x00, +0xbf,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00, +0xbf,0x02,0x00,0x00,0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xbe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x13,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, +0xc4,0x02,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xc6,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, +0xc6,0x02,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xcd,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0xce,0x02,0x00,0x00, +0xcd,0x02,0x00,0x00,0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00, 
+0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd0,0x02,0x00,0x00, +0xcf,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xd5,0x02,0x00,0x00,0x83,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xd6,0x02,0x00,0x00,0xd5,0x02,0x00,0x00, +0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xd9,0x02,0x00,0x00, +0xd8,0x02,0x00,0x00,0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xda,0x02,0x00,0x00,0xd9,0x02,0x00,0x00, +0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xdb,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,0xda,0x02,0x00,0x00, +0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xdc,0x02,0x00,0x00, +0xdb,0x02,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xdd,0x02,0x00,0x00,0xdc,0x02,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, +0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xdf,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x0c,0x00,0x08,0x00, +0x50,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xc7,0x02,0x00,0x00,0xdf,0x02,0x00,0x00, +0xb8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe7,0x02,0x00,0x00,0x94,0x00,0x00,0x00,0xc0,0x03,0x00,0x00, +0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, +0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0xe7,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, +0xe8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0xed,0x02,0x00,0x00,0x4c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0xed,0x02,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xef,0x02,0x00,0x00, +0xe9,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0xef,0x02,0x00,0x00, +0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xf6,0x02,0x00,0x00,0xa3,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xf6,0x02,0x00,0x00, +0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf8,0x02,0x00,0x00,0xf7,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xf9,0x02,0x00,0x00,0xf8,0x02,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, +0x83,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0xff,0x02,0x00,0x00,0xfe,0x02,0x00,0x00,0x68,0x01,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x00,0x03,0x00,0x00, +0xff,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x01,0x03,0x00,0x00,0x00,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x02,0x03,0x00,0x00,0x01,0x03,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x04,0x03,0x00,0x00, +0xf9,0x02,0x00,0x00,0x03,0x03,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x05,0x03,0x00,0x00,0x04,0x03,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x06,0x03,0x00,0x00, +0x05,0x03,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x07,0x03,0x00,0x00,0x06,0x03,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x08,0x03,0x00,0x00, +0x07,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x0a,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xf0,0x02,0x00,0x00,0x08,0x03,0x00,0x00,0xe1,0x02,0x00,0x00, 
+0x81,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, +0x5f,0x02,0x00,0x00,0x0a,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x94,0x00,0x00,0x00, +0xc1,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00, +0x1a,0x03,0x00,0x00,0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x18,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x1b,0x03,0x00,0x00,0x1a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x1f,0x03,0x00,0x00,0xa3,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x20,0x03,0x00,0x00, +0x1f,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x21,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x20,0x03,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x22,0x03,0x00,0x00, +0x21,0x03,0x00,0x00,0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x03,0x00,0x00,0x3e,0x00,0x00,0x00, +0xc1,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00, +0x27,0x03,0x00,0x00,0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x25,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x28,0x03,0x00,0x00, +0x27,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x29,0x03,0x00,0x00,0x28,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x2a,0x03,0x00,0x00,0x29,0x03,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, +0x2a,0x03,0x00,0x00,0xb6,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2e,0x03,0x00,0x00,0x44,0x00,0x00,0x00, +0xc1,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00, +0x2f,0x03,0x00,0x00,0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x2e,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x30,0x03,0x00,0x00, +0x2f,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x31,0x03,0x00,0x00,0x30,0x03,0x00,0x00,0x15,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x03,0x00,0x00, +0x31,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x33,0x03,0x00,0x00,0x32,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x34,0x03,0x00,0x00,0x33,0x03,0x00,0x00, +0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x35,0x03,0x00,0x00,0x34,0x03,0x00,0x00,0x8a,0x00,0x00,0x00, +0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x36,0x03,0x00,0x00, +0x2b,0x03,0x00,0x00,0x35,0x03,0x00,0x00,0x72,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x37,0x03,0x00,0x00,0x36,0x03,0x00,0x00, +0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x38,0x03,0x00,0x00, +0x37,0x03,0x00,0x00,0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x39,0x03,0x00,0x00,0x38,0x03,0x00,0x00,0xc9,0x00,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x3a,0x03,0x00,0x00, +0x39,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x41,0x03,0x00,0x00,0x94,0x00,0x00,0x00,0xc2,0x03,0x00,0x00, +0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00,0x42,0x03,0x00,0x00, +0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x41,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x43,0x03,0x00,0x00, +0x42,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x47,0x03,0x00,0x00,0xdc,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x48,0x03,0x00,0x00,0x47,0x03,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x49,0x03,0x00,0x00, +0x43,0x03,0x00,0x00,0x48,0x03,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x4a,0x03,0x00,0x00,0x49,0x03,0x00,0x00, +0x80,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x03,0x00,0x00,0x3e,0x00,0x00,0x00,0xc2,0x03,0x00,0x00, +0x41,0x00,0x08,0x00,0xb1,0x00,0x00,0x00,0x4f,0x03,0x00,0x00, 
+0x78,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x50,0x03,0x00,0x00,0x4f,0x03,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x03,0x00,0x00, +0x50,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x52,0x03,0x00,0x00,0x51,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x53,0x03,0x00,0x00,0x52,0x03,0x00,0x00, +0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0x58,0x03,0x00,0x00,0x2f,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0x59,0x03,0x00,0x00,0x58,0x03,0x00,0x00, +0x9f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5a,0x03,0x00,0x00,0x59,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x5b,0x03,0x00,0x00,0x5a,0x03,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x5c,0x03,0x00,0x00, +0x5b,0x03,0x00,0x00,0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x5d,0x03,0x00,0x00,0x5c,0x03,0x00,0x00, +0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x5e,0x03,0x00,0x00,0x53,0x03,0x00,0x00,0x5d,0x03,0x00,0x00, +0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x5f,0x03,0x00,0x00, +0x5e,0x03,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x60,0x03,0x00,0x00,0x5f,0x03,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x61,0x03,0x00,0x00,0x60,0x03,0x00,0x00, +0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x62,0x03,0x00,0x00,0x61,0x03,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x63,0x03,0x00,0x00,0x4a,0x03,0x00,0x00, +0x62,0x03,0x00,0x00,0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00, +0x64,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x22,0x03,0x00,0x00,0x3a,0x03,0x00,0x00,0x63,0x03,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x03,0x00,0x00, +0x94,0x00,0x00,0x00,0xc3,0x03,0x00,0x00,0x41,0x00,0x06,0x00, +0x99,0x00,0x00,0x00,0x6b,0x03,0x00,0x00,0x8f,0x00,0x00,0x00, +0x15,0x00,0x00,0x00,0x6a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x6c,0x03,0x00,0x00,0x6b,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x70,0x03,0x00,0x00, +0x13,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x71,0x03,0x00,0x00,0x70,0x03,0x00,0x00,0x85,0x00,0x05,0x00, +0x50,0x00,0x00,0x00,0x72,0x03,0x00,0x00,0x6c,0x03,0x00,0x00, +0x71,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x73,0x03,0x00,0x00,0x72,0x03,0x00,0x00,0x80,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x79,0x03,0x00,0x00, +0x27,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00, +0x7a,0x03,0x00,0x00,0x79,0x03,0x00,0x00,0x8a,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x03,0x00,0x00, +0x7a,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x7c,0x03,0x00,0x00,0x7b,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0x81,0x03,0x00,0x00,0x2f,0x03,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0x82,0x03,0x00,0x00, +0x81,0x03,0x00,0x00,0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x83,0x03,0x00,0x00,0x82,0x03,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x84,0x03,0x00,0x00, +0x83,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x85,0x03,0x00,0x00,0x84,0x03,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x86,0x03,0x00,0x00, +0x85,0x03,0x00,0x00,0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x87,0x03,0x00,0x00,0x7c,0x03,0x00,0x00, +0x86,0x03,0x00,0x00,0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x88,0x03,0x00,0x00,0x87,0x03,0x00,0x00,0x72,0x00,0x04,0x00, 
+0x14,0x00,0x00,0x00,0x89,0x03,0x00,0x00,0x88,0x03,0x00,0x00, +0x82,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x8a,0x03,0x00,0x00, +0x89,0x03,0x00,0x00,0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x8b,0x03,0x00,0x00,0x8a,0x03,0x00,0x00, +0x0c,0x00,0x08,0x00,0x50,0x00,0x00,0x00,0x8d,0x03,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x73,0x03,0x00,0x00, +0x8b,0x03,0x00,0x00,0x64,0x03,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x93,0x03,0x00,0x00,0x94,0x00,0x00,0x00, +0xc4,0x03,0x00,0x00,0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00, +0x94,0x03,0x00,0x00,0x8f,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x93,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x95,0x03,0x00,0x00,0x94,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, +0x71,0x00,0x00,0x00,0x99,0x03,0x00,0x00,0x4c,0x01,0x00,0x00, +0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x9a,0x03,0x00,0x00, +0x99,0x03,0x00,0x00,0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x9b,0x03,0x00,0x00,0x95,0x03,0x00,0x00,0x9a,0x03,0x00,0x00, +0x85,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x9c,0x03,0x00,0x00, +0x9b,0x03,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x6e,0x00,0x00,0x00,0xa2,0x03,0x00,0x00,0x4f,0x03,0x00,0x00, +0xc2,0x00,0x05,0x00,0x6e,0x00,0x00,0x00,0xa3,0x03,0x00,0x00, +0xa2,0x03,0x00,0x00,0x8a,0x00,0x00,0x00,0x71,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xa4,0x03,0x00,0x00,0xa3,0x03,0x00,0x00, +0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa5,0x03,0x00,0x00, +0xa4,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, +0xaa,0x03,0x00,0x00,0x2f,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, +0x6e,0x00,0x00,0x00,0xab,0x03,0x00,0x00,0xaa,0x03,0x00,0x00, +0x68,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xac,0x03,0x00,0x00,0xab,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xad,0x03,0x00,0x00,0xac,0x03,0x00,0x00, +0xc7,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0xae,0x03,0x00,0x00, +0xad,0x03,0x00,0x00,0x7c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xaf,0x03,0x00,0x00,0xae,0x03,0x00,0x00, +0x8a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xb0,0x03,0x00,0x00,0xa5,0x03,0x00,0x00,0xaf,0x03,0x00,0x00, +0x72,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xb1,0x03,0x00,0x00, +0xb0,0x03,0x00,0x00,0x72,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0xb2,0x03,0x00,0x00,0xb1,0x03,0x00,0x00,0x82,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0xb3,0x03,0x00,0x00,0xb2,0x03,0x00,0x00, +0xc9,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0xb4,0x03,0x00,0x00,0xb3,0x03,0x00,0x00,0x0c,0x00,0x08,0x00, +0x50,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0x01,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x9c,0x03,0x00,0x00,0xb4,0x03,0x00,0x00, +0x8d,0x03,0x00,0x00,0x81,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0xb7,0x03,0x00,0x00,0x0b,0x03,0x00,0x00,0xb6,0x03,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, +0x5b,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0xb7,0x03,0x00,0x00, +0x3e,0x00,0x03,0x00,0x5b,0x00,0x00,0x00,0x80,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x01,0x00,0x00, +0xaf,0x01,0x00,0x00,0x23,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x5e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x60,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x86,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x86,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0x54,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x89,0x01,0x00,0x00, +0xac,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, 
+0xb0,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x88,0x01,0x00,0x00,0x89,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x8c,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x88,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x87,0x01,0x00,0x00, +0xb0,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, +0x24,0x00,0x00,0x00,0xb0,0x01,0x00,0x00,0xf7,0x00,0x03,0x00, +0x91,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x8f,0x01,0x00,0x00,0x90,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x90,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x24,0x00,0x00,0x00, +0xb0,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x96,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x97,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x50,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x99,0x01,0x00,0x00,0x97,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x98,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, +0xf9,0x00,0x02,0x00,0x91,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x91,0x01,0x00,0x00,0xe0,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x89,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, +0xb0,0x01,0x00,0x00,0x90,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x86,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x01,0x00,0x00, +0xaa,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, +0x24,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xa1,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9f,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0xa1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa0,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0xa6,0x01,0x00,0x00,0x13,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0xaa,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xab,0x01,0x00,0x00, +0xaa,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x99,0x00,0x00,0x00, +0xac,0x01,0x00,0x00,0xa5,0x01,0x00,0x00,0x15,0x00,0x00,0x00, +0xa9,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xac,0x01,0x00,0x00, +0xab,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0xa1,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0xa1,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t mul_mat_vec_q6_K_f32_len = 10960; + +unsigned char mul_mat_vec_q8_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0xc4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x24,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x4e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x72,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x74,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0xb3,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb5,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb5,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xbe,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xbf,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
+0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x2d,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x4b,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x71,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x34,0x00,0x06,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xb2,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xb3,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xb4,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x32,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x33,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 
+0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x15,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1f,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0xc2,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4b,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x43,0x00,0x00,0x00, +0x42,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5e,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x27,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, 
+0x57,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x29,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x14,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0x0c,0x00,0x08,0x00,0x14,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x70,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x1e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x20,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x32,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x96,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x96,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0xac,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x98,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9c,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, +0x2d,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xa1,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x14,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x14,0x00,0x00,0x00, +0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xa1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xa1,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x32,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x00,0x00,0x00, 
+0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0xc3,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x96,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x98,0x00,0x00,0x00, +0xaa,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xb1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xaf,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb0,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x29,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0xb6,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1b,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0xbb,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xba,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbd,0x00,0x00,0x00, +0xbc,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, +}; +const uint64_t mul_mat_vec_q8_0_f32_len = 3124; unsigned char norm_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -50951,112 +46385,284 @@ const uint64_t rope_neox_f32_len = 3792; unsigned char scale_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x37,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x9c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0xe3,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x1f,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x20,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x22,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x25,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x25,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x27,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x27,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x34,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x12,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x1a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x20,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x24,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x25,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
+0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd4,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xd5,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xd5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd7,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe0,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xe1,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xe1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xf3,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
+0x15,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0xc5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xcf,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xd4,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, +0xd4,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xd6,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xe0,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0xe0,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xe2,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xe8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xeb,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xec,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0xf2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0xc5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, +0xf2,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x36,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00, 
-0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x35,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x29,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x35,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xf4,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, +0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, +0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xd2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xd0,0x00,0x00,0x00, +0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0xd8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0x04,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x09,0x01,0x00,0x00, +0x08,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x12,0x01,0x00,0x00,0x0f,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0xcb,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
+0x1d,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x25,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, +0x18,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x36,0x01,0x00,0x00,0x2d,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, +0x3a,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, +0x3d,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0x3e,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3b,0x01,0x00,0x00,0x3f,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x42,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x44,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x47,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, +0x37,0x01,0x00,0x00,0x48,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x45,0x01,0x00,0x00, +0x49,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xdf,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x56,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, 
+0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x73,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xe8,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xec,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xea,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xe8,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf4,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t scale_f32_len = 1256; +const uint64_t scale_f32_len = 3320; unsigned char silu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -51170,50 +46776,65 @@ const uint64_t silu_f32_len = 1264; unsigned char soft_max_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 
-0x0e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x5c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x0f,0x00,0x0d,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x5c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0xa3,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x10,0x00,0x06,0x00, +0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 0x17,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x3a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x3b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x3b,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x73,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x3d,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 
-0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x8a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x8a,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x56,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x8a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x8c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x8c,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xbf,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0xc0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa1,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xa1,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x06,0x01,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x07,0x01,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0xc0,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc2,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xc2,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xff,0x00,0x00,0x00, +0x07,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x09,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x21,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x45,0x01,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -51226,264 +46847,346 @@ unsigned char soft_max_f32_data[] = { 0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x0a,0x00, 0x17,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x18,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 
+0x1a,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x27,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x1a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x1c,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x22,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x22,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x27,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x34,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x3c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x3c,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x44,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x1c,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5b,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x1a,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x72,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x74,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x89,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x8a,0x00,0x00,0x00, 
+0x89,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xa0,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xa1,0x00,0x00,0x00, +0xa0,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa2,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa2,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, 0x08,0x01,0x00,0x00,0x2b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, -0xbf,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xc1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xc1,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x00,0x00,0x80,0xff, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x0f,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x34,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x33,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x35,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x44,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x48,0x00,0x00,0x00, 
-0x49,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0xac,0x00,0x05,0x00,0x34,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x51,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x44,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x52,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x5f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x52,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x52,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x5e,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x16,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x16,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2e,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2d,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x1a,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0xad,0x00,0x05,0x00,0x34,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0x31,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6d,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0x34,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x77,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x07,0x00,0x16,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x28,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x1b,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x27,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x0f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x34,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x33,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x90,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x96,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x44,0x00,0x00,0x00,0xa0,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x48,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xac,0x00,0x05,0x00,0x34,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x1d,0x00,0x03,0x00, +0x06,0x01,0x00,0x00,0x16,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x20,0x00,0x04,0x00, +0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x3b,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x45,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0x00,0x00,0x80,0xff,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x13,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0xba,0x00,0x05,0x00, +0x27,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x28,0x00,0x00,0x00, 
+0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x29,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x1e,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x35,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x38,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x35,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x24,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x3b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x38,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x3c,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x38,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x38,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00, +0x46,0x01,0x00,0x00,0x3b,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x48,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x35,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x48,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, +0x34,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x48,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x48,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x47,0x01,0x00,0x00, +0x4b,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x0c,0x00,0x07,0x00, +0x16,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x57,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x05,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x60,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x5b,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x64,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x64,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0xb0,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x65,0x00,0x00,0x00, 
+0x66,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x65,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x7c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, +0xac,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x85,0x00,0x00,0x00, 0x1e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa7,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x44,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xaa,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xaa,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0xb2,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x16,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x0c,0x00,0x06,0x00, -0x16,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x44,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc5,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x1a,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xad,0x00,0x05,0x00,0x34,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x03,0x01,0x00,0x00,0x31,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0xcb,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xcf,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xca,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, 
-0x03,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0x34,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xd5,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xd3,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x27,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0xde,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x28,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x85,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x95,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x87,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x93,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x95,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x94,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x16,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x51,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, +0xac,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, +0x9a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x9e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x9b,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x9d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x7c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, +0x54,0x01,0x00,0x00,0xa6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x9e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x9e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x9e,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x16,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x97,0x00,0x00,0x00, +0x56,0x01,0x00,0x00,0x0c,0x00,0x07,0x00,0x16,0x00,0x00,0x00, +0xab,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x61,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x67,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x67,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0x48,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x64,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x66,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x1a,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0xb2,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0xcd,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xad,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb5,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0x27,0x00,0x00,0x00, +0xbd,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xbf,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xbd,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0xbf,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, +0x0c,0x00,0x07,0x00,0x16,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, +0xc9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0xca,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbf,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xbf,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb6,0x00,0x00,0x00,0xc3,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, +0xcd,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x1b,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb5,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x60,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xcf,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x61,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd5,0x00,0x00,0x00, -0xe0,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xcc,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcc,0x00,0x00,0x00,0xc3,0x00,0x05,0x00, -0x1a,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x1b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xc9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xe8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xe8,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x0f,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xb0,0x00,0x05,0x00, -0x34,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x33,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0xea,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xf0,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xe9,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x13,0x00,0x00,0x00, 
-0x33,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x44,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x0f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0xd8,0x00,0x00,0x00,0xb0,0x00,0x05,0x00,0x27,0x00,0x00,0x00, +0xdd,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x6c,0x00,0x00,0x00, +0xf6,0x00,0x04,0x00,0xd7,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0xdd,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xd6,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, +0xe2,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x7c,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, +0xac,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0xee,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xf1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xee,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xf0,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xf7,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x88,0x00,0x05,0x00,0x16,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf8,0x00,0x00,0x00,0xfa,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00, -0x04,0x01,0x00,0x00,0x20,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xe8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xea,0x00,0x00,0x00, +0xf8,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xf1,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf1,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x16,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0xf9,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0xfa,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x16,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0xe8,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x4e,0x01,0x00,0x00, +0x83,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, +0xfc,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x0c,0x00,0x06,0x00, +0x16,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x61,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x04,0x01,0x00,0x00, +0x03,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x61,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0x7c,0x00,0x00,0x00,0x0c,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x6a,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x0c,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0xd8,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd8,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, +0x4a,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xd5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd7,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x10,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x10,0x01,0x00,0x00,0xf5,0x00,0x07,0x00, +0x1a,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0xb2,0x00,0x00,0x00, +0xd7,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x13,0x01,0x00,0x00, +0xad,0x00,0x05,0x00,0x27,0x00,0x00,0x00,0x16,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0x12,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x01,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x16,0x01,0x00,0x00,0x11,0x01,0x00,0x00, +0x12,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x11,0x01,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0x4b,0x01,0x00,0x00,0xb0,0x00,0x05,0x00,0x27,0x00,0x00,0x00, +0x1a,0x01,0x00,0x00,0x0f,0x00,0x00,0x00,0x19,0x01,0x00,0x00, +0xf7,0x00,0x03,0x00,0x1c,0x01,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x1a,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, +0x1c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1b,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, +0x0f,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x60,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x5c,0x00,0x00,0x00, +0x21,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00, +0x23,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x61,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x16,0x00,0x00,0x00,0x26,0x01,0x00,0x00, +0x25,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, +0x61,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, +0x1c,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x01,0x00,0x00, +0xe0,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x13,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x13,0x01,0x00,0x00,0xc3,0x00,0x05,0x00, +0x1a,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, +0x1b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x10,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x12,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x16,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0xcf,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, +0x2f,0x01,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, +0x4c,0x01,0x00,0x00,0x0f,0x00,0x00,0x00,0x12,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0xb0,0x00,0x05,0x00, +0x27,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x6c,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x31,0x01,0x00,0x00, +0x30,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x37,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x31,0x01,0x00,0x00, +0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x13,0x00,0x00,0x00, +0x6c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, +0x41,0x00,0x06,0x00,0x7c,0x00,0x00,0x00,0x3f,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x3d,0x01,0x00,0x00, +0x3d,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x40,0x01,0x00,0x00, +0x3f,0x01,0x00,0x00,0x88,0x00,0x05,0x00,0x16,0x00,0x00,0x00, +0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, +0x3e,0x00,0x03,0x00,0x3f,0x01,0x00,0x00,0x41,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, +0x4c,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 
+0x2f,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x31,0x01,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t soft_max_f32_len = 3752; +const uint64_t soft_max_f32_len = 4916; unsigned char split_k_reduce_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -51610,105 +47313,277 @@ const uint64_t split_k_reduce_len = 1416; unsigned char sqr_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x37,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x9c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x12,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x34,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x12,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x13,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x1a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x21,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x29,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x33,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x07,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x12,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xc7,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xd6,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xd7,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xd7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xd7,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xd9,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xd9,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xe2,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0xe2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xe2,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xe4,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xe4,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xf3,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x11,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x16,0x00,0x12,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x1e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x15,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xbe,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0xc5,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 
+0x20,0x00,0x04,0x00,0xc6,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0xc5,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xc6,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xc9,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0xcf,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xd6,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xd7,0x00,0x00,0x00, +0xd6,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xd8,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xd8,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xde,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0xe1,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xe2,0x00,0x00,0x00, +0xe1,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe3,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xe3,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0xc5,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, +0xf1,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x35,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x36,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xf4,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0xc8,0x00,0x00,0x00, +0xf5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xf5,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0xc9,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0xc7,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, +0xae,0x00,0x05,0x00,0xcf,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, +0xcb,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0xd2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0xd0,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xd1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xf4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xd2,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x03,0x01,0x00,0x00, 0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00, 
-0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x24,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x31,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x35,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x35,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00,0x03,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x05,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x07,0x01,0x00,0x00, +0x04,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x1e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x07,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x0b,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x0f,0x01,0x00,0x00, +0x06,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x15,0x01,0x00,0x00,0x12,0x01,0x00,0x00,0x09,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1d,0x01,0x00,0x00,0x06,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1e,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, +0x1e,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00,0x22,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x2a,0x01,0x00,0x00,0x18,0x01,0x00,0x00,0x25,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, +0x2a,0x01,0x00,0x00,0x09,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x2d,0x01,0x00,0x00, +0x09,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x37,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x36,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x39,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x39,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, +0x0b,0x01,0x00,0x00,0x3a,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, +0x3e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x40,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x42,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x42,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x01,0x00,0x00, 
+0x2d,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x40,0x01,0x00,0x00, +0x44,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x47,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x01,0x00,0x00, +0x47,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x49,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x48,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, +0x45,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x41,0x00,0x06,0x00, +0xde,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, +0xcc,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x54,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x56,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x01,0x00,0x00, +0x55,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x58,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x01,0x00,0x00,0xcb,0x00,0x00,0x00,0x5b,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x60,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00,0x60,0x01,0x00,0x00, +0x57,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, +0xcb,0x00,0x00,0x00,0x66,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x57,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x6f,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x6e,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x01,0x00,0x00, +0x6f,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x73,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7b,0x01,0x00,0x00,0x69,0x01,0x00,0x00,0x76,0x01,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, +0x7b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, +0x5a,0x01,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x88,0x01,0x00,0x00,0x7b,0x01,0x00,0x00,0x87,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, +0x5c,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x14,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x84,0x00,0x05,0x00, 
+0x06,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, +0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x01,0x00,0x00,0x8c,0x01,0x00,0x00,0x90,0x01,0x00,0x00, +0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00,0x93,0x01,0x00,0x00, +0x14,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x01,0x00,0x00, +0x7e,0x01,0x00,0x00,0x94,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x91,0x01,0x00,0x00, +0x95,0x01,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, +0x98,0x01,0x00,0x00,0x14,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x99,0x01,0x00,0x00, +0x98,0x01,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x9a,0x01,0x00,0x00,0x88,0x01,0x00,0x00,0x99,0x01,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00, +0x96,0x01,0x00,0x00,0x9a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, +0x9b,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0xef,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xde,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, +0xe4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xec,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xf4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xf4,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t sqr_f32_len = 1188; +const uint64_t sqr_f32_len = 3252; diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index bc316c3f3..5a1b3f477 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -69,6 +69,33 @@ struct vk_queue { vk::PipelineStageFlags stage_flags; }; +struct vk_pipeline_struct { + std::string name; + vk::ShaderModule shader_module; + vk::DescriptorSetLayout dsl; + std::vector descriptor_pools; + std::vector descriptor_sets; + uint32_t descriptor_set_idx; + vk::PipelineLayout layout; + vk::Pipeline pipeline; + uint32_t push_constant_size; + uint32_t parameter_count; + std::array wg_denoms; + uint32_t align; +}; + +typedef std::shared_ptr vk_pipeline; +typedef std::weak_ptr vk_pipeline_ref; + +static void ggml_vk_destroy_pipeline(vk::Device& device, vk_pipeline& pipeline); + +struct vk_matmul_pipeline_struct { + vk_pipeline l, m, s; + vk_pipeline a_l, a_m, a_s; +}; + +typedef std::shared_ptr vk_matmul_pipeline; + struct vk_device { vk::PhysicalDevice physical_device; vk::PhysicalDeviceProperties properties; @@ -84,10 +111,61 @@ struct vk_device { uint32_t subgroup_size; bool uma; + bool initialized; + size_t idx; + + vk_matmul_pipeline pipeline_matmul_f32; + vk_matmul_pipeline pipeline_matmul_f16; + vk_matmul_pipeline pipeline_matmul_f16_f32; + vk_pipeline pipeline_matmul_split_k_reduce; + + vk_matmul_pipeline pipeline_dequant_mul_mat_mat[VK_NUM_TYPES]; + + vk_pipeline pipeline_dequant[VK_NUM_TYPES]; + vk_pipeline pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; + + vk_pipeline pipeline_mul_mat_vec_p021_f16_f32; + vk_pipeline pipeline_mul_mat_vec_nc_f16_f32; + vk_pipeline pipeline_get_rows[VK_NUM_TYPES]; + vk_pipeline pipeline_get_rows_f32[VK_NUM_TYPES]; + vk_pipeline pipeline_mul_f32; + vk_pipeline pipeline_add_f32; + vk_pipeline pipeline_scale_f32; + vk_pipeline pipeline_sqr_f32; + vk_pipeline pipeline_clamp_f32; + vk_pipeline pipeline_cpy_f32_f32, pipeline_cpy_f32_f16, pipeline_cpy_f16_f16; + vk_pipeline pipeline_norm_f32; + vk_pipeline pipeline_rms_norm_f32; + vk_pipeline pipeline_gelu_f32; + vk_pipeline 
pipeline_silu_f32; + vk_pipeline pipeline_relu_f32; + vk_pipeline pipeline_diag_mask_inf_f32; + vk_pipeline pipeline_soft_max_f32; + vk_pipeline pipeline_rope_f32, pipeline_rope_f16; + vk_pipeline pipeline_rope_neox_f32, pipeline_rope_neox_f16; + vk_pipeline pipeline_argsort_f32; + + std::vector pipelines; + ~vk_device() { #ifdef GGML_VULKAN_DEBUG std::cerr << "destroy device " << name << std::endl; #endif + device.destroyCommandPool(compute_queue.pool); + if (!single_queue) { + device.destroyCommandPool(transfer_queue.pool); + } + + for (auto& pipeline : pipelines) { + if (pipeline.expired()) { + continue; + } + + vk_pipeline pl = pipeline.lock(); + ggml_vk_destroy_pipeline(device, pl); + } + pipelines.clear(); + device.destroy(); } }; @@ -125,21 +203,6 @@ struct vk_subbuffer { uint64_t size; }; -struct vk_pipeline { - std::string name; - vk::ShaderModule shader_module; - vk::DescriptorSetLayout dsl; - std::vector descriptor_pools; - std::vector descriptor_sets; - uint32_t descriptor_set_idx; - vk::PipelineLayout layout; - vk::Pipeline pipeline; - uint32_t push_constant_size; - uint32_t parameter_count; - std::array wg_denoms; - uint32_t align; -}; - struct vk_semaphore { vk::Semaphore s; uint64_t value; @@ -160,11 +223,21 @@ struct vk_op_push_constants { float param2; }; -struct vk_op_cpy_push_constants { +struct vk_op_unary_push_constants { uint32_t ne; - uint32_t ne00; uint32_t ne01; uint32_t nb00; uint32_t nb01; uint32_t nb02; - uint32_t ne10; uint32_t ne11; uint32_t nb10; uint32_t nb11; uint32_t nb12; + uint32_t ne00; uint32_t ne01; uint32_t ne02; uint32_t ne03; uint32_t nb00; uint32_t nb01; uint32_t nb02; uint32_t nb03; + uint32_t ne10; uint32_t ne11; uint32_t ne12; uint32_t ne13; uint32_t nb10; uint32_t nb11; uint32_t nb12; uint32_t nb13; uint32_t d_offset; + float param1; float param2; +}; + +struct vk_op_binary_push_constants { + uint32_t ne; + uint32_t ne00; uint32_t ne01; uint32_t ne02; uint32_t ne03; uint32_t nb00; uint32_t nb01; uint32_t nb02; uint32_t nb03; + uint32_t ne10; uint32_t ne11; uint32_t ne12; uint32_t ne13; uint32_t nb10; uint32_t nb11; uint32_t nb12; uint32_t nb13; + uint32_t ne20; uint32_t ne21; uint32_t ne22; uint32_t ne23; uint32_t nb20; uint32_t nb21; uint32_t nb22; uint32_t nb23; + uint32_t d_offset; + float param1; float param2; }; struct vk_op_diag_mask_push_constants { @@ -196,6 +269,22 @@ struct vk_op_rope_neox_push_constants { float inv_ndims; }; +struct vk_op_soft_max_push_constants { + uint32_t KX; + uint32_t KY; + uint32_t KZ; + float scale; + float max_bias; + float m0; + float m1; + uint32_t n_head_log2; +}; + +struct vk_op_argsort_push_constants { + uint32_t ncols; + bool ascending; +}; + // Allow pre-recording command buffers struct vk_staging_memcpy { vk_staging_memcpy(void * _dst, const void * _src, size_t _n) : dst(_dst), src(_src), n(_n) {} @@ -236,7 +325,6 @@ struct ggml_tensor_extra_gpu { }; struct ggml_vk_garbage_collector { - std::vector pipelines; std::vector tl_semaphores; std::vector semaphores; std::vector events; @@ -247,35 +335,7 @@ struct ggml_vk_garbage_collector { struct ggml_backend_vk_context { std::string name; - std::weak_ptr device; - vk_pipeline pipeline_matmul_f32_l, pipeline_matmul_f32_m, pipeline_matmul_f32_s; - vk_pipeline pipeline_matmul_f32_aligned_l, pipeline_matmul_f32_aligned_m, pipeline_matmul_f32_aligned_s; - vk_pipeline pipeline_matmul_f16_l, pipeline_matmul_f16_m, pipeline_matmul_f16_s; - vk_pipeline pipeline_matmul_f16_aligned_l, pipeline_matmul_f16_aligned_m, pipeline_matmul_f16_aligned_s; - vk_pipeline 
pipeline_matmul_f16_f32_l, pipeline_matmul_f16_f32_m, pipeline_matmul_f16_f32_s; - vk_pipeline pipeline_matmul_f16_f32_aligned_l, pipeline_matmul_f16_f32_aligned_m, pipeline_matmul_f16_f32_aligned_s; - vk_pipeline pipeline_matmul_split_k_reduce; - vk_pipeline pipeline_dequant[VK_NUM_TYPES]; - vk_pipeline pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; - vk_pipeline pipeline_mul_mat_vec_p021_f16_f32; - vk_pipeline pipeline_mul_mat_vec_nc_f16_f32; - vk_pipeline pipeline_get_rows[VK_NUM_TYPES]; - vk_pipeline pipeline_get_rows_f32[VK_NUM_TYPES]; - vk_pipeline pipeline_mul_f32; - vk_pipeline pipeline_add_f32; - vk_pipeline pipeline_scale_f32; - vk_pipeline pipeline_sqr_f32; - vk_pipeline pipeline_clamp_f32; - vk_pipeline pipeline_cpy_f32_f32, pipeline_cpy_f32_f16, pipeline_cpy_f16_f16; - vk_pipeline pipeline_norm_f32; - vk_pipeline pipeline_rms_norm_f32; - vk_pipeline pipeline_gelu_f32; - vk_pipeline pipeline_silu_f32; - vk_pipeline pipeline_relu_f32; - vk_pipeline pipeline_diag_mask_inf_f32; - vk_pipeline pipeline_soft_max_f32; - vk_pipeline pipeline_rope_f32, pipeline_rope_f16; - vk_pipeline pipeline_rope_neox_f32, pipeline_rope_neox_f16; + std::shared_ptr device; size_t semaphore_idx, event_idx; ggml_vk_garbage_collector gc; @@ -304,13 +364,31 @@ struct vk_instance { std::vector device_indices; - std::shared_ptr devices[GGML_VK_MAX_DEVICES]; ggml_backend_t backends[GGML_VK_MAX_DEVICES]; ggml_backend_vk_context contexts[GGML_VK_MAX_DEVICES]; ggml_backend_buffer_type buffer_types[GGML_VK_MAX_DEVICES]; bool initialized[GGML_VK_MAX_DEVICES]; }; +static std::shared_ptr ggml_vk_get_device(size_t idx) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_get_device(" << idx << ")" << std::endl; +#endif + static std::weak_ptr devices[GGML_VK_MAX_DEVICES]; + + if (devices[idx].expired()) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "Initializing new vk_device" << std::endl; +#endif + std::shared_ptr device = std::make_shared(); + device->initialized = false; + devices[idx] = device; + return device; + } + + return devices[idx].lock(); +} + #ifdef GGML_VULKAN_CHECK_RESULTS static size_t vk_skip_checks; static size_t vk_output_tensor; @@ -334,14 +412,15 @@ static void ggml_vk_create_pipeline(ggml_backend_vk_context * ctx, vk_pipeline& GGML_ASSERT(parameter_count > 0); GGML_ASSERT(wg_denoms[0] > 0 && wg_denoms[1] > 0 && wg_denoms[2] > 0); // NOLINT - pipeline.name = name; - pipeline.parameter_count = parameter_count; - pipeline.push_constant_size = push_constant_size; - pipeline.wg_denoms = wg_denoms; - pipeline.align = align; + pipeline = std::make_shared(); + pipeline->name = name; + pipeline->parameter_count = parameter_count; + pipeline->push_constant_size = push_constant_size; + pipeline->wg_denoms = wg_denoms; + pipeline->align = align; vk::ShaderModuleCreateInfo shader_module_create_info({}, spv_size, reinterpret_cast(spv_data)); - pipeline.shader_module = ctx->device.lock()->device.createShaderModule(shader_module_create_info); + pipeline->shader_module = ctx->device->device.createShaderModule(shader_module_create_info); std::vector dsl_binding; std::vector dsl_binding_flags; @@ -355,49 +434,49 @@ static void ggml_vk_create_pipeline(ggml_backend_vk_context * ctx, vk_pipeline& vk::PushConstantRange pcr( vk::ShaderStageFlagBits::eCompute, 0, - pipeline.push_constant_size + pipeline->push_constant_size ); vk::DescriptorSetLayoutCreateInfo descriptor_set_layout_create_info( {}, dsl_binding); descriptor_set_layout_create_info.setPNext(&dslbfci); - pipeline.dsl = 
ctx->device.lock()->device.createDescriptorSetLayout(descriptor_set_layout_create_info); + pipeline->dsl = ctx->device->device.createDescriptorSetLayout(descriptor_set_layout_create_info); // Check if device supports multiple descriptors per pool - if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { + if (ctx->device->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { const uint32_t alloc_count = 2; // Try allocating multiple sets from one pool // This fails on AMD for some reason, so add a fall back to allocating one pool per set - vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); + vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline->parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, alloc_count, descriptor_pool_size); - vk::DescriptorPool pool = ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info); + vk::DescriptorPool pool = ctx->device->device.createDescriptorPool(descriptor_pool_create_info); std::vector layouts(alloc_count); for (uint32_t i = 0; i < alloc_count; i++) { - layouts[i] = pipeline.dsl; + layouts[i] = pipeline->dsl; } try { vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pool, alloc_count, layouts.data()); - std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device->device.allocateDescriptorSets(descriptor_set_alloc_info); } catch(vk::OutOfPoolMemoryError const&) { - ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; + ctx->device->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; } - ctx->device.lock()->device.destroyDescriptorPool(pool); + ctx->device->device.destroyDescriptorPool(pool); } - if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { - vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); + if (ctx->device->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline->parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 128, descriptor_pool_size); - pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); + pipeline->descriptor_pools.push_back(ctx->device->device.createDescriptorPool(descriptor_pool_create_info)); } - pipeline.descriptor_set_idx = 0; + pipeline->descriptor_set_idx = 0; - vk::PipelineLayoutCreateInfo pipeline_layout_create_info(vk::PipelineLayoutCreateFlags(), pipeline.dsl, pcr); - pipeline.layout = ctx->device.lock()->device.createPipelineLayout(pipeline_layout_create_info); + vk::PipelineLayoutCreateInfo pipeline_layout_create_info(vk::PipelineLayoutCreateFlags(), pipeline->dsl, pcr); + pipeline->layout = ctx->device->device.createPipelineLayout(pipeline_layout_create_info); std::vector specialization_entries(specialization_constants.size()); @@ -417,72 +496,75 @@ static void ggml_vk_create_pipeline(ggml_backend_vk_context * ctx, vk_pipeline& vk::PipelineShaderStageCreateInfo pipeline_shader_create_info( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eCompute, - pipeline.shader_module, + pipeline->shader_module, entrypoint.c_str(), &specialization_info); vk::ComputePipelineCreateInfo compute_pipeline_create_info( vk::PipelineCreateFlags(), 
pipeline_shader_create_info, - pipeline.layout); - pipeline.pipeline = ctx->device.lock()->device.createComputePipeline(VK_NULL_HANDLE, compute_pipeline_create_info).value; + pipeline->layout); + pipeline->pipeline = ctx->device->device.createComputePipeline(VK_NULL_HANDLE, compute_pipeline_create_info).value; - ctx->gc.pipelines.push_back(&pipeline); + ctx->device->pipelines.push_back(pipeline); } -static void ggml_vk_destroy_pipeline(ggml_backend_vk_context * ctx, vk_pipeline * pipeline) { +static void ggml_vk_destroy_pipeline(vk::Device& device, vk_pipeline& pipeline) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_pipeline_destroy_pipeline(" << pipeline->name << ")" << std::endl; +#endif for (auto& pool : pipeline->descriptor_pools) { - ctx->device.lock()->device.destroyDescriptorPool(pool); + device.destroyDescriptorPool(pool); } pipeline->descriptor_pools.clear(); pipeline->descriptor_sets.clear(); pipeline->descriptor_set_idx = 0; - ctx->device.lock()->device.destroyDescriptorSetLayout(pipeline->dsl); + device.destroyDescriptorSetLayout(pipeline->dsl); - ctx->device.lock()->device.destroyPipelineLayout(pipeline->layout); + device.destroyPipelineLayout(pipeline->layout); - ctx->device.lock()->device.destroyShaderModule(pipeline->shader_module); + device.destroyShaderModule(pipeline->shader_module); - ctx->device.lock()->device.destroyPipeline(pipeline->pipeline); + device.destroyPipeline(pipeline->pipeline); } static void ggml_pipeline_allocate_descriptor_sets(ggml_backend_vk_context * ctx, vk_pipeline& pipeline, uint32_t n) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; + std::cerr << "ggml_pipeline_allocate_descriptor_sets(" << pipeline->name << ", " << n << ")" << std::endl; #endif - if (pipeline.descriptor_sets.size() >= pipeline.descriptor_set_idx + n) { + if (pipeline->descriptor_sets.size() >= pipeline->descriptor_set_idx + n) { // Enough descriptors are available return; } - if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { - const uint32_t alloc_count = pipeline.descriptor_set_idx + n - pipeline.descriptor_sets.size(); + if (ctx->device->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + const uint32_t alloc_count = pipeline->descriptor_set_idx + n - pipeline->descriptor_sets.size(); std::vector layouts(alloc_count); for (uint32_t i = 0; i < alloc_count; i++) { - layouts[i] = pipeline.dsl; + layouts[i] = pipeline->dsl; } - vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[0], alloc_count, layouts.data()); - std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); - pipeline.descriptor_sets.insert(pipeline.descriptor_sets.end(), sets.begin(), sets.end()); + vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline->descriptor_pools[0], alloc_count, layouts.data()); + std::vector sets = ctx->device->device.allocateDescriptorSets(descriptor_set_alloc_info); + pipeline->descriptor_sets.insert(pipeline->descriptor_sets.end(), sets.begin(), sets.end()); } else { - for (uint32_t i = pipeline.descriptor_sets.size(); i < pipeline.descriptor_set_idx + n; i++) { - vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); + for (uint32_t i = pipeline->descriptor_sets.size(); i < pipeline->descriptor_set_idx + n; i++) { + vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, 
pipeline->parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 1, descriptor_pool_size); - pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); + pipeline->descriptor_pools.push_back(ctx->device->device.createDescriptorPool(descriptor_pool_create_info)); - vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[i], 1, &pipeline.dsl); - std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); - pipeline.descriptor_sets.push_back(sets[0]); + vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline->descriptor_pools[i], 1, &pipeline->dsl); + std::vector sets = ctx->device->device.allocateDescriptorSets(descriptor_set_alloc_info); + pipeline->descriptor_sets.push_back(sets[0]); } } } static void ggml_pipeline_cleanup(vk_pipeline& pipeline) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_pipeline_cleanup(" << pipeline.name << ")" << std::endl; + std::cerr << "ggml_pipeline_cleanup(" << pipeline->name << ")" << std::endl; #endif - pipeline.descriptor_set_idx = 0; + pipeline->descriptor_set_idx = 0; } static vk::CommandBuffer ggml_vk_create_cmd_buffer(ggml_backend_vk_context * ctx, vk_queue& q) { @@ -498,7 +580,7 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(ggml_backend_vk_context * ctx q.pool, vk::CommandBufferLevel::ePrimary, 1); - const std::vector cmd_buffers = ctx->device.lock()->device.allocateCommandBuffers(command_buffer_alloc_info); + const std::vector cmd_buffers = ctx->device->device.allocateCommandBuffers(command_buffer_alloc_info); auto buf = cmd_buffers.front(); q.cmd_buffers.push_back(buf); @@ -643,11 +725,11 @@ static void ggml_vk_create_queue(ggml_backend_vk_context * ctx, vk_queue& q, uin q.queue_family_index = queue_family_index; vk::CommandPoolCreateInfo command_pool_create_info_compute(vk::CommandPoolCreateFlags(VK_COMMAND_POOL_CREATE_TRANSIENT_BIT), queue_family_index); - q.pool = ctx->device.lock()->device.createCommandPool(command_pool_create_info_compute); + q.pool = ctx->device->device.createCommandPool(command_pool_create_info_compute); q.cmd_buffer_idx = 0; - q.queue = ctx->device.lock()->device.getQueue(queue_family_index, queue_index); + q.queue = ctx->device->device.getQueue(queue_family_index, queue_index); q.stage_flags = stage_flags; } @@ -671,7 +753,7 @@ static vk_semaphore * ggml_vk_create_binary_semaphore(ggml_backend_vk_context * vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eBinary, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = ctx->device.lock()->device.createSemaphore(ci); + vk::Semaphore semaphore = ctx->device->device.createSemaphore(ci); ctx->gc.semaphores.push_back({ semaphore, 0 }); return &ctx->gc.semaphores[ctx->gc.semaphores.size() - 1]; } @@ -684,7 +766,7 @@ static vk_semaphore * ggml_vk_create_timeline_semaphore(ggml_backend_vk_context vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eTimeline, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = ctx->device.lock()->device.createSemaphore(ci); + vk::Semaphore semaphore = ctx->device->device.createSemaphore(ci); ctx->gc.tl_semaphores.push_back({ semaphore, 0 }); } return &ctx->gc.tl_semaphores[ctx->semaphore_idx++]; @@ -692,7 +774,7 @@ static vk_semaphore * ggml_vk_create_timeline_semaphore(ggml_backend_vk_context static vk::Event ggml_vk_create_event(ggml_backend_vk_context * ctx) { if (ctx->event_idx >= ctx->gc.events.size()) { - 
ctx->gc.events.push_back(ctx->device.lock()->device.createEvent({})); + ctx->gc.events.push_back(ctx->device->device.createEvent({})); } return ctx->gc.events[ctx->event_idx++]; } @@ -703,7 +785,7 @@ static void ggml_vk_queue_cleanup(ggml_backend_vk_context * ctx, vk_queue& q) { #endif // Requires command buffers to be done - ctx->device.lock()->device.resetCommandPool(q.pool); + ctx->device->device.resetCommandPool(q.pool); q.cmd_buffer_idx = 0; } @@ -740,11 +822,11 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz nullptr, }; - buf->buffer = ctx->device.lock()->device.createBuffer(buffer_create_info); + buf->buffer = ctx->device->device.createBuffer(buffer_create_info); - vk::MemoryRequirements mem_req = ctx->device.lock()->device.getBufferMemoryRequirements(buf->buffer); + vk::MemoryRequirements mem_req = ctx->device->device.getBufferMemoryRequirements(buf->buffer); - vk::PhysicalDeviceMemoryProperties mem_props = ctx->device.lock()->physical_device.getMemoryProperties(); + vk::PhysicalDeviceMemoryProperties mem_props = ctx->device->physical_device.getMemoryProperties(); uint32_t memory_type_index = UINT32_MAX; @@ -757,30 +839,30 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz } if (memory_type_index == UINT32_MAX) { - ctx->device.lock()->device.destroyBuffer(buf->buffer); + ctx->device->device.destroyBuffer(buf->buffer); buf->size = 0; throw vk::OutOfDeviceMemoryError("No suitable memory type found"); } try { - buf->device_memory = ctx->device.lock()->device.allocateMemory({ mem_req.size, memory_type_index }); + buf->device_memory = ctx->device->device.allocateMemory({ mem_req.size, memory_type_index }); } catch (const vk::SystemError& e) { // Out of Host/Device memory, clean up buffer - ctx->device.lock()->device.destroyBuffer(buf->buffer); + ctx->device->device.destroyBuffer(buf->buffer); buf->size = 0; throw e; } buf->ptr = nullptr; if (buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { - buf->ptr = ctx->device.lock()->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); + buf->ptr = ctx->device->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); } - ctx->device.lock()->device.bindBufferMemory(buf->buffer, buf->device_memory, 0); + ctx->device->device.bindBufferMemory(buf->buffer, buf->device_memory, 0); buf->ctx = ctx; - buf->device = ctx->device.lock(); + buf->device = ctx->device; #ifdef GGML_VULKAN_DEBUG std::cerr << "Created buffer " << buf->buffer << std::endl; @@ -802,7 +884,7 @@ static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size static vk_buffer ggml_vk_create_buffer_device(ggml_backend_vk_context * ctx, size_t size) { vk_buffer buf; try { - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { // Fall back to host memory type buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } else { @@ -883,10 +965,16 @@ static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { std::cerr << "ggml_vk_load_shaders(" << ctx->name << ")" << std::endl; #endif + const std::shared_ptr device = ctx->device; + // mulmat - std::initializer_list warptile_l = { 128, 128, 128, 16, ctx->device.lock()->subgroup_size * 2, 64, 2, 4, 4, ctx->device.lock()->subgroup_size }; - std::initializer_list warptile_m = { 128, 64, 64, 16, ctx->device.lock()->subgroup_size, 32, 2, 4, 2, ctx->device.lock()->subgroup_size }; - std::initializer_list warptile_s = { 
ctx->device.lock()->subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, ctx->device.lock()->subgroup_size }; + std::initializer_list warptile_l = { 128, 128, 128, 16, device->subgroup_size * 2, 64, 2, 4, 4, device->subgroup_size }; + std::initializer_list warptile_m = { 128, 64, 64, 16, device->subgroup_size, 32, 2, 4, 2, device->subgroup_size }; + std::initializer_list warptile_s = { device->subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, device->subgroup_size }; + + std::initializer_list warptile_mmq_l = { 128, 128, 128, 32, device->subgroup_size * 2, 64, 2, 4, 4, device->subgroup_size }; + std::initializer_list warptile_mmq_m = { 128, 64, 64, 32, device->subgroup_size, 32, 2, 4, 2, device->subgroup_size }; + std::initializer_list warptile_mmq_s = { device->subgroup_size, 32, 32, 32, 32, 32, 2, 2, 2, device->subgroup_size }; std::array l_wg_denoms = {128, 128, 1 }; std::array m_wg_denoms = { 64, 64, 1 }; @@ -896,126 +984,206 @@ static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { uint32_t m_align = 64; uint32_t s_align = 32; - if (ctx->device.lock()->fp16) { - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_len, matmul_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ctx->device->pipeline_matmul_f32 = std::make_shared(); + ctx->device->pipeline_matmul_f16_f32 = std::make_shared(); + ctx->device->pipeline_matmul_f16 = std::make_shared(); + ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0] = std::make_shared(); + ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1] = std::make_shared(); + ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0] = std::make_shared(); + ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1] = std::make_shared(); + ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0] = std::make_shared(); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_len, matmul_f16_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_len, matmul_f16_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_len, matmul_f16_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_len, matmul_f16_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 
l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_len, matmul_f16_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_len, matmul_f16_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + if (device->fp16) { + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->l, "matmul_f32_l", matmul_f32_len, matmul_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->m, "matmul_f32_m", matmul_f32_len, matmul_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->s, "matmul_f32_s", matmul_f32_len, matmul_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_l, "matmul_f32_aligned_l", matmul_f32_aligned_len, matmul_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_m, "matmul_f32_aligned_m", matmul_f32_aligned_len, matmul_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_s, "matmul_f32_aligned_s", matmul_f32_aligned_len, matmul_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_len, matmul_f16_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_len, matmul_f16_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_len, matmul_f16_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->l, "matmul_f16_l", matmul_f16_len, matmul_f16_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->m, "matmul_f16_m", matmul_f16_len, matmul_f16_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->s, "matmul_f16_s", matmul_f16_len, matmul_f16_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_l, "matmul_f16_aligned_l", matmul_f16_aligned_len, 
matmul_f16_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_m, "matmul_f16_aligned_m", matmul_f16_aligned_len, matmul_f16_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_s, "matmul_f16_aligned_s", matmul_f16_aligned_len, matmul_f16_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->l, "matmul_f16_f32_l", matmul_f16_f32_len, matmul_f16_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->m, "matmul_f16_f32_m", matmul_f16_f32_len, matmul_f16_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->s, "matmul_f16_f32_s", matmul_f16_f32_len, matmul_f16_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_len, matmul_f16_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_len, matmul_f16_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_len, matmul_f16_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->l, "matmul_q4_0_f32_l", matmul_q4_0_f32_len, matmul_q4_0_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->m, "matmul_q4_0_f32_m", matmul_q4_0_f32_len, matmul_q4_0_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->s, "matmul_q4_0_f32_s", matmul_q4_0_f32_len, matmul_q4_0_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_l, "matmul_q4_0_f32_aligned_l", matmul_q4_0_f32_aligned_len, matmul_q4_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_m, "matmul_q4_0_f32_aligned_m", matmul_q4_0_f32_aligned_len, matmul_q4_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_s, "matmul_q4_0_f32_aligned_s", matmul_q4_0_f32_aligned_len, matmul_q4_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->l, "matmul_q4_0_f32_l", matmul_q4_1_f32_len, matmul_q4_1_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, 
ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->m, "matmul_q4_0_f32_m", matmul_q4_1_f32_len, matmul_q4_1_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->s, "matmul_q4_0_f32_s", matmul_q4_1_f32_len, matmul_q4_1_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_l, "matmul_q4_0_f32_aligned_l", matmul_q4_1_f32_aligned_len, matmul_q4_1_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_m, "matmul_q4_0_f32_aligned_m", matmul_q4_1_f32_aligned_len, matmul_q4_1_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_s, "matmul_q4_0_f32_aligned_s", matmul_q4_1_f32_aligned_len, matmul_q4_1_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->l, "matmul_q5_0_f32_l", matmul_q5_0_f32_len, matmul_q5_0_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->m, "matmul_q5_0_f32_m", matmul_q5_0_f32_len, matmul_q5_0_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->s, "matmul_q5_0_f32_s", matmul_q5_0_f32_len, matmul_q5_0_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_l, "matmul_q5_0_f32_aligned_l", matmul_q5_0_f32_aligned_len, matmul_q5_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_m, "matmul_q5_0_f32_aligned_m", matmul_q5_0_f32_aligned_len, matmul_q5_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_s, "matmul_q5_0_f32_aligned_s", matmul_q5_0_f32_aligned_len, matmul_q5_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->l, "matmul_q5_1_f32_l", matmul_q5_1_f32_len, matmul_q5_1_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->m, "matmul_q5_1_f32_m", matmul_q5_1_f32_len, matmul_q5_1_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->s, "matmul_q5_1_f32_s", matmul_q5_1_f32_len, matmul_q5_1_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_l, "matmul_q5_1_f32_aligned_l", matmul_q5_1_f32_aligned_len, matmul_q5_1_f32_aligned_data, "main", 3, 14 * 
sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_m, "matmul_q5_1_f32_aligned_m", matmul_q5_1_f32_aligned_len, matmul_q5_1_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_s, "matmul_q5_1_f32_aligned_s", matmul_q5_1_f32_aligned_len, matmul_q5_1_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->l, "matmul_q8_0_f32_l", matmul_q8_0_f32_len, matmul_q8_0_f32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->m, "matmul_q8_0_f32_m", matmul_q8_0_f32_len, matmul_q8_0_f32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->s, "matmul_q8_0_f32_s", matmul_q8_0_f32_len, matmul_q8_0_f32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_l, "matmul_q8_0_f32_aligned_l", matmul_q8_0_f32_aligned_len, matmul_q8_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_m, "matmul_q8_0_f32_aligned_m", matmul_q8_0_f32_aligned_len, matmul_q8_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_s, "matmul_q8_0_f32_aligned_s", matmul_q8_0_f32_aligned_len, matmul_q8_0_f32_aligned_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); } else { - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_fp32_len, matmul_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_fp32_len, matmul_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_fp32_len, matmul_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_fp32_len, matmul_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->l, "matmul_f32_l", matmul_f32_fp32_len, matmul_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->m, "matmul_f32_m", 
matmul_f32_fp32_len, matmul_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->s, "matmul_f32_s", matmul_f32_fp32_len, matmul_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_l, "matmul_f32_aligned_l", matmul_f32_aligned_fp32_len, matmul_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_m, "matmul_f32_aligned_m", matmul_f32_aligned_fp32_len, matmul_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f32->a_s, "matmul_f32_aligned_s", matmul_f32_aligned_fp32_len, matmul_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_fp32_len, matmul_f16_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_fp32_len, matmul_f16_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_fp32_len, matmul_f16_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_fp32_len, matmul_f16_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_fp32_len, matmul_f16_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_fp32_len, matmul_f16_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->l, "matmul_f16_l", matmul_f16_fp32_len, matmul_f16_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->m, "matmul_f16_m", matmul_f16_fp32_len, matmul_f16_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->s, "matmul_f16_s", matmul_f16_fp32_len, matmul_f16_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_l, "matmul_f16_aligned_l", matmul_f16_aligned_fp32_len, matmul_f16_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_m, "matmul_f16_aligned_m", matmul_f16_aligned_fp32_len, matmul_f16_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16->a_s, "matmul_f16_aligned_s", matmul_f16_aligned_fp32_len, matmul_f16_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_fp32_len, 
matmul_f16_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_fp32_len, matmul_f16_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_fp32_len, matmul_f16_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->l, "matmul_f16_f32_l", matmul_f16_f32_fp32_len, matmul_f16_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->m, "matmul_f16_f32_m", matmul_f16_f32_fp32_len, matmul_f16_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->s, "matmul_f16_f32_s", matmul_f16_f32_fp32_len, matmul_f16_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_fp32_len, matmul_f16_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_fp32_len, matmul_f16_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_f16_f32->a_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_fp32_len, matmul_f16_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->l, "matmul_q4_0_f32_l", matmul_q4_0_f32_fp32_len, matmul_q4_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->m, "matmul_q4_0_f32_m", matmul_q4_0_f32_fp32_len, matmul_q4_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->s, "matmul_q4_0_f32_s", matmul_q4_0_f32_fp32_len, matmul_q4_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_l, "matmul_q4_0_f32_aligned_l", matmul_q4_0_f32_aligned_fp32_len, matmul_q4_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + 
ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_m, "matmul_q4_0_f32_aligned_m", matmul_q4_0_f32_aligned_fp32_len, matmul_q4_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_0]->a_s, "matmul_q4_0_f32_aligned_s", matmul_q4_0_f32_aligned_fp32_len, matmul_q4_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->l, "matmul_q4_1_f32_l", matmul_q4_1_f32_fp32_len, matmul_q4_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->m, "matmul_q4_1_f32_m", matmul_q4_1_f32_fp32_len, matmul_q4_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->s, "matmul_q4_1_f32_s", matmul_q4_1_f32_fp32_len, matmul_q4_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_l, "matmul_q4_1_f32_aligned_l", matmul_q4_1_f32_aligned_fp32_len, matmul_q4_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_m, "matmul_q4_1_f32_aligned_m", matmul_q4_1_f32_aligned_fp32_len, matmul_q4_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q4_1]->a_s, "matmul_q4_1_f32_aligned_s", matmul_q4_1_f32_aligned_fp32_len, matmul_q4_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->l, "matmul_q5_0_f32_l", matmul_q5_0_f32_fp32_len, matmul_q5_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->m, "matmul_q5_0_f32_m", matmul_q5_0_f32_fp32_len, matmul_q5_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->s, "matmul_q5_0_f32_s", matmul_q5_0_f32_fp32_len, matmul_q5_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_l, "matmul_q5_0_f32_aligned_l", matmul_q5_0_f32_aligned_fp32_len, matmul_q5_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_m, "matmul_q5_0_f32_aligned_m", matmul_q5_0_f32_aligned_fp32_len, matmul_q5_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_0]->a_s, "matmul_q5_0_f32_aligned_s", matmul_q5_0_f32_aligned_fp32_len, matmul_q5_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, 
warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->l, "matmul_q5_1_f32_l", matmul_q5_1_f32_fp32_len, matmul_q5_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->m, "matmul_q5_1_f32_m", matmul_q5_1_f32_fp32_len, matmul_q5_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->s, "matmul_q5_1_f32_s", matmul_q5_1_f32_fp32_len, matmul_q5_1_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_l, "matmul_q5_1_f32_aligned_l", matmul_q5_1_f32_aligned_fp32_len, matmul_q5_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_m, "matmul_q5_1_f32_aligned_m", matmul_q5_1_f32_aligned_fp32_len, matmul_q5_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q5_1]->a_s, "matmul_q5_1_f32_aligned_s", matmul_q5_1_f32_aligned_fp32_len, matmul_q5_1_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->l, "matmul_q8_0_f32_l", matmul_q8_0_f32_fp32_len, matmul_q8_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->m, "matmul_q8_0_f32_m", matmul_q8_0_f32_fp32_len, matmul_q8_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->s, "matmul_q8_0_f32_s", matmul_q8_0_f32_fp32_len, matmul_q8_0_f32_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_l, "matmul_q8_0_f32_aligned_l", matmul_q8_0_f32_aligned_fp32_len, matmul_q8_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_mmq_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_m, "matmul_q8_0_f32_aligned_m", matmul_q8_0_f32_aligned_fp32_len, matmul_q8_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_mmq_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_mat[GGML_TYPE_Q8_0]->a_s, "matmul_q8_0_f32_aligned_s", matmul_q8_0_f32_aligned_fp32_len, matmul_q8_0_f32_aligned_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_mmq_s, s_align); } - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16 ], "mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0], "mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, 
ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1], "mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16 ], "mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0], "mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1], "mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { 
device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K], "mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(uint32_t), {1, 1, 1}, { device->subgroup_size }, 1); // dequant shaders - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F32 ], "f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), { 64, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F16 ], "dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_0], "dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_1], "dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_0], "dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_1], "dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q8_0], "dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q2_K], "dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q3_K], "dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_K], "dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_K], "dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q6_K], "dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_F32 ], "f32_to_f16", dequant_f32_len, dequant_f32_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q4_0], "dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, 
ctx->device->pipeline_dequant[GGML_TYPE_Q4_1], "dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q5_0], "dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q5_1], "dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q8_0], "dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 5 * sizeof(uint32_t), {256 * 16, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q2_K], "dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 5 * sizeof(uint32_t), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q3_K], "dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 5 * sizeof(uint32_t), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q4_K], "dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 5 * sizeof(uint32_t), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q5_K], "dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 5 * sizeof(uint32_t), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_dequant[GGML_TYPE_Q6_K], "dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 5 * sizeof(uint32_t), {256 * 64, 1, 1}, {}, 1); // get_rows - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_F16 ], "get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_0], "get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_1], "get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_0], "get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_1], "get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q8_0], "get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_F16 ], "get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_Q4_0], "get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_Q4_1], "get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_Q5_0], "get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 
3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_Q5_1], "get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows[GGML_TYPE_Q8_0], "get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_F32 ], "get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_0], "get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_1], "get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_0], "get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_1], "get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q8_0], "get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_F32 ], "get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_Q4_0], "get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_Q4_1], "get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_Q5_0], "get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_Q5_1], "get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_get_rows_f32[GGML_TYPE_Q8_0], "get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, "mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - 
ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, "mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_mul_mat_vec_p021_f16_f32, "mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_mul_mat_vec_nc_f16_f32, "mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_norm_f32, "norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_rms_norm_f32, "rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_norm_f32, "norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rms_norm_f32, "rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f32, "cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f16, "cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f16_f16, "cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_cpy_f32_f32, "cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_cpy_f32_f16, "cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_cpy_f16_f16, "cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_add_f32, "add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_add_f32, "add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_binary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_f32, "mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_mul_f32, "mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_binary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_scale_f32, "scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_scale_f32, "scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_sqr_f32, "sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, 
ctx->device->pipeline_sqr_f32, "sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_clamp_f32, "clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_clamp_f32, "clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_unary_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_gelu_f32, "gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_silu_f32, "silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_relu_f32, "relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_gelu_f32, "gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_silu_f32, "silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_relu_f32, "relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_diag_mask_inf_f32, "diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_diag_mask_inf_f32, "diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_soft_max_f32, "soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_soft_max_f32, "soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 4, sizeof(vk_op_soft_max_push_constants), {1, 1, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f32, "rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f16, "rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_f32, "rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_f16, "rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); - ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_rope_neox_f16, "rope_neox_f16", 
rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + + ggml_vk_create_pipeline(ctx, ctx->device->pipeline_argsort_f32, "argsort_f32", argsort_f32_len, argsort_f32_data, "main", 2, sizeof(vk_op_argsort_push_constants), {1024, 1, 1}, {}, 1); } static void ggml_vk_print_gpu_info(size_t idx) { @@ -1057,8 +1225,8 @@ static void ggml_vk_print_gpu_info(size_t idx) { } } - const char* GGML_VULKAN_DISABLE_F16 = getenv("GGML_VULKAN_DISABLE_F16"); - bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr; + const char* GGML_VK_DISABLE_F16 = getenv("GGML_VK_DISABLE_F16"); + bool force_disable_f16 = GGML_VK_DISABLE_F16 != nullptr; bool fp16 = !force_disable_f16 && fp16_storage && fp16_compute; @@ -1188,140 +1356,152 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { throw std::runtime_error("Device not found"); } - vk_instance.devices[idx] = std::make_shared(); - ctx->device = vk_instance.devices[idx]; - ctx->device.lock()->physical_device = devices[dev_num]; - const std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); + ctx->device = ggml_vk_get_device(idx); + if (!ctx->device->initialized) { + ctx->device->physical_device = devices[dev_num]; + const std::vector ext_props = ctx->device->physical_device.enumerateDeviceExtensionProperties(); - bool maintenance4_support = false; + bool maintenance4_support = false; - // Check if maintenance4 is supported - for (const auto& properties : ext_props) { - if (strcmp("VK_KHR_maintenance4", properties.extensionName) == 0) { - maintenance4_support = true; + // Check if maintenance4 is supported + for (const auto& properties : ext_props) { + if (strcmp("VK_KHR_maintenance4", properties.extensionName) == 0) { + maintenance4_support = true; + } } - } - vk::PhysicalDeviceProperties2 props2; - vk::PhysicalDeviceMaintenance3Properties props3; - vk::PhysicalDeviceMaintenance4Properties props4; - vk::PhysicalDeviceSubgroupProperties subgroup_props; - props2.pNext = &props3; - props3.pNext = &subgroup_props; - if (maintenance4_support) { - subgroup_props.pNext = &props4; - } - ctx->device.lock()->physical_device.getProperties2(&props2); - ctx->device.lock()->properties = props2.properties; - - if (maintenance4_support) { - ctx->device.lock()->max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize); - } else { - ctx->device.lock()->max_memory_allocation_size = props3.maxMemoryAllocationSize; - } - - ctx->device.lock()->vendor_id = ctx->device.lock()->properties.vendorID; - ctx->device.lock()->subgroup_size = subgroup_props.subgroupSize; - ctx->device.lock()->uma = ctx->device.lock()->properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; - - bool fp16_storage = false; - bool fp16_compute = false; - - for (const auto& properties : ext_props) { - if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { - fp16_storage = true; - } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { - fp16_compute = true; + vk::PhysicalDeviceProperties2 props2; + vk::PhysicalDeviceMaintenance3Properties props3; + vk::PhysicalDeviceMaintenance4Properties props4; + vk::PhysicalDeviceSubgroupProperties subgroup_props; + props2.pNext = &props3; + props3.pNext = &subgroup_props; + if (maintenance4_support) { + subgroup_props.pNext = &props4; } - } + ctx->device->physical_device.getProperties2(&props2); + ctx->device->properties = props2.properties; - const char* GGML_VULKAN_DISABLE_F16 
= getenv("GGML_VULKAN_DISABLE_F16");
-    bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr;
+        const char* GGML_VK_FORCE_MAX_ALLOCATION_SIZE = getenv("GGML_VK_FORCE_MAX_ALLOCATION_SIZE");
-    ctx->device.lock()->fp16 = !force_disable_f16 && fp16_storage && fp16_compute;
+        if (GGML_VK_FORCE_MAX_ALLOCATION_SIZE != nullptr) {
+            ctx->device->max_memory_allocation_size = std::stoi(GGML_VK_FORCE_MAX_ALLOCATION_SIZE);
+        } else if (maintenance4_support) {
+            ctx->device->max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize);
+        } else {
+            ctx->device->max_memory_allocation_size = props3.maxMemoryAllocationSize;
+        }
-    std::vector queue_family_props = ctx->device.lock()->physical_device.getQueueFamilyProperties();
+        ctx->device->vendor_id = ctx->device->properties.vendorID;
+        ctx->device->subgroup_size = subgroup_props.subgroupSize;
+        ctx->device->uma = ctx->device->properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu;
-    // Try to find a non-graphics compute queue and transfer-focused queues
-    const uint32_t compute_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eCompute, vk::QueueFlagBits::eGraphics, -1, 1);
-    const uint32_t transfer_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eTransfer, vk::QueueFlagBits::eCompute | vk::QueueFlagBits::eGraphics, compute_queue_family_index, 1);
+        bool fp16_storage = false;
+        bool fp16_compute = false;
-    const float priorities[] = { 1.0f, 1.0f };
-    ctx->device.lock()->single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1;
+        for (const auto& properties : ext_props) {
+            if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) {
+                fp16_storage = true;
+            } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) {
+                fp16_compute = true;
+            }
+        }
-    std::vector device_queue_create_infos;
-    if (compute_queue_family_index != transfer_queue_family_index) {
-        device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities});
-        device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), transfer_queue_family_index, 1, priorities + 1});
-    } else if(!ctx->device.lock()->single_queue) {
-        device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 2, priorities});
-    } else {
-        device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities});
-    }
-    vk::DeviceCreateInfo device_create_info;
-    std::vector device_extensions;
-    vk::PhysicalDeviceFeatures device_features = ctx->device.lock()->physical_device.getFeatures();
+        const char* GGML_VK_DISABLE_F16 = getenv("GGML_VK_DISABLE_F16");
+        const bool force_disable_f16 = GGML_VK_DISABLE_F16 != nullptr;
-    VkPhysicalDeviceFeatures2 device_features2;
-    device_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-    device_features2.pNext = nullptr;
-    device_features2.features = (VkPhysicalDeviceFeatures)device_features;
+        ctx->device->fp16 = !force_disable_f16 && fp16_storage && fp16_compute;
-    VkPhysicalDeviceVulkan11Features vk11_features;
-    vk11_features.pNext = nullptr;
-    vk11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
-    device_features2.pNext = &vk11_features;
+        std::vector queue_family_props = ctx->device->physical_device.getQueueFamilyProperties();
-    VkPhysicalDeviceVulkan12Features vk12_features;
vk12_features.pNext = nullptr; - vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; - vk11_features.pNext = &vk12_features; + // Try to find a non-graphics compute queue and transfer-focused queues + const uint32_t compute_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eCompute, vk::QueueFlagBits::eGraphics, -1, 1); + const uint32_t transfer_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eTransfer, vk::QueueFlagBits::eCompute | vk::QueueFlagBits::eGraphics, compute_queue_family_index, 1); - vkGetPhysicalDeviceFeatures2(ctx->device.lock()->physical_device, &device_features2); + const float priorities[] = { 1.0f, 1.0f }; + ctx->device->single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1; - ctx->device.lock()->fp16 = ctx->device.lock()->fp16 && vk12_features.shaderFloat16; + std::vector device_queue_create_infos; + if (compute_queue_family_index != transfer_queue_family_index) { + device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); + device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), transfer_queue_family_index, 1, priorities + 1}); + } else if(!ctx->device->single_queue) { + device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 2, priorities}); + } else { + device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); + } + vk::DeviceCreateInfo device_create_info; + std::vector device_extensions; + vk::PhysicalDeviceFeatures device_features = ctx->device->physical_device.getFeatures(); - if (!vk11_features.storageBuffer16BitAccess) { - std::cerr << "ggml_vulkan: device " << GGML_VK_NAME << idx << " does not support 16-bit storage." << std::endl; - throw std::runtime_error("Unsupported device"); - } + VkPhysicalDeviceFeatures2 device_features2; + device_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + device_features2.pNext = nullptr; + device_features2.features = (VkPhysicalDeviceFeatures)device_features; - device_extensions.push_back("VK_KHR_16bit_storage"); + VkPhysicalDeviceVulkan11Features vk11_features; + vk11_features.pNext = nullptr; + vk11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; + device_features2.pNext = &vk11_features; + + VkPhysicalDeviceVulkan12Features vk12_features; + vk12_features.pNext = nullptr; + vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; + vk11_features.pNext = &vk12_features; + + vkGetPhysicalDeviceFeatures2(ctx->device->physical_device, &device_features2); + + ctx->device->fp16 = ctx->device->fp16 && vk12_features.shaderFloat16; + + if (!vk11_features.storageBuffer16BitAccess) { + std::cerr << "ggml_vulkan: device " << GGML_VK_NAME << idx << " does not support 16-bit storage." 
<< std::endl; + throw std::runtime_error("Unsupported device"); + } + + device_extensions.push_back("VK_KHR_16bit_storage"); #ifdef GGML_VULKAN_VALIDATE - device_extensions.push_back("VK_KHR_shader_non_semantic_info"); + device_extensions.push_back("VK_KHR_shader_non_semantic_info"); #endif - if (ctx->device.lock()->fp16) { - device_extensions.push_back("VK_KHR_shader_float16_int8"); - } - ctx->device.lock()->name = ctx->device.lock()->properties.deviceName.data(); + if (ctx->device->fp16) { + device_extensions.push_back("VK_KHR_shader_float16_int8"); + } + ctx->device->name = ctx->device->properties.deviceName.data(); - device_create_info = { - vk::DeviceCreateFlags(), - device_queue_create_infos, - {}, - device_extensions - }; - device_create_info.setPNext(&device_features2); - ctx->device.lock()->device = ctx->device.lock()->physical_device.createDevice(device_create_info); + device_create_info = { + vk::DeviceCreateFlags(), + device_queue_create_infos, + {}, + device_extensions + }; + device_create_info.setPNext(&device_features2); + ctx->device->device = ctx->device->physical_device.createDevice(device_create_info); - ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; + ctx->device->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; - // Shaders - ggml_vk_load_shaders(ctx); + // Queues + ggml_vk_create_queue(ctx, ctx->device->compute_queue, compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); - // Queues - ggml_vk_create_queue(ctx, ctx->device.lock()->compute_queue, compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); - if (!ctx->device.lock()->single_queue) { - const uint32_t transfer_queue_index = compute_queue_family_index == transfer_queue_family_index ? 1 : 0; - ggml_vk_create_queue(ctx, ctx->device.lock()->transfer_queue, transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); - } else { - // TODO: Use pointer or reference to avoid copy - ctx->device.lock()->transfer_queue = ctx->device.lock()->compute_queue; + // Shaders + ggml_vk_load_shaders(ctx); + + if (!ctx->device->single_queue) { + const uint32_t transfer_queue_index = compute_queue_family_index == transfer_queue_family_index ? 
1 : 0; + ggml_vk_create_queue(ctx, ctx->device->transfer_queue, transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); + } else { + // TODO: Use pointer or reference to avoid copy + ctx->device->transfer_queue = ctx->device->compute_queue; + } + + ctx->device->idx = dev_num; + ctx->device->initialized = true; + } else if (ctx->device->idx != dev_num) { + std::cerr << "ggml_vulkan: Device " << ctx->device->name << " already initialized with index " << ctx->device->idx << ", but trying to reinitialize with index " << dev_num << std::endl; + throw std::runtime_error("Device already initialized"); } - ctx->fence = ctx->device.lock()->device.createFence({}); + ctx->fence = ctx->device->device.createFence({}); ctx->compute_ctx = nullptr; ctx->transfer_ctx = nullptr; @@ -1339,7 +1519,7 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { #endif } -static vk_pipeline* ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type type) { +static vk_pipeline ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_to_fp16()" << std::endl; #endif @@ -1360,10 +1540,36 @@ static vk_pipeline* ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type return nullptr; } - return &ctx->pipeline_dequant[type]; + return ctx->device->pipeline_dequant[type]; } -static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type type) { +static vk_matmul_pipeline ggml_vk_get_mul_mat_mat_pipeline(ggml_backend_vk_context * ctx, ggml_type src0_type, ggml_type src1_type) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_get_mul_mat_mat_pipeline()" << std::endl; +#endif + if (src0_type == GGML_TYPE_F32 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_f32; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F32) { + return ctx->device->pipeline_matmul_f16_f32; + } + if (src0_type == GGML_TYPE_F16 && src1_type == GGML_TYPE_F16) { + return ctx->device->pipeline_matmul_f16; + } + + GGML_ASSERT(src1_type == GGML_TYPE_F32); + + switch (src0_type) { + case GGML_TYPE_Q4_0: + break; + default: + return nullptr; + } + + return ctx->device->pipeline_dequant_mul_mat_mat[src0_type]; +} + +static vk_pipeline ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_dequantize_mul_mat_vec()" << std::endl; #endif @@ -1384,7 +1590,7 @@ static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * return nullptr; } - return &ctx->pipeline_dequant_mul_mat_vec_f32[type]; + return ctx->device->pipeline_dequant_mul_mat_vec_f32[type]; } static vk_buffer ggml_vk_pool_malloc(ggml_backend_vk_context * ctx, size_t size) { @@ -1463,8 +1669,8 @@ static void * ggml_vk_host_malloc(ggml_backend_vk_context * ctx, size_t size) { if(!(buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory\n", size/1024.0/1024.0); - ctx->device.lock()->device.freeMemory(buf->device_memory); - ctx->device.lock()->device.destroyBuffer(buf->buffer); + ctx->device->device.freeMemory(buf->device_memory); + ctx->device->device.destroyBuffer(buf->buffer); return nullptr; } @@ -1528,30 +1734,30 @@ static vk_submission ggml_vk_begin_submission(ggml_backend_vk_context * ctx, vk_ } static void ggml_vk_dispatch_pipeline(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline& pipeline, std::vector&& buffers, 
size_t push_constant_size, const void* push_constants, std::array elements) { - const uint32_t wg0 = CEIL_DIV(elements[0], pipeline.wg_denoms[0]); - const uint32_t wg1 = CEIL_DIV(elements[1], pipeline.wg_denoms[1]); - const uint32_t wg2 = CEIL_DIV(elements[2], pipeline.wg_denoms[2]); + const uint32_t wg0 = CEIL_DIV(elements[0], pipeline->wg_denoms[0]); + const uint32_t wg1 = CEIL_DIV(elements[1], pipeline->wg_denoms[1]); + const uint32_t wg2 = CEIL_DIV(elements[2], pipeline->wg_denoms[2]); #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_dispatch_pipeline(" << pipeline.name << ", (" << wg0 << "," << wg1 << "," << wg2 << "))" << std::endl; + std::cerr << "ggml_vk_dispatch_pipeline(" << pipeline->name << ", (" << wg0 << "," << wg1 << "," << wg2 << "))" << std::endl; #endif std::vector descriptor_buffer_infos; std::vector write_descriptor_sets; - GGML_ASSERT(pipeline.descriptor_set_idx < pipeline.descriptor_sets.size()); - GGML_ASSERT(buffers.size() == pipeline.parameter_count); - vk::DescriptorSet& descriptor_set = pipeline.descriptor_sets[pipeline.descriptor_set_idx++]; - for (uint32_t i = 0; i < pipeline.parameter_count; i++) { + GGML_ASSERT(pipeline->descriptor_set_idx < pipeline->descriptor_sets.size()); + GGML_ASSERT(buffers.size() == pipeline->parameter_count); + vk::DescriptorSet& descriptor_set = pipeline->descriptor_sets[pipeline->descriptor_set_idx++]; + for (uint32_t i = 0; i < pipeline->parameter_count; i++) { descriptor_buffer_infos.push_back({buffers[i].buffer->buffer, buffers[i].offset, buffers[i].size}); } - for (uint32_t i = 0; i < pipeline.parameter_count; i++) { + for (uint32_t i = 0; i < pipeline->parameter_count; i++) { write_descriptor_sets.push_back({descriptor_set, i, 0, 1, vk::DescriptorType::eStorageBuffer, nullptr, &descriptor_buffer_infos[i]}); } - ctx->device.lock()->device.updateDescriptorSets(write_descriptor_sets, {}); + ctx->device->device.updateDescriptorSets(write_descriptor_sets, {}); - subctx->s->buffer.pushConstants(pipeline.layout, vk::ShaderStageFlagBits::eCompute, 0, push_constant_size, push_constants); - subctx->s->buffer.bindPipeline(vk::PipelineBindPoint::eCompute, pipeline.pipeline); + subctx->s->buffer.pushConstants(pipeline->layout, vk::ShaderStageFlagBits::eCompute, 0, push_constant_size, push_constants); + subctx->s->buffer.bindPipeline(vk::PipelineBindPoint::eCompute, pipeline->pipeline); subctx->s->buffer.bindDescriptorSets(vk::PipelineBindPoint::eCompute, - pipeline.layout, + pipeline->layout, 0, { descriptor_set }, {}); @@ -1810,7 +2016,7 @@ static void ggml_vk_buffer_write_2d(ggml_backend_vk_context * ctx, vk_buffer& ds memcpy((uint8_t *)dst->ptr + offset + i * width, (const uint8_t *) src + i * spitch, width); } } else { - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, subctx); ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, src, spitch, width, height, true); ggml_vk_ctx_end(subctx); @@ -1820,8 +2026,9 @@ static void ggml_vk_buffer_write_2d(ggml_backend_vk_context * ctx, vk_buffer& ds } ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); + 
ggml_vk_queue_cleanup(ctx, ctx->device->transfer_queue); } } @@ -1906,18 +2113,19 @@ static void ggml_vk_buffer_read(ggml_backend_vk_context * ctx, vk_buffer& src, s memcpy(dst, (uint8_t *) src->ptr + offset, size); } else { - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, subctx); ggml_vk_buffer_read_async(ctx, subctx, src, offset, dst, size, true); ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } + ggml_vk_queue_cleanup(ctx, ctx->device->transfer_queue); } } @@ -1941,15 +2149,13 @@ static void ggml_vk_buffer_copy(vk_buffer& dst, size_t dst_offset, vk_buffer& sr // Copy within the device ggml_backend_vk_context * ctx = src->ctx; - VkBufferCopy bc{ src_offset, dst_offset, size }; - - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, subctx); ggml_vk_buffer_copy_async(subctx, dst, dst_offset, src, src_offset, size); ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); } else { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy(MULTI_DEVICE, " << size << ")" << std::endl; @@ -1977,14 +2183,14 @@ static void ggml_vk_buffer_memset(ggml_backend_vk_context * ctx, vk_buffer& dst, // Make sure ctx owns the buffer GGML_ASSERT(dst->ctx == ctx); - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, subctx); subctx->s->buffer.fillBuffer(dst->buffer, offset, size, c); ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_memset waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_memset waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); } static void ggml_vk_h2d_tensor_2d(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { @@ -2045,176 +2251,63 @@ static void ggml_vk_d2h_tensor_2d(ggml_backend_vk_context * ctx, vk_context * su static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_guess_split_k(" << m << ", " << n << ", " << k << ")"; + std::cerr << "ggml_vk_guess_split_k(" << m << ", " << n << ", " << k << ")" << std::endl; #endif if (k > 128 && (m < 128 || n < 128) && m > 2 && n > 2) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " = 4" << 
std::endl; -#endif return 4; } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " = 1" << std::endl; -#endif return 1; } -static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ctx, int m, int n) { +static vk_pipeline ggml_vk_guess_matmul_pipeline_amd(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n, bool aligned) { + if (m <= 32 || n <= 32) { + return aligned ? mmp->a_s : mmp->s; + } + return aligned ? mmp->a_m : mmp->m; + + GGML_UNUSED(ctx); +} + +static vk_pipeline ggml_vk_guess_matmul_pipeline_apple(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, bool aligned) { + return aligned ? mmp->a_m : mmp->m; + + GGML_UNUSED(ctx); +} + +static vk_pipeline ggml_vk_guess_matmul_pipeline_intel(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, bool aligned) { + return aligned ? mmp->a_s : mmp->s; + + GGML_UNUSED(ctx); +} + +static vk_pipeline ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_guess_matmul_pipeline(" << m << ", " << n << ", " << aligned << ")" << std::endl; +#endif + switch (ctx->device->vendor_id) { + case VK_VENDOR_ID_AMD: + return ggml_vk_guess_matmul_pipeline_amd(ctx, mmp, m, n, aligned); + case VK_VENDOR_ID_APPLE: + return ggml_vk_guess_matmul_pipeline_apple(ctx, mmp, aligned); + case VK_VENDOR_ID_INTEL: + return ggml_vk_guess_matmul_pipeline_intel(ctx, mmp, aligned); + } + + if (m <= 32 || n <= 32) { + return aligned ? mmp->a_s : mmp->s; + } + if (m <= 64 || n <= 64) { + return aligned ? mmp->a_m : mmp->m; + } + return aligned ? mmp->a_l : mmp->l; +} + +static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ctx, vk_matmul_pipeline& mmp, int m, int n) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")" << std::endl; #endif - if (m <= 32 || n <= 32) { - return ctx->pipeline_matmul_f32_aligned_s.align; - } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { - return ctx->pipeline_matmul_f32_aligned_m.align; - } - return ctx->pipeline_matmul_f32_aligned_l.align; -} - -static vk_pipeline* ggml_vk_guess_matmul_pipeline_amd(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { - if (bit16_x && bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; - } - if (bit16_x && !bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; - } - if (!bit16_x && bit16_y) { - GGML_ASSERT(false); - } - - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? 
&ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; -} - -static vk_pipeline* ggml_vk_guess_matmul_pipeline_apple(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - if (bit16_x && bit16_y) { - return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; - } - if (bit16_x && !bit16_y) { - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; - } - if (!bit16_x && bit16_y) { - GGML_ASSERT(false); - } - return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; -} - -static vk_pipeline* ggml_vk_guess_matmul_pipeline_intel(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - if (bit16_x && bit16_y) { - return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; - } - if (bit16_x && !bit16_y) { - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; - } - if (!bit16_x && bit16_y) { - GGML_ASSERT(false); - } - return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; -} - -static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; -#endif - switch (ctx->device.lock()->vendor_id) { - case VK_VENDOR_ID_AMD: - return ggml_vk_guess_matmul_pipeline_amd(ctx, bit16_x, bit16_y, m, n, aligned); - case VK_VENDOR_ID_APPLE: - return ggml_vk_guess_matmul_pipeline_apple(ctx, bit16_x, bit16_y, aligned); - case VK_VENDOR_ID_INTEL: - return ggml_vk_guess_matmul_pipeline_intel(ctx, bit16_x, bit16_y, aligned); - } - - if (bit16_x && bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; - } - if (m <= 64 || n <= 64) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " L" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_aligned_l : &ctx->pipeline_matmul_f16_l; - } - if (bit16_x && !bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; - } - if (m <= 64 || n <= 64) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " L" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_l : &ctx->pipeline_matmul_f16_f32_l; - } - if (!bit16_x && bit16_y) { - GGML_ASSERT(false); - } - - if (m <= 32 || n <= 32) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " S" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; - } - if (m <= 64 || n <= 64) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << " M" << std::endl; -#endif - return aligned ? 
&ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << " L" << std::endl; -#endif - return aligned ? &ctx->pipeline_matmul_f32_aligned_l : &ctx->pipeline_matmul_f32_l; + return ggml_vk_guess_matmul_pipeline(ctx, mmp, m, n, false)->align; } static void ggml_vk_matmul(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) { @@ -2232,10 +2325,10 @@ static void ggml_vk_matmul(ggml_backend_vk_context * ctx, vk_context * subctx, v const std::array pc1 = { m, n, k, stride_a, stride_b, stride_d, CEIL_DIV(k, split_k), ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; // Make sure enough workgroups get assigned for split k to work - ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch }); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline->wg_denoms[0]) * pipeline->wg_denoms[0]) * split_k, n, batch }); ggml_vk_sync_buffers(subctx); const std::array pc2 = { (uint32_t)(m * n * batch), split_k }; - ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 }); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->device->pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 }); } static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) { @@ -2245,41 +2338,39 @@ static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) { tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; } -static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_backend_vk_context * ctx, ggml_type from, ggml_type to) { +static vk_pipeline ggml_vk_get_cpy_pipeline(ggml_backend_vk_context * ctx, ggml_type from, ggml_type to) { if (from == GGML_TYPE_F32 && to == GGML_TYPE_F32) { - return &ctx->pipeline_cpy_f32_f32; + return ctx->device->pipeline_cpy_f32_f32; } if (from == GGML_TYPE_F32 && to == GGML_TYPE_F16) { - return &ctx->pipeline_cpy_f32_f16; + return ctx->device->pipeline_cpy_f32_f16; } if (from == GGML_TYPE_F16 && to == GGML_TYPE_F16) { - return &ctx->pipeline_cpy_f16_f16; + return ctx->device->pipeline_cpy_f16_f16; } std::cerr << "Missing CPY op for types: " << ggml_type_name(from) << " " << ggml_type_name(to) << std::endl; GGML_ASSERT(false); } -static void ggml_vk_cpy_to_contiguous(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) { +static void ggml_vk_cpy_to_contiguous(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cpy_to_contiguous((" << tensor << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << 
tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << "), "; std::cerr << "buffer in size=" << in.buffer->size << ", buffer out size=" << out.buffer->size << ")" << std::endl; #endif const int tensor_type_size = ggml_type_size(tensor->type); - const int dst_type_size = ggml_type_size(buffer_type); - const uint32_t ne = tensor->ne[0] * tensor->ne[1] * tensor->ne[2]; + const uint32_t ne = ggml_nelements(tensor); - const uint32_t nb2 = aligned ? ggml_vk_align_size(dst_type_size * tensor->ne[0] * tensor->ne[1], ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size : tensor->ne[0] * tensor->ne[1]; - - const vk_op_cpy_push_constants pc = { + const vk_op_unary_push_constants pc = { (uint32_t)ne, - (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->nb[0] / tensor_type_size, (uint32_t)tensor->nb[1] / tensor_type_size, (uint32_t)tensor->nb[2] / tensor_type_size, - (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], 1 , (uint32_t)tensor->ne[0] , nb2, + (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->ne[2], (uint32_t)tensor->ne[3], (uint32_t)tensor->nb[0] / tensor_type_size, (uint32_t)tensor->nb[1] / tensor_type_size, (uint32_t)tensor->nb[2] / tensor_type_size, (uint32_t)tensor->nb[3] / tensor_type_size, + (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], (uint32_t)tensor->ne[2], (uint32_t)tensor->ne[3], 1 , (uint32_t)tensor->ne[0] , (uint32_t)(tensor->ne[0] * tensor->ne[1]) , (uint32_t)(tensor->ne[0] * tensor->ne[1] * tensor->ne[2]), 0, + 0.0f, 0.0f, }; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { in, out }, sizeof(vk_op_cpy_push_constants), &pc, { ne, 1, 1 }); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { in, out }, sizeof(vk_op_unary_push_constants), &pc, { ne, 1, 1 }); } static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -2319,7 +2410,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su bool src0_uma = false; bool src1_uma = false; - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; @@ -2332,10 +2423,17 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su const bool x_non_contig = !load_x && !ggml_vk_dim01_contiguous(src0); const bool y_non_contig = !load_y && !ggml_vk_dim01_contiguous(src1); - const bool f16_f32_kernel = src1->type == GGML_TYPE_F32 && !y_non_contig; + const bool y_f32_kernel = src1->type == GGML_TYPE_F32 && !y_non_contig; - const bool qx_needs_dequant = src0->type != GGML_TYPE_F16 || x_non_contig; - const bool qy_needs_dequant = (src1->type != GGML_TYPE_F16 && !f16_f32_kernel) || y_non_contig; + vk_matmul_pipeline mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, src0->type, y_non_contig ? GGML_TYPE_F16 : src1->type); + + const bool qx_needs_dequant = mmp == nullptr || x_non_contig; + const bool qy_needs_dequant = (src1->type != GGML_TYPE_F16 && !y_f32_kernel) || y_non_contig; + + if (mmp == nullptr) { + // Fall back to dequant + f16 mulmat + mmp = ggml_vk_get_mul_mat_mat_pipeline(ctx, GGML_TYPE_F16, y_f32_kernel ? 
GGML_TYPE_F32 : GGML_TYPE_F16); + } // Not implemented GGML_ASSERT(y_non_contig || !qy_needs_dequant); // NOLINT @@ -2344,17 +2442,17 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su const int y_ne = ne11 * ne10; const int d_ne = ne11 * ne01; - const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, ne01, ne11)); + const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, mmp, ne01, ne11)); const bool aligned = ne10 == kpad; const uint32_t split_k = ggml_vk_guess_split_k(ne01, ne11, ne10); - vk_pipeline * pipeline = ggml_vk_guess_matmul_pipeline(ctx, true, !f16_f32_kernel, ne01, ne11, aligned); + vk_pipeline pipeline = ggml_vk_guess_matmul_pipeline(ctx, mmp, ne01, ne11, aligned); const uint64_t qx_sz = ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); - const uint64_t x_sz = sizeof(ggml_fp16_t) * x_ne; - const uint64_t y_sz = f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; + const uint64_t x_sz = !qx_needs_dequant ? qx_sz : sizeof(ggml_fp16_t) * x_ne; + const uint64_t y_sz = y_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; vk_buffer d_D = extra->buffer_gpu.lock(); @@ -2385,7 +2483,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su } else { d_X = d_Qx; x_buf_offset = qx_buf_offset; - GGML_ASSERT(qx_sz == x_sz); // NOLINT + GGML_ASSERT(qx_sz == x_sz); } if (qy_needs_dequant) { d_Y = ctx->prealloc_y; @@ -2396,8 +2494,8 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su GGML_ASSERT(qy_sz == y_sz); } - vk_pipeline * to_fp16_vk_0 = nullptr; - vk_pipeline * to_fp16_vk_1 = nullptr; + vk_pipeline to_fp16_vk_0 = nullptr; + vk_pipeline to_fp16_vk_1 = nullptr; if (x_non_contig) { to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, GGML_TYPE_F16); @@ -2413,19 +2511,19 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT // Allocate descriptor sets - ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, pipeline, 1); if (qx_needs_dequant) { - ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, x_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, to_fp16_vk_0, 1); } if (qy_needs_dequant) { - ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 
1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, to_fp16_vk_1, 1); } if (split_k > 1) { - ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->device->pipeline_matmul_split_k_reduce, 1); } if (x_non_contig) { - ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, dst->type, false); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }); } else if (load_x || qx_needs_dequant) { if (load_x) { // copy data to device @@ -2434,13 +2532,13 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su } if (qx_needs_dequant) { - const std::vector pc = { (int)ne01, (int)ne10, (int)ne10, (int)ne10 }; + const std::vector pc = { (uint32_t)ne01, (uint32_t)ne10, (uint32_t)ne10, (uint32_t)ne10, (uint32_t)(ggml_nelements(src0)) }; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_0, { { d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1}); + ggml_vk_dispatch_pipeline(ctx, subctx, to_fp16_vk_0, { { d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(uint32_t), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1}); } } if (y_non_contig) { - ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, dst->type); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }); } else if (load_y) { ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); } @@ -2457,7 +2555,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su } // compute - ggml_vk_matmul(ctx, subctx, *pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT + ggml_vk_matmul(ctx, subctx, pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host @@ -2505,7 +2603,7 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context bool src0_uma = false; bool src1_uma = false; - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; @@ -2527,9 +2625,9 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context const uint64_t y_ne = ne11 * ne10; const uint64_t d_ne = ne11 * ne01; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / 
ggml_blck_size(src0->type), ctx->device->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); - const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : qx_sz; + const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device->properties.limits.minStorageBufferOffsetAlignment) : qx_sz; const uint64_t y_sz = f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; @@ -2569,8 +2667,8 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context GGML_ASSERT(qy_sz == y_sz); } - vk_pipeline * to_fp16_vk_0 = nullptr; - vk_pipeline* to_fp16_vk_1 = nullptr; + vk_pipeline to_fp16_vk_0 = nullptr; + vk_pipeline to_fp16_vk_1 = nullptr; if (x_non_contig) { to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, src0->type); } @@ -2579,30 +2677,30 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context } else { to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } - vk_pipeline* dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type); + vk_pipeline dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type); GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT GGML_ASSERT(dmmv != nullptr); // Allocate descriptor sets if (qx_needs_dequant) { - ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, to_fp16_vk_0, 1); } if (qy_needs_dequant) { - ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, to_fp16_vk_1, y_non_contig ? 
1 : ne12 * ne13); } - ggml_pipeline_allocate_descriptor_sets(ctx, *dmmv, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, dmmv, ne12 * ne13); if (x_non_contig) { - GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment)); - ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, src0->type); + GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device->properties.limits.minStorageBufferOffsetAlignment)); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }); } else if (load_x) { // copy data to device ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0)); } if (y_non_contig) { GGML_ASSERT(y_sz == ggml_type_size(src1->type) * y_ne); - ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, src1->type); + ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }); } else if (load_y) { ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1)); } @@ -2619,22 +2717,22 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context const uint64_t y_offset = y_buf_offset + y_sz * it_idx1; const uint64_t d_offset = d_buf_offset + d_sz * it_idx1; - const uint64_t y_buffer_offset = (y_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t y_buffer_offset = (y_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t y_shader_offset = y_offset - y_buffer_offset; - const uint64_t d_buffer_offset = (d_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_offset - d_buffer_offset; if (!y_non_contig && qy_needs_dequant) { - const std::vector pc = { (int)ne11, (int)ne10, (int)ne10, (int)ne10 }; + const std::vector pc = { (uint32_t)ne11, (uint32_t)ne10, (uint32_t)ne10, (uint32_t)ne10, (uint32_t)(y_ne / 32) }; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_1, { { d_Qy, qy_offset, qy_sz }, { d_Y, y_offset, y_sz } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)y_ne, 1, 1}); + ggml_vk_dispatch_pipeline(ctx, subctx, to_fp16_vk_1, { { d_Qy, qy_offset, qy_sz }, { d_Y, y_offset, y_sz } }, pc.size() * sizeof(uint32_t), pc.data(), { (uint32_t)y_ne, 1, 1}); } // compute - const std::array pc = { (int)ne00, (int)(y_shader_offset / ggml_type_size(src1->type)), (int)(d_shader_offset / ggml_type_size(dst->type))}; + const std::array pc = { (uint32_t)ne00, (uint32_t)(y_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type))}; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 
1}); + ggml_vk_dispatch_pipeline(ctx, subctx, dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host @@ -2680,7 +2778,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c bool src1_uma = false; - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2691,7 +2789,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c const uint64_t y_ne = ne10 * ne11 * ne12; const uint64_t d_ne = ne01 * ne11 * ne12; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); const uint64_t d_sz = sizeof(float) * d_ne; @@ -2710,12 +2808,12 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c } // Allocate descriptor sets - ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->device->pipeline_mul_mat_vec_p021_f16_f32, 1); - const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset; - const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_buf_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset; if (load_y) { @@ -2725,7 +2823,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c // compute const std::array pc = { (uint32_t)ne00, (uint32_t)ne01, (uint32_t)ne02, (uint32_t)ne12, (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) }; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->device->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host @@ -2772,7 +2870,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * 
ctx, vk_con bool src1_uma = false; - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2803,12 +2901,12 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con } // Allocate descriptor sets - ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->device->pipeline_mul_mat_vec_nc_f16_f32, 1); - const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset; - const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_buf_offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset; if (load_y) { @@ -2818,7 +2916,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con // compute const std::array pc = { (uint32_t)ne00, (uint32_t)ne01, row_stride_x, channel_stride_x, (uint32_t)(ne12 / ne02), (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) }; ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->device->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host @@ -2856,6 +2954,10 @@ static void ggml_vk_mul_mat(ggml_backend_vk_context * ctx, vk_context * subctx, } } +// static void ggml_vk_mul_mat_id(ggml_backend_vk_context * ctx, vk_context * subctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +// +// } + static void ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { // guaranteed to be an integer due to the check in ggml_can_repeat const uint64_t ne0 = dst->ne[0]; @@ -2927,40 +3029,40 @@ static void ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx } -static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op) { +static vk_pipeline ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst, ggml_op op) { switch (op) { case GGML_OP_ADD: if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && dst->type == 
GGML_TYPE_F32) { - return &ctx->pipeline_add_f32; + return ctx->device->pipeline_add_f32; } return nullptr; case GGML_OP_GET_ROWS: GGML_ASSERT(src1->type == GGML_TYPE_I32); if (dst->type == GGML_TYPE_F16) { - return &ctx->pipeline_get_rows[src0->type]; + return ctx->device->pipeline_get_rows[src0->type]; } if (dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_get_rows_f32[src0->type]; + return ctx->device->pipeline_get_rows_f32[src0->type]; } return nullptr; case GGML_OP_MUL: if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_mul_f32; + return ctx->device->pipeline_mul_f32; } return nullptr; case GGML_OP_SCALE: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_scale_f32; + return ctx->device->pipeline_scale_f32; } return nullptr; case GGML_OP_SQR: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_sqr_f32; + return ctx->device->pipeline_sqr_f32; } return nullptr; case GGML_OP_CLAMP: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_clamp_f32; + return ctx->device->pipeline_clamp_f32; } return nullptr; case GGML_OP_CPY: @@ -2969,29 +3071,29 @@ static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const return ggml_vk_get_cpy_pipeline(ctx, src0->type, dst->type); case GGML_OP_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_norm_f32; + return ctx->device->pipeline_norm_f32; } return nullptr; case GGML_OP_RMS_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_rms_norm_f32; + return ctx->device->pipeline_rms_norm_f32; } return nullptr; case GGML_OP_UNARY: switch (ggml_get_unary_op(dst)) { case GGML_UNARY_OP_SILU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_silu_f32; + return ctx->device->pipeline_silu_f32; } break; case GGML_UNARY_OP_GELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_gelu_f32; + return ctx->device->pipeline_gelu_f32; } break; case GGML_UNARY_OP_RELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_relu_f32; + return ctx->device->pipeline_relu_f32; } break; default: @@ -3000,12 +3102,12 @@ static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const return nullptr; case GGML_OP_DIAG_MASK_INF: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_diag_mask_inf_f32; + return ctx->device->pipeline_diag_mask_inf_f32; } return nullptr; case GGML_OP_SOFT_MAX: - if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_soft_max_f32; + if (src0->type == GGML_TYPE_F32 && (src1 == nullptr || src1->type == GGML_TYPE_F32) && (src2 == nullptr || src2->type == GGML_TYPE_F32) && dst->type == GGML_TYPE_F32) { + return ctx->device->pipeline_soft_max_f32; } return nullptr; case GGML_OP_ROPE: @@ -3020,21 +3122,26 @@ static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const if (is_neox) { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &ctx->pipeline_rope_neox_f32; + return ctx->device->pipeline_rope_neox_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &ctx->pipeline_rope_neox_f16; + return ctx->device->pipeline_rope_neox_f16; } } else { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return 
&ctx->pipeline_rope_f32; + return ctx->device->pipeline_rope_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &ctx->pipeline_rope_f16; + return ctx->device->pipeline_rope_f16; } } return nullptr; } + case GGML_OP_ARGSORT: + if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_I32) { + return ctx->device->pipeline_argsort_f32; + } + return nullptr; default: return nullptr; } @@ -3050,17 +3157,19 @@ static ggml_vk_func_t ggml_vk_op_get_func(ggml_op op) { } template -static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op, const PC&& pc) { +static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst, ggml_op op, const PC&& pc) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; } + if (src2 != nullptr) { + std::cerr << "), (" << src2 << ", name=" << src2->name << ", type=" << src2->type << ", backend=" << src2->backend << ", ne0=" << src2->ne[0] << ", ne1=" << src2->ne[1] << ", ne2=" << src2->ne[2] << ", ne3=" << src2->ne[3] << ", nb0=" << src2->nb[0] << ", nb1=" << src2->nb[1] << ", nb2=" << src2->nb[2] << ", nb3=" << src2->nb[3]; + } std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "), " << ggml_op_name(op) << ")" << std::endl; #endif GGML_ASSERT(!ggml_is_quantized(src0->type) && (src1 == nullptr || !ggml_is_quantized(src1->type))); // NOLINT GGML_ASSERT(op == GGML_OP_CPY || ggml_vk_dim01_contiguous(src0)); // NOLINT - GGML_ASSERT(src1 == nullptr || ggml_vk_dim01_contiguous(src1)); // NOLINT GGML_ASSERT(dst->extra != nullptr); const uint64_t ne00 = src0->ne[0]; const uint64_t ne01 = src0->ne[1]; @@ -3077,7 +3186,10 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c const uint64_t nb2 = dst->nb[2]; const uint64_t nb3 = dst->nb[3]; - vk_pipeline * pipeline = ggml_vk_op_get_pipeline(ctx, src0, src1, dst, op); + const bool use_src2 = src2 != nullptr; + const uint64_t ne2 = use_src2 ? src2->ne[0] * src2->ne[1] : 0; + + vk_pipeline pipeline = ggml_vk_op_get_pipeline(ctx, src0, src1, src2, dst, op); ggml_vk_func_t op_func; if (pipeline == nullptr) { @@ -3098,29 +3210,39 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = use_src1 ? 
(ggml_tensor_extra_gpu *) src1->extra : nullptr; + ggml_tensor_extra_gpu * extra_src2 = use_src2 ? (ggml_tensor_extra_gpu *) src2->extra : nullptr; vk_buffer d_X = nullptr; size_t x_buf_offset = 0; vk_buffer d_Y = nullptr; size_t y_buf_offset = 0; + vk_buffer d_Z = nullptr; + size_t z_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; + bool src2_uma = false; - if (ctx->device.lock()->uma) { + if (ctx->device->uma) { ggml_vk_host_get(ctx, src0->data, d_X, x_buf_offset); src0_uma = d_X != nullptr; if (use_src1) { ggml_vk_host_get(ctx, src1->data, d_Y, y_buf_offset); src1_uma = d_Y != nullptr; } + if (use_src2) { + ggml_vk_host_get(ctx, src1->data, d_Z, z_buf_offset); + src2_uma = d_Z != nullptr; + } } const bool transfer_src0 = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; + const bool transfer_src2 = use_src2 && src2->backend != GGML_BACKEND_TYPE_GPU && !src2_uma; - uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); - uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : 0; + uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device->properties.limits.minStorageBufferOffsetAlignment); + uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device->properties.limits.minStorageBufferOffsetAlignment) : 0; + uint64_t z_sz = use_src2 ? ggml_vk_align_size(ggml_type_size(src2->type) * ne2, ctx->device->properties.limits.minStorageBufferOffsetAlignment) : 0; uint64_t d_sz = ggml_type_size(dst->type) * ne0; vk_buffer d_D = extra->buffer_gpu.lock(); @@ -3131,7 +3253,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c } GGML_ASSERT(d_D != nullptr); - uint64_t d_buf_offset = (extra->offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + uint64_t d_buf_offset = (extra->offset / ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ctx->device->properties.limits.minStorageBufferOffsetAlignment; GGML_ASSERT(d_buf_offset == extra->offset || op == GGML_OP_CPY); // NOLINT if (transfer_src0) { d_X = ctx->prealloc_qx; @@ -3148,6 +3270,13 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c GGML_ASSERT(d_Y != nullptr); } + GGML_ASSERT(!transfer_src2); + if (use_src2 && !src2_uma) { + d_Z = extra_src2->buffer_gpu.lock(); + z_buf_offset = extra_src2->offset; + GGML_ASSERT(d_Z != nullptr); + } + if (op == GGML_OP_CPY) { GGML_ASSERT(!transfer_src0); GGML_ASSERT(!transfer_src1); @@ -3175,7 +3304,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c // Single call if dimension 2 is contiguous if (op == GGML_OP_CPY || (ggml_is_contiguous(src0) && (src1 == nullptr || ggml_is_contiguous(src1)))) { - ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, pipeline, 1); switch (dst->op) { case GGML_OP_NORM: @@ -3204,16 +3333,30 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c } } - if (!use_src1 && op == GGML_OP_SOFT_MAX) { - // Empty src1 is possible on soft_max, but the shader needs a buffer + if (op == GGML_OP_SOFT_MAX) { + // Empty src1 and src2 are possible on soft_max, but the 
shader needs buffers + vk_subbuffer subbuf_y; + if (use_src1) { + subbuf_y = { d_Y, y_buf_offset, y_sz }; + } else { + subbuf_y = { ctx->prealloc_y, 0, ctx->prealloc_y->size }; + } + + vk_subbuffer subbuf_z; + if (use_src2) { + subbuf_z = { d_Z, z_buf_offset, z_sz }; + } else { + subbuf_z = { ctx->prealloc_y, 0, ctx->prealloc_y->size }; + } + ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, subbuf_y, subbuf_z, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src1) { ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else { ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_TYPE_CPU && op == GGML_OP_CPY) { ggml_vk_d2h_tensor_2d(ctx, subctx, d_D, 0, dst); @@ -3223,7 +3366,9 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, d_sz); } } else { - ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne02 * ne03); + GGML_ASSERT(op != GGML_OP_SOFT_MAX); + + ggml_pipeline_allocate_descriptor_sets(ctx, pipeline, ne02 * ne03); switch (dst->op) { case GGML_OP_NORM: @@ -3248,16 +3393,12 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c const uint32_t y_offset = y_sz * it_idx1; const uint32_t d_offset = d_sz * it_idx0; - if (!use_src1 && op == GGML_OP_SOFT_MAX) { - // Empty src1 is possible on soft_max, but the shader needs a buffer + if (use_src1) { ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); - } else if (use_src1) { - ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_Y, y_buf_offset + y_offset, y_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_Y, y_buf_offset + y_offset, y_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } else { ggml_vk_sync_buffers(subctx); - ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host @@ -3269,69 +3410,141 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c } static void ggml_vk_repeat(ggml_backend_vk_context * ctx, 
vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } static void ggml_vk_get_rows(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } static void ggml_vk_add(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ADD, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t src1_type_size = ggml_type_size(src1->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_ADD, { + (uint32_t)ggml_nelements(src0), + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2],(uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t)src1->ne[0], (uint32_t)src1->ne[1], (uint32_t)src1->ne[2],(uint32_t)src1->ne[3], (uint32_t)src1->nb[0] / src1_type_size, (uint32_t)src1->nb[1] / src1_type_size, (uint32_t)src1->nb[2] / src1_type_size, (uint32_t)src1->nb[3] / src1_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2],(uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, + 0, + 0.0f, 0.0f, + }); } static void ggml_vk_mul(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_MUL, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t src1_type_size = ggml_type_size(src1->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_MUL, { + (uint32_t)ggml_nelements(src0), + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2],(uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t)src1->ne[0], (uint32_t)src1->ne[1], (uint32_t)src1->ne[2],(uint32_t)src1->ne[3], (uint32_t)src1->nb[0] / src1_type_size, (uint32_t)src1->nb[1] / src1_type_size, (uint32_t)src1->nb[2] / src1_type_size, (uint32_t)src1->nb[3] / src1_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2],(uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, + 0, 
+ 0.0f, 0.0f, + }); } static void ggml_vk_scale(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SCALE, { (uint32_t)ggml_nelements(src0), 0, op_params[0], 0.0f }); + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_SCALE, { + (uint32_t)ggml_nelements(src0), + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2], (uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, + 0, + op_params[0], 0.0f + }); } static void ggml_vk_sqr(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SQR, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_SQR, { + (uint32_t)ggml_nelements(src0), + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2], (uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, + 0, + 0.0f, 0.0f, + }); } static void ggml_vk_clamp(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CLAMP, { (uint32_t)ggml_nelements(src0), 0, op_params[0], op_params[1] }); + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_CLAMP, { + (uint32_t)ggml_nelements(src0), + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2], (uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, + 0, + op_params[0], op_params[1], + }); } static void ggml_vk_cpy(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; - const int src0_type_size = ggml_type_size(src0->type); - const int dst_type_size = ggml_type_size(dst->type); - const uint32_t d_offset = (extra->offset % 
ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CPY, { + const uint32_t src0_type_size = ggml_type_size(src0->type); + const uint32_t dst_type_size = ggml_type_size(dst->type); + const uint32_t d_offset = (extra->offset % ctx->device->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; + + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_CPY, { (uint32_t)ggml_nelements(src0), - (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, - (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, + (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->ne[2], (uint32_t)src0->ne[3], (uint32_t)src0->nb[0] / src0_type_size, (uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t)src0->nb[3] / src0_type_size, + (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->ne[2], (uint32_t) dst->ne[3], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, (uint32_t) dst->nb[3] / dst_type_size, d_offset, + 0.0f, 0.0f, }); } static void ggml_vk_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); } static void ggml_vk_rms_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); } static void ggml_vk_unary(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); } static void ggml_vk_diag_mask_inf(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { int32_t * op_params = (int32_t *)dst->op_params; - ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); } -static void ggml_vk_soft_max(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_soft_max(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, const ggml_tensor * src2, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_SOFT_MAX, { (uint32_t)src0->ne[0], (uint32_t)(src1 
!= nullptr ? ggml_nrows(src1) : 0), op_params[0], 0.0f }); + + float scale = op_params[0]; + float max_bias = op_params[1]; + + const uint32_t ncols = (uint32_t)src0->ne[0]; + const uint32_t nrows_x = (uint32_t)ggml_nrows(src0); + const uint32_t nrows_y = (uint32_t)src0->ne[1]; + + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + ggml_vk_op_f32(ctx, subctx, src0, src1, src2, dst, GGML_OP_SOFT_MAX, { + ncols, + nrows_y, + src2 != nullptr ? (uint32_t)1 : (uint32_t)0, + scale, max_bias, + m0, m1, + n_head_log2, + }); } static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -3357,12 +3570,17 @@ static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, con if (is_neox) { const float theta_scale = powf(freq_base, -2.0f/n_dims); const float inv_ndims = -1.0f / n_dims; - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); } else { - ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, nullptr, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); } } +static void ggml_vk_argsort(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + int32_t * op_params = (int32_t *)dst->op_params; + ggml_vk_op_f32(ctx, subctx, src0, nullptr, nullptr, dst, GGML_OP_ARGSORT, { (uint32_t)src0->ne[0], ((ggml_sort_order) op_params[0]) == GGML_SORT_ORDER_ASC }); +} + static void ggml_vk_nop(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { // If backend is CPU, data from src0 has to be copied off the device if (dst->backend == GGML_BACKEND_TYPE_CPU) { @@ -3414,43 +3632,43 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t const size_t y_ne = k * n * batch; const size_t d_ne = m * n * batch; - vk_pipeline * p; + vk_pipeline p; std::string shname; if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_aligned_s; + p = ctx->device->pipeline_matmul_f32->a_s; shname = "F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_aligned_s; + p = ctx->device->pipeline_matmul_f16_f32->a_s; shname = "F16_F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_aligned_s; + p = ctx->device->pipeline_matmul_f16->a_s; shname = "F16_ALIGNED_S"; } else { GGML_ASSERT(false); } } else if (shader_size == 1) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_aligned_m; + p = ctx->device->pipeline_matmul_f32->a_m; shname = "F32_ALIGNED_M"; } 
else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_aligned_m; + p = ctx->device->pipeline_matmul_f16_f32->a_m; shname = "F16_F32_ALIGNED_M"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_aligned_m; + p = ctx->device->pipeline_matmul_f16->a_m; shname = "F16_ALIGNED_M"; } else { GGML_ASSERT(false); } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_aligned_l; + p = ctx->device->pipeline_matmul_f32->a_l; shname = "F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_aligned_l; + p = ctx->device->pipeline_matmul_f16_f32->a_l; shname = "F16_F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_aligned_l; + p = ctx->device->pipeline_matmul_f16->a_l; shname = "F16_ALIGNED_L"; } else { GGML_ASSERT(false); @@ -3464,43 +3682,43 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t if (k != kpad) { if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_s; + p = ctx->device->pipeline_matmul_f32->s; shname = "F32_S"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_s; + p = ctx->device->pipeline_matmul_f16_f32->s; shname = "F16_F32_S"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_s; + p = ctx->device->pipeline_matmul_f16->s; shname = "F16_S"; } } else if (shader_size == 1) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_m; + p = ctx->device->pipeline_matmul_f32->m; shname = "F32_M"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_m; + p = ctx->device->pipeline_matmul_f16_f32->m; shname = "F16_F32_M"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_m; + p = ctx->device->pipeline_matmul_f16->m; shname = "F16_M"; } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f32_l; + p = ctx->device->pipeline_matmul_f32->l; shname = "F32_L"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_f32_l; + p = ctx->device->pipeline_matmul_f16_f32->l; shname = "F16_F32_L"; } else if (std::is_same() && std::is_same()) { - p = &ctx->pipeline_matmul_f16_l; + p = ctx->device->pipeline_matmul_f16->l; shname = "F16_L"; } } } - ggml_pipeline_allocate_descriptor_sets(ctx, *p, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, p, num_it); if (split_k > 1) { - ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->device->pipeline_matmul_split_k_reduce, num_it); if (ctx->prealloc_split_k == nullptr || ctx->prealloc_split_k->size < sizeof(float) * d_ne * split_k) { // Resize buffer @@ -3530,9 +3748,11 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t } for (size_t i = 0; i < y_ne; i++) { if (std::is_same()) { - y[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; + // y[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; + y[i] = (i % k == i / k) ? 1.0f : 0.0f; } else if (std::is_same()) { - y[i] = ggml_fp32_to_fp16((rand() / (float)RAND_MAX) * 2.0f - 1.0f); + // y[i] = ggml_fp32_to_fp16((rand() / (float)RAND_MAX) * 2.0f - 1.0f); + y[i] = ggml_fp32_to_fp16((i % k == i / k) ? 
1.0f : 0.0f); } else { GGML_ASSERT(false); } @@ -3541,17 +3761,17 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t ggml_vk_buffer_write(ctx, d_X, 0, x, sizeof(X_TYPE) * k * m * batch); ggml_vk_buffer_write(ctx, d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); for (size_t i = 0; i < num_it; i++) { ggml_vk_ctx_begin(ctx, subctx); - ggml_vk_matmul(ctx, subctx, *p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); + ggml_vk_matmul(ctx, subctx, p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); ggml_vk_ctx_end(subctx); } auto begin = std::chrono::high_resolution_clock::now(); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double time = std::chrono::duration_cast(end-begin).count() / 1000.0; @@ -3630,6 +3850,8 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t std::cerr << "m = " << first_err_m << " n = " << first_err_n << " b = " << first_err_b << std::endl; std::cerr << "Actual result: " << std::endl << std::endl; ggml_vk_print_matrix_area(d, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + std::cerr << std::endl; + ggml_vk_print_matrix_area(d, GGML_TYPE_F32, m, n, first_err_m, first_err_n + 15, first_err_b); std::cerr << "Expected result: " << std::endl << std::endl; ggml_vk_print_matrix_area(d_chk, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); @@ -3655,15 +3877,15 @@ static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t free(d_chk); - ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); - ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device->transfer_queue); + ggml_vk_queue_cleanup(ctx, ctx->device->compute_queue); ggml_vk_destroy_buffer(d_X); ggml_vk_destroy_buffer(d_Y); ggml_vk_destroy_buffer(d_D); - ggml_pipeline_cleanup(*p); - ggml_pipeline_cleanup(ctx->pipeline_matmul_split_k_reduce); + ggml_pipeline_cleanup(p); + ggml_pipeline_cleanup(ctx->device->pipeline_matmul_split_k_reduce); free(x); free(y); @@ -3736,7 +3958,7 @@ static void ggml_vk_test_h2d_nc(ggml_backend_vk_context * ctx, size_t ne0, size_ data[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; } - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); ggml_vk_ctx_begin(ctx, subctx); vk_buffer buffer = ggml_vk_create_buffer_check(ctx, ggml_nbytes(tensor), vk::MemoryPropertyFlagBits::eDeviceLocal); @@ -3745,8 +3967,8 @@ static void ggml_vk_test_h2d_nc(ggml_backend_vk_context * ctx, size_t ne0, size_ ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, 
true, UINT64_MAX), "ggml_vk_test_h2d_nc waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_h2d_nc waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); ggml_vk_buffer_read(ctx, buffer, 0, result_data, ggml_nbytes(tensor)); @@ -3818,7 +4040,7 @@ static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool x[i] = rand() / (float)RAND_MAX; } - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); ggml_vk_ctx_begin(ctx, subctx); auto begin = std::chrono::high_resolution_clock::now(); @@ -3832,8 +4054,8 @@ static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); @@ -3847,8 +4069,8 @@ static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool ggml_vk_ctx_end(subctx); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); @@ -3879,6 +4101,48 @@ static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool } } +static void ggml_vk_quantize_data(const float * from, void * to, size_t ne, ggml_type quant) { + std::vector hist_cur(1 << 4, 0); + + switch(quant) { + case GGML_TYPE_F32: + memcpy(to, from, sizeof(float) * ne); + break; + case GGML_TYPE_Q4_0: + ggml_quantize_q4_0(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_1: + ggml_quantize_q4_1(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_0: + ggml_quantize_q5_0(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_1: + ggml_quantize_q5_1(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q8_0: + ggml_quantize_q8_0(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q2_K: + ggml_quantize_q2_K(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q3_K: + ggml_quantize_q3_K(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_K: + ggml_quantize_q4_K(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_K: + ggml_quantize_q5_K(from, to, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q6_K: + ggml_quantize_q6_K(from, to, ne, ne, hist_cur.data()); + break; + default: + GGML_ASSERT(false); + } +} + static void ggml_vk_test_dequant(ggml_backend_vk_context * ctx, size_t ne, ggml_type quant) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_dequant(" << ne << ")" << std::endl; @@ -3896,72 +4160,59 @@ static void ggml_vk_test_dequant(ggml_backend_vk_context * ctx, size_t ne, ggml_ x[i] = rand() / (float)RAND_MAX; } - std::vector hist_cur(1 << 4, 0); + vk_pipeline p = 
ctx->device->pipeline_dequant[quant]; - vk_pipeline& p = ctx->pipeline_dequant[quant]; - - switch(quant) { - case GGML_TYPE_Q4_0: - ggml_quantize_q4_0(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q4_1: - ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q5_0: - ggml_quantize_q5_0(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q5_1: - ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q8_0: - ggml_quantize_q8_0(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q2_K: - ggml_quantize_q2_K(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q3_K: - ggml_quantize_q3_K(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q4_K: - ggml_quantize_q4_K(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q5_K: - ggml_quantize_q5_K(x, qx, ne, ne, hist_cur.data()); - break; - case GGML_TYPE_Q6_K: - ggml_quantize_q6_K(x, qx, ne, ne, hist_cur.data()); - break; - default: - GGML_ASSERT(false); - } + ggml_vk_quantize_data(x, qx, ne, quant); ggml_pipeline_allocate_descriptor_sets(ctx, p, 1); ggml_vk_buffer_write(ctx, qx_buf, 0, qx, qx_sz); - vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); ggml_vk_ctx_begin(ctx, subctx); - const std::vector pc = { 1, (int)ne, (int)ne, (int)ne }; + const std::vector pc = { 1, (uint32_t)ne, (uint32_t)ne, (uint32_t)ne, (uint32_t)ne }; ggml_vk_dispatch_pipeline(ctx, subctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); ggml_vk_ctx_end(subctx); auto begin = std::chrono::high_resolution_clock::now(); ggml_vk_submit(subctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_dequant waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_dequant waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double ms_dequant = std::chrono::duration_cast(end-begin).count() / 1000.0; ggml_vk_buffer_read(ctx, x_buf, 0, x_chk, x_sz_f16); + int first_err = -1; + double avg_err = 0.0; for (size_t i = 0; i < ne; i++) { - avg_err += std::fabs(x[i] - ggml_fp16_to_fp32(x_chk[i])); + double error = std::fabs(x[i] - ggml_fp16_to_fp32(x_chk[i])); + avg_err += error; + + if (first_err < 0 && error > 0.05) { + first_err = i; + } } - std::cerr << "TEST DEQUANT " << ggml_type_name(quant) << " time=" << ms_dequant << "ms avg_err=" << avg_err / ne << std::endl; + avg_err /= ne; + + std::cerr << "TEST DEQUANT " << ggml_type_name(quant) << " time=" << ms_dequant << "ms avg_err=" << avg_err << std::endl; + + if (avg_err > 0.1) { + std::cerr << "first_error = " << first_err << std::endl; + std::cerr << "Actual result: " << std::endl << std::endl; + for (int i = std::max(0, first_err - 5); i < std::min((int)ne, first_err + 5); i++) { + std::cerr << ggml_fp16_to_fp32(x_chk[i]) << ", "; + } + std::cerr << std::endl << "Expected result: " << std::endl << std::endl; + for (int i = std::max(0, first_err - 5); i < std::min((int)ne, first_err + 5); i++) { + std::cerr << x[i] << ", "; + } + std::cerr << std::endl; + } ggml_vk_destroy_buffer(x_buf); ggml_vk_destroy_buffer(qx_buf); @@ -3970,6 +4221,190 @@ static void ggml_vk_test_dequant(ggml_backend_vk_context * ctx, size_t ne, ggml_ free(qx); 
free(x_chk); } + +static void ggml_vk_test_dequant_matmul(ggml_backend_vk_context * ctx, size_t m, size_t n, size_t k, size_t batch, size_t num_it, size_t split_k, size_t shader_size, ggml_type quant) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_test_dequant_matmul(" << m << ", " << n << ", " << k << ", " << batch << ", " << num_it << ", " << split_k << ", " << ggml_type_name(quant) << ")" << std::endl; +#endif + const size_t x_ne = m * k * batch; + const size_t y_ne = k * n * batch; + const size_t d_ne = m * n * batch; + + vk_pipeline p; + std::string shname; + if (shader_size == 0) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->a_s; + shname = std::string(ggml_type_name(quant)) + "_ALIGNED_S"; + } else if (shader_size == 1) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->a_m; + shname = std::string(ggml_type_name(quant)) + "_ALIGNED_M"; + } else if (shader_size == 2) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->a_l; + shname = std::string(ggml_type_name(quant)) + "_ALIGNED_L"; + } else { + GGML_ASSERT(0); + } + + const size_t kpad = ggml_vk_align_size(k, p->align); + + if (k != kpad) { + if (shader_size == 0) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->s; + shname = std::string(ggml_type_name(quant)) + "_S"; + } else if (shader_size == 1) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->m; + shname = std::string(ggml_type_name(quant)) + "_M"; + } else if (shader_size == 2) { + p = ctx->device->pipeline_dequant_mul_mat_mat[quant]->l; + shname = std::string(ggml_type_name(quant)) + "_L"; + } else { + GGML_ASSERT(0); + } + } + + const size_t x_sz = sizeof(float) * x_ne; + const size_t y_sz = sizeof(float) * y_ne; + const size_t qx_sz = x_ne * ggml_type_size(quant)/ggml_blck_size(quant); + const size_t d_sz = sizeof(float) * d_ne; + float * x = (float *) malloc(x_sz); + float * y = (float *) malloc(y_sz); + void * qx = malloc(qx_sz); + vk_buffer qx_buf = ggml_vk_create_buffer_check(ctx, qx_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer y_buf = ggml_vk_create_buffer_check(ctx, y_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_buf = ggml_vk_create_buffer_check(ctx, d_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + float * d = (float *) malloc(d_sz); + float * d_chk = (float *) malloc(d_sz); + + for (size_t i = 0; i < x_ne; i++) { + x[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; + } + + ggml_vk_quantize_data(x, qx, x_ne, quant); + + for (size_t i = 0; i < y_ne; i++) { + // y[i] = rand() / (float)RAND_MAX; + y[i] = (i % k == i / k) ? 
1.0f : 0.0f; + } + + ggml_pipeline_allocate_descriptor_sets(ctx, p, num_it); + if (split_k > 1) { + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->device->pipeline_matmul_split_k_reduce, num_it); + + if (ctx->prealloc_split_k == nullptr || ctx->prealloc_split_k->size < sizeof(float) * d_ne * split_k) { + // Resize buffer + if (ctx->prealloc_split_k != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_split_k); + } + ctx->prealloc_split_k = ggml_vk_create_buffer_check(ctx, sizeof(float) * d_ne * split_k, vk::MemoryPropertyFlagBits::eDeviceLocal); + } + } + + ggml_vk_buffer_write(ctx, qx_buf, 0, qx, qx_sz); + ggml_vk_buffer_write(ctx, y_buf, 0, y, y_sz); + + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); + for (size_t i = 0; i < num_it; i++) { + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_matmul(ctx, subctx, p, ggml_vk_subbuffer(qx_buf), ggml_vk_subbuffer(y_buf), ggml_vk_subbuffer(d_buf), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); + ggml_vk_ctx_end(subctx); + } + + auto begin = std::chrono::high_resolution_clock::now(); + + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_dequant waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); + + auto end = std::chrono::high_resolution_clock::now(); + + double time_ms = std::chrono::duration_cast(end-begin).count() / 1000.0; + ggml_vk_buffer_read(ctx, d_buf, 0, d, d_sz); + + ggml_init_params iparams = { + /*.mem_size =*/ 1024*1024*1024, + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, + }; + + ggml_context * ggml_ctx = ggml_init(iparams); + + ggml_tensor * src0_ggml = ggml_new_tensor_3d(ggml_ctx, quant, k, m, batch); + ggml_tensor * src1_ggml = ggml_new_tensor_3d(ggml_ctx, GGML_TYPE_F32, k, n, batch); + ggml_tensor * tensor_ggml = ggml_mul_mat(ggml_ctx, src0_ggml, src1_ggml); + + src0_ggml->data = qx; + src1_ggml->data = y; + tensor_ggml->data = d_chk; + + ctx->disable = true; + + ggml_cgraph * cgraph = ggml_new_graph(ggml_ctx); + ggml_build_forward_expand(cgraph, tensor_ggml); + + ggml_graph_compute_with_ctx(ggml_ctx, cgraph, 1); + + ctx->disable = false; + + ggml_free(ggml_ctx); + + double avg_err = 0.0; + int first_err_n = -1; + int first_err_m = -1; + int first_err_b = -1; + + for (size_t i = 0; i < m*n*batch; i++) { + double err = std::fabs(d[i] - d_chk[i]); + avg_err += err; + + if ((err > 0.05f || std::isnan(err)) && first_err_n == -1) { + first_err_b = i / (m * n); + first_err_n = (i % (m * n)) / m; + first_err_m = (i % (m * n)) % m; + } + } + + avg_err /= m * n; + + std::cerr << "TEST MMQ " << shname << " m=" << m << " n=" << n << " k=" << k << " batch=" << batch << " split_k=" << split_k << " matmul " << time_ms / num_it << "ms avg_err=" << avg_err << std::endl; + + if (avg_err > 0.1 || std::isnan(avg_err)) { + std::cerr << "m = " << first_err_m << " n = " << first_err_n << " b = " << first_err_b << std::endl; + std::cerr << "Actual result: " << std::endl << std::endl; + ggml_vk_print_matrix_area(d, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + std::cerr << std::endl; + std::cerr << "Expected result: " << std::endl << std::endl; + ggml_vk_print_matrix_area(d_chk, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + + if (split_k > 1) { + float * split_k_buf = (float *) malloc(sizeof(float) * d_ne * split_k); + ggml_vk_buffer_read(ctx, ctx->prealloc_split_k, 0, split_k_buf, sizeof(float) * d_ne * split_k); + + std::cerr << 
"d_buf0: " << std::endl << std::endl; + ggml_vk_print_matrix_area(split_k_buf, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + + std::cerr << "d_buf1: " << std::endl << std::endl; + ggml_vk_print_matrix_area(split_k_buf + d_ne, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + + std::cerr << "d_buf2: " << std::endl << std::endl; + ggml_vk_print_matrix_area(split_k_buf + 2 * d_ne, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + + std::cerr << "d_buf3: " << std::endl << std::endl; + ggml_vk_print_matrix_area(split_k_buf + 3 * d_ne, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); + + free(split_k_buf); + } + } + + ggml_vk_destroy_buffer(qx_buf); + ggml_vk_destroy_buffer(y_buf); + ggml_vk_destroy_buffer(d_buf); + + free(x); + free(qx); + free(y); + free(d); + free(d_chk); +} #endif static ggml_tensor_extra_gpu * ggml_vk_tensor_create_extra(ggml_tensor * tensor) { @@ -3982,18 +4417,8 @@ static ggml_tensor_extra_gpu * ggml_vk_tensor_create_extra(ggml_tensor * tensor) return extra; } -static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph * graph) { - GGML_ASSERT(node != nullptr); - - for (int i = graph->n_nodes - 1; i >= 0; i--) { - for (int j = 0; j < GGML_MAX_SRC; j++) { - if (graph->nodes[i]->src[j] == node) { - return graph->nodes[i]; - } - } - } - - return nullptr; +static bool ggml_vk_cpu_assist_op(const ggml_tensor * node) { + return node->op == GGML_OP_MUL_MAT || node->op == GGML_OP_MUL_MAT_ID; } static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggml_tensor * node){ @@ -4004,7 +4429,7 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_TYPE_GPU)); - if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && !ggml_vk_cpu_assist_op(node))) { return; } @@ -4035,7 +4460,7 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm const bool f16_f32_kernel = use_src1 && src1->type == GGML_TYPE_F32; int split_k; - if (node->op == GGML_OP_MUL_MAT) { + if (node->op == GGML_OP_MUL_MAT || node->op == GGML_OP_MUL_MAT_ID) { split_k = ggml_vk_guess_split_k(ne01, ne11, ne10); } else { split_k = 1; @@ -4044,11 +4469,11 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm const uint32_t y_ne = ne10 * ne11; const uint32_t d_ne = ne20 * ne21; - const uint64_t qx_sz = use_src0 ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? 
sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; + const uint64_t qx_sz = use_src0 ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, ctx->device->properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; const uint64_t split_k_size = split_k > 1 ? d_sz * 4 : 0; if (extra->buffer_gpu.expired()) { @@ -4076,6 +4501,7 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm case GGML_OP_DIAG_MASK_INF: case GGML_OP_SOFT_MAX: case GGML_OP_ROPE: + case GGML_OP_ARGSORT: break; case GGML_OP_UNARY: switch (ggml_get_unary_op(node)) { @@ -4088,6 +4514,7 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm } break; case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: if (ctx->prealloc_size_qx < qx_sz) { ctx->prealloc_size_qx = qx_sz; } @@ -4121,21 +4548,66 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { #endif #if defined(GGML_VULKAN_RUN_TESTS) ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, - vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); ggml_vk_test_transfer(ctx, 8192 * 1000, false); ggml_vk_test_transfer(ctx, 8192 * 1000, true); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_0); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_1); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_0); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_1); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q8_0); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q2_K); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q3_K); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_K); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_K); - ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q6_K); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_F32); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_0); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_1); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_0); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_1); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q8_0); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q2_K); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q3_K); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q4_K); + ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q5_K); + 
ggml_vk_test_dequant(ctx, 7680, GGML_TYPE_Q6_K); + + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 0); + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 1); + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 1, 2); + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 0); + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 1); + ggml_vk_test_matmul(ctx, 128, 512, 512, 2, 100, 4, 2); + + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q4_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q4_0); + + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q4_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q4_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q4_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q4_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q4_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q4_1); + + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q5_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q5_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q5_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q5_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q5_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q5_0); + + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q5_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q5_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q5_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q5_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q5_1); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q5_1); + + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 0, GGML_TYPE_Q8_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 1, GGML_TYPE_Q8_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 1, 2, GGML_TYPE_Q8_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 0, GGML_TYPE_Q8_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 1, GGML_TYPE_Q8_0); + ggml_vk_test_dequant_matmul(ctx, 128, 512, 512, 2, 100, 4, 2, GGML_TYPE_Q8_0); + + std::cerr << std::endl; const std::vector vals { 8, 8, 8, @@ -4225,7 +4697,7 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_TYPE_GPU); - if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { + if (ctx->disable || (!any_on_device && !ggml_vk_cpu_assist_op(node)) || (ggml_vk_cpu_assist_op(node) && !any_on_device && 
!ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { return; } @@ -4237,6 +4709,7 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod const ggml_tensor * src0 = node->src[0]; const ggml_tensor * src1 = node->src[1]; + const ggml_tensor * src2 = node->src[2]; ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) node->extra; @@ -4271,7 +4744,9 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod case GGML_OP_SOFT_MAX: case GGML_OP_ROPE: case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: case GGML_OP_NONE: + case GGML_OP_ARGSORT: break; default: if (any_on_device) { @@ -4282,7 +4757,7 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod } if (ctx->compute_ctx == nullptr) { - ctx->compute_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ctx->compute_ctx = ggml_vk_create_context(ctx, ctx->device->compute_queue); ggml_vk_ctx_begin(ctx, ctx->compute_ctx); } @@ -4353,16 +4828,25 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod break; case GGML_OP_SOFT_MAX: - ggml_vk_soft_max(ctx, ctx->compute_ctx, src0, src1, node); + ggml_vk_soft_max(ctx, ctx->compute_ctx, src0, src1, src2, node); break; case GGML_OP_ROPE: ggml_vk_rope(ctx, ctx->compute_ctx, src0, src1, node); + break; + case GGML_OP_ARGSORT: + ggml_vk_argsort(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_MUL_MAT: ggml_vk_mul_mat(ctx, ctx->compute_ctx, src0, src1, node); + break; + case GGML_OP_MUL_MAT_ID: + //ggml_vk_mul_mat_id(ctx, ctx->compute_ctx, src0, src1, node); + std::cerr << "ggml_vulkan: GGML_OP_MUL_MAT_ID not implemented yet." << std::endl; + GGML_ASSERT(false); + break; default: return; @@ -4389,7 +4873,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); - if (ctx->disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && !ggml_vk_cpu_assist_op(tensor))) { return false; } @@ -4415,6 +4899,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NONE: + case GGML_OP_ARGSORT: extra = (ggml_tensor_extra_gpu *) tensor->extra; break; @@ -4430,6 +4915,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ } break; case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: if (!any_on_device && !ggml_vk_can_mul_mat(tensor->src[0], tensor->src[1], tensor)) { return false; } @@ -4475,8 +4961,8 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ } if (tensor == subctx.exit_tensor) { - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); // Do staging buffer copies for (auto& cpy : subctx.out_memcpys) { @@ -4504,20 +4990,25 @@ static void ggml_vk_graph_cleanup(ggml_backend_vk_context * ctx) { } ctx->gc.temp_buffers.clear(); - for (auto * pipeline : ctx->gc.pipelines) { - ggml_pipeline_cleanup(*pipeline); + for (auto& pipeline : 
ctx->device->pipelines) { + if (pipeline.expired()) { + continue; + } + + vk_pipeline pl = pipeline.lock(); + ggml_pipeline_cleanup(pl); } - ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); - ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_queue_cleanup(ctx, ctx->device->compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device->transfer_queue); for (size_t i = 0; i < ctx->gc.semaphores.size(); i++) { - ctx->device.lock()->device.destroySemaphore({ ctx->gc.semaphores[i].s }); + ctx->device->device.destroySemaphore({ ctx->gc.semaphores[i].s }); } ctx->gc.semaphores.clear(); for (size_t i = 0; i < ctx->gc.tl_semaphores.size(); i++) { - ctx->device.lock()->device.destroySemaphore({ ctx->gc.tl_semaphores[i].s }); + ctx->device->device.destroySemaphore({ ctx->gc.tl_semaphores[i].s }); } ctx->gc.tl_semaphores.clear(); ctx->semaphore_idx = 0; @@ -4525,7 +5016,7 @@ static void ggml_vk_graph_cleanup(ggml_backend_vk_context * ctx) { ctx->event_idx = 0; for (auto& event : ctx->gc.events) { - ctx->device.lock()->device.resetEvent(event); + ctx->device->device.resetEvent(event); } ctx->staging_offset = 0; @@ -4562,21 +5053,11 @@ static void ggml_vk_cleanup(ggml_backend_vk_context * ctx) { ctx->staging_size = 0; for (auto& event : ctx->gc.events) { - ctx->device.lock()->device.destroyEvent(event); + ctx->device->device.destroyEvent(event); } ctx->gc.events.clear(); - for (auto* pipeline : ctx->gc.pipelines) { - ggml_vk_destroy_pipeline(ctx, pipeline); - } - ctx->gc.pipelines.clear(); - - ctx->device.lock()->device.destroyFence(ctx->fence); - - ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->compute_queue.pool); - if (!ctx->device.lock()->single_queue) { - ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->transfer_queue.pool); - } + ctx->device->device.destroyFence(ctx->fence); } GGML_CALL static int ggml_vk_get_device_count() { @@ -4787,7 +5268,6 @@ GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t bu GGML_CALL static bool ggml_backend_vk_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { if (ggml_backend_buffer_is_vk(src->buffer)) { - ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; @@ -4799,6 +5279,8 @@ GGML_CALL static bool ggml_backend_vk_buffer_cpy_tensor(ggml_backend_buffer_t bu return true; } return false; + + UNUSED(buffer); } GGML_CALL static void ggml_backend_vk_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { @@ -4845,12 +5327,12 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer( GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; - return ctx->ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; + return ctx->ctx->device->properties.limits.minStorageBufferOffsetAlignment; } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; - return ctx->ctx->device.lock()->max_memory_allocation_size; + return ctx->ctx->device->max_memory_allocation_size; } GGML_CALL static size_t 
ggml_backend_vk_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { @@ -4936,7 +5418,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_host_buffer_type_alloc_bu } GGML_CALL static size_t ggml_backend_vk_host_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return vk_instance.contexts[0].device.lock()->properties.limits.minMemoryMapAlignment; + return vk_instance.contexts[0].device->properties.limits.minMemoryMapAlignment; UNUSED(buft); } @@ -4981,8 +5463,7 @@ GGML_CALL static void ggml_backend_vk_free(ggml_backend_t backend) { ggml_vk_cleanup(ctx); - // Release device - vk_instance.devices[ctx->idx].reset(); + ctx->device.reset(); ctx->initialized = false; vk_instance.initialized[idx] = false; @@ -5011,7 +5492,7 @@ GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, g if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } @@ -5032,7 +5513,7 @@ GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, c if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } @@ -5052,7 +5533,7 @@ GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, c if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device->transfer_queue); ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } @@ -5082,8 +5563,8 @@ GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { } ggml_vk_submit(ctx->transfer_ctx, ctx->fence); - VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); - ctx->device.lock()->device.resetFences({ ctx->fence }); + VK_CHECK(ctx->device->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); + ctx->device->device.resetFences({ ctx->fence }); for (auto& cpy : ctx->transfer_ctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); @@ -5153,6 +5634,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const } break; case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: { struct ggml_tensor * a; struct ggml_tensor * b; @@ -5226,6 +5708,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const case GGML_OP_CONT: case GGML_OP_DIAG_MASK_INF: case GGML_OP_SOFT_MAX: + case GGML_OP_ARGSORT: return true; default: return false; @@ -5505,6 +5988,7 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ ggml_tensor * src0 = tensor->src[0]; ggml_tensor * src1 = tensor->src[1]; + ggml_tensor * src2 = tensor->src[2]; struct ggml_init_params iparams = { /*.mem_size =*/ 1024*1024*1024, @@ -5516,13 +6000,16 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ struct ggml_tensor * src0_clone = nullptr; struct ggml_tensor * src1_clone = nullptr; + struct ggml_tensor * src2_clone = nullptr; struct ggml_tensor * tensor_clone = nullptr; size_t src0_size; size_t 
src1_size; + size_t src2_size; void * src0_buffer; void * src1_buffer; + void * src2_buffer; if (src0 != nullptr) { src0_clone = ggml_dup_tensor(ggml_ctx, src0); @@ -5536,12 +6023,12 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); } else if (src0->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src0->extra; + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); uint64_t offset = extra->offset; if (!ggml_is_contiguous(src0) && ggml_vk_dim01_contiguous(src0)) { for (int i3 = 0; i3 < src0->ne[3]; i3++) { for (int i2 = 0; i2 < src0->ne[2]; i2++) { const int idx = i3*src0->ne[2] + i2; - vk_buffer buffer_gpu = extra->buffer_gpu.lock(); ggml_vk_buffer_read(ctx, buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); } } @@ -5552,7 +6039,6 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src0_clone->nb[i] = src0_clone->nb[i - 1]*src0_clone->ne[i - 1]; } } else { - vk_buffer buffer_gpu = extra->buffer_gpu.lock(); if (offset + src0_size >= buffer_gpu->size) { src0_size = buffer_gpu->size - offset; } @@ -5581,12 +6067,12 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); } else if (src1->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src1->extra; + vk_buffer buffer_gpu = extra->buffer_gpu.lock(); uint64_t offset = extra->offset; if (!ggml_is_contiguous(src1) && ggml_vk_dim01_contiguous(src1)) { for (int i3 = 0; i3 < src1->ne[3]; i3++) { for (int i2 = 0; i2 < src1->ne[2]; i2++) { const int idx = i3*src1->ne[2] + i2; - vk_buffer buffer_gpu = extra->buffer_gpu.lock(); ggml_vk_buffer_read(ctx, buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); } } @@ -5597,7 +6083,6 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src1_clone->nb[i] = src1_clone->nb[i - 1]*src1_clone->ne[i - 1]; } } else { - vk_buffer buffer_gpu = extra->buffer_gpu.lock(); if (offset + src1_size >= buffer_gpu->size) { src1_size = buffer_gpu->size - offset; } @@ -5630,6 +6115,66 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ ggml_vk_check_tensor(std::string(ggml_op_name(tensor->op)) + "->src1", src1_clone); } + if (src2 != nullptr) { + src2_clone = ggml_dup_tensor(ggml_ctx, src2); + + src2_size = ggml_nbytes(src2); + + src2_buffer = malloc(src2_size); + src2_clone->data = src2_buffer; + if (src2->backend == GGML_BACKEND_TYPE_CPU) { + memcpy(src2_clone->data, src2->data, src2_size); + memcpy(src2_clone->nb, src2->nb, sizeof(size_t) * GGML_MAX_DIMS); + } else if (src2->backend == GGML_BACKEND_TYPE_GPU) { + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src2->extra; + vk_buffer buf = extra->buffer_gpu.lock(); + uint64_t offset = extra->offset; + if (!ggml_is_contiguous(src2) && ggml_vk_dim01_contiguous(src2)) { + for (int i3 = 0; i3 < src2->ne[3]; i3++) { + for (int i2 = 0; i2 < src2->ne[2]; i2++) { + const int idx = i3*src2->ne[2] + i2; + ggml_vk_buffer_read(ctx, buf, offset + idx * src2->nb[2], ((char *)src2_clone->data + idx * src2_clone->nb[2]), src2->ne[1] * src2->nb[1]); + } + } + + src2_clone->nb[0] = src2->nb[0]; + src2_clone->nb[1] = src2->nb[1]; + for (int i = 2; i < GGML_MAX_DIMS; i++) { + 
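+                        // dims 2..3 of the clone are repacked contiguously on top of the nb[0]/nb[1] just copied from src2, mirroring the src0/src1 paths above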
src2_clone->nb[i] = src2_clone->nb[i - 1]*src2_clone->ne[i - 1]; + } + } else { + if (offset + src2_size >= buf->size) { + src2_size = buf->size - offset; + } + ggml_vk_buffer_read(ctx, buf, offset, src2_clone->data, src2_size); + memcpy(src2_clone->nb, src2->nb, sizeof(size_t) * GGML_MAX_DIMS); + } + } else { + GGML_ASSERT(false); + } + + if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { + ggml_vk_print_tensor(ctx, src2, "src2"); + std::cerr << "TENSOR CHECK: " << ggml_op_name(src2_clone->op) << " (check " << check_counter << ")" << std::endl; + std::cerr << "src2_clone=" << tensor << " src2_clone->backend: " << src2_clone->backend << " src2_clone->type: " << ggml_type_name(src2_clone->type) << " ne0=" << src2_clone->ne[0] << " nb0=" << src2_clone->nb[0] << " ne1=" << src2_clone->ne[1] << " nb1=" << src2_clone->nb[1] << " ne2=" << src2_clone->ne[2] << " nb2=" << src2_clone->nb[2] << " ne3=" << src2_clone->ne[3] << " nb3=" << src2_clone->nb[3] << std::endl; + if (src2->src[0] != nullptr) { + std::cerr << "src2->src[0]=" << src2->src[0] << " op=" << ggml_op_name(src2->src[0]->op) << " type=" << ggml_type_name(src2->src[0]->type) << " backend=" << src2->src[0]->backend << " ne0=" << src2->src[0]->ne[0] << " nb0=" << src2->src[0]->nb[0] << " ne1=" << src2->src[0]->ne[1] << " nb1=" << src2->src[0]->nb[1] << " ne2=" << src2->src[0]->ne[2] << " nb2=" << src2->src[0]->nb[2] << " ne3=" << src2->src[0]->ne[3] << " nb3=" << src2->src[0]->nb[3] << std::endl; + } + if (src2->src[1] != nullptr) { + std::cerr << "src2->src[1]=" << src2->src[1] << " op=" << ggml_op_name(src2->src[1]->op) << " type=" << ggml_type_name(src2->src[1]->type) << " backend=" << src2->src[1]->backend << " ne0=" << src2->src[1]->ne[0] << " nb0=" << src2->src[1]->nb[0] << " ne1=" << src2->src[1]->ne[1] << " nb1=" << src2->src[1]->nb[1] << " ne2=" << src2->src[1]->ne[2] << " nb2=" << src2->src[1]->nb[2] << " ne3=" << src2->src[1]->ne[3] << " nb3=" << src2->src[1]->nb[3] << std::endl; + } + std::cerr << std::endl << "Result:" << std::endl; + ggml_vk_print_tensor_area(src2_clone, src2_clone->data, 5, 5, 0, 0); + std::cerr << std::endl; + std::cerr << std::endl << "Result:" << std::endl; + ggml_vk_print_tensor_area(src2_clone, src2_clone->data, 5, 5, 1, 0); + std::cerr << std::endl; + std::vector done; + ggml_vk_print_graph_origin(src2_clone, done); + } + + ggml_vk_check_tensor(std::string(ggml_op_name(tensor->op)) + "->src2", src2_clone); + } if (tensor->op == GGML_OP_MUL_MAT) { tensor_clone = ggml_mul_mat(ggml_ctx, src0_clone, src1_clone); @@ -5648,7 +6193,11 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ } else if (tensor->op == GGML_OP_RMS_NORM) { tensor_clone = ggml_rms_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_SOFT_MAX) { + if (src1 != nullptr) { + tensor_clone = ggml_soft_max_ext(ggml_ctx, src0_clone, src1_clone, src2_clone, ((float *)tensor->op_params)[0], ((float *)tensor->op_params)[1]); + } else { tensor_clone = ggml_soft_max(ggml_ctx, src0_clone); + } } else if (tensor->op == GGML_OP_DIAG_MASK_INF) { tensor_clone = ggml_diag_mask_inf(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_ROPE) { @@ -5728,6 +6277,9 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ if (src1 != nullptr) { free(src1_buffer); } + if (src2 != nullptr) { + free(src1_buffer); + } ggml_free(ggml_ctx); } diff --git a/ggml-vulkan.h b/ggml-vulkan.h index 9645126b4..e4317c3e0 100644 --- 
a/ggml-vulkan.h +++ b/ggml-vulkan.h @@ -10,6 +10,7 @@ extern "C" { #define GGML_VK_NAME "Vulkan" #define GGML_VK_MAX_DEVICES 16 +GGML_API void ggml_vk_instance_init(void); GGML_API void ggml_vk_init_cpu_assist(void); GGML_API void ggml_vk_preallocate_buffers_graph_cpu_assist(struct ggml_tensor * node); diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index b2e86e182..4a6f5e323 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -64,6 +64,7 @@ struct block_q5_0 #define A_TYPE block_q5_0 """ shader_q5_1_defines = """ +#extension GL_EXT_shader_explicit_arithmetic_types_int16 : require #define QUANT_K 32 #define QUANT_R 2 @@ -187,7 +188,8 @@ v = (v - 16.0f) * d; shader_q5_1_dequant_func = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const float m = float(data_a[ib].m); \ -const ivec2 qh = ivec2(((data_a[ib].qh >> iqs) << 4) & 0x10, (data_a[ib].qh >> (iqs + 12)) & 0x10); \ +const uint uint_qh = data_a[ib].qh; \ +const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); \ const uint vui = uint(data_a[ib].qs[iqs]); \ vec2 v = vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y); \ v = v*d + m; @@ -206,12 +208,15 @@ mulmat_head = """#version 450 #extension GL_EXT_control_flow_attributes : enable #extension GL_EXT_shader_16bit_storage : require -#ifndef LOAD_VEC -#define LOAD_VEC 1 +#ifndef LOAD_VEC_A +#define LOAD_VEC_A 1 +#endif +#ifndef LOAD_VEC_B +#define LOAD_VEC_B 1 #endif """ -mulmat_body = """ +mulmat_body1 = """ layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; @@ -240,7 +245,7 @@ layout (push_constant) uniform parameter layout (constant_id = 1) const uint BM = 64; layout (constant_id = 2) const uint BN = 64; -layout (constant_id = 3) const uint BK = 16; +layout (constant_id = 3) const uint BK = 16; // Assumed to be 32 if working with a quant layout (constant_id = 4) const uint WM = 32; layout (constant_id = 5) const uint WN = 32; layout (constant_id = 6) const uint WMITER = 2; @@ -277,16 +282,19 @@ void main() { const uint tiwr = tiw % (WSUBM / TM); const uint tiwc = tiw / (WSUBM / TM); - const uint loadr = gl_LocalInvocationID.x % (BK / LOAD_VEC); - const uint loadc = gl_LocalInvocationID.x / (BK / LOAD_VEC); + const uint loadr_a = gl_LocalInvocationID.x % (BK / LOAD_VEC_A); + const uint loadc_a = gl_LocalInvocationID.x / (BK / LOAD_VEC_A); + const uint loadr_b = gl_LocalInvocationID.x % (BK / LOAD_VEC_B); + const uint loadc_b = gl_LocalInvocationID.x / (BK / LOAD_VEC_B); - const uint loadstride = gl_WorkGroupSize.x * LOAD_VEC / BK; + const uint loadstride_a = gl_WorkGroupSize.x * LOAD_VEC_A / BK; + const uint loadstride_b = gl_WorkGroupSize.x * LOAD_VEC_B / BK; const uint start_k = ik * p.k_split; const uint end_k = min(p.K, (ik + 1) * p.k_split); - uint pos_a = (batch_idx_a * p.batch_stride_a + ir * BM * p.stride_a + start_k) / LOAD_VEC; - uint pos_b = (gl_GlobalInvocationID.z * p.batch_stride_b + ic * BN * p.stride_b + start_k) / LOAD_VEC; + uint pos_a = (batch_idx_a * p.batch_stride_a + ir * BM * p.stride_a + start_k) / LOAD_VEC_A; + uint pos_b = (gl_GlobalInvocationID.z * p.batch_stride_b + ic * BN * p.stride_b + start_k) / LOAD_VEC_B; float sums[WMITER * TM * WNITER * TN]; FLOAT_TYPE cache_a[WMITER * TM]; @@ -297,61 +305,145 @@ void main() { } [[unroll]] for (uint block = start_k; block < end_k; block += BK) { - [[unroll]] for (uint l = 0; l < BM; l += loadstride) { -#if LOAD_VEC == 8 - const uint idx = pos_a + (loadc + 
l) * p.stride_a / LOAD_VEC + loadr; - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 0] = FLOAT_TYPE(data_a[idx][0].x); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 1] = FLOAT_TYPE(data_a[idx][0].y); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 2] = FLOAT_TYPE(data_a[idx][0].z); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 3] = FLOAT_TYPE(data_a[idx][0].w); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 4] = FLOAT_TYPE(data_a[idx][1].x); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 5] = FLOAT_TYPE(data_a[idx][1].y); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 6] = FLOAT_TYPE(data_a[idx][1].z); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 7] = FLOAT_TYPE(data_a[idx][1].w); -#elif LOAD_VEC == 4 - const uint idx = pos_a + (loadc + l) * p.stride_a / LOAD_VEC + loadr; - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 0] = FLOAT_TYPE(data_a[idx].x); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 1] = FLOAT_TYPE(data_a[idx].y); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 2] = FLOAT_TYPE(data_a[idx].z); - buf_a[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 3] = FLOAT_TYPE(data_a[idx].w); + [[unroll]] for (uint l = 0; l < BM; l += loadstride_a) {""" + +mulmat_load_scalar = """ +#if LOAD_VEC_A == 8 + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + buf_a[buf_idx ] = FLOAT_TYPE(data_a[idx][0].x); + buf_a[buf_idx + 1] = FLOAT_TYPE(data_a[idx][0].y); + buf_a[buf_idx + 2] = FLOAT_TYPE(data_a[idx][0].z); + buf_a[buf_idx + 3] = FLOAT_TYPE(data_a[idx][0].w); + buf_a[buf_idx + 4] = FLOAT_TYPE(data_a[idx][1].x); + buf_a[buf_idx + 5] = FLOAT_TYPE(data_a[idx][1].y); + buf_a[buf_idx + 6] = FLOAT_TYPE(data_a[idx][1].z); + buf_a[buf_idx + 7] = FLOAT_TYPE(data_a[idx][1].w); +#elif LOAD_VEC_A == 4 + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + buf_a[buf_idx ] = FLOAT_TYPE(data_a[idx].x); + buf_a[buf_idx + 1] = FLOAT_TYPE(data_a[idx].y); + buf_a[buf_idx + 2] = FLOAT_TYPE(data_a[idx].z); + buf_a[buf_idx + 3] = FLOAT_TYPE(data_a[idx].w); #else - if (ir * BM + loadc + l < p.M && block + loadr < end_k) { - buf_a[(loadc + l) * (BK+1) + loadr] = FLOAT_TYPE(data_a[pos_a + (loadc + l) * p.stride_a + loadr]); + if (ir * BM + loadc_a + l < p.M && block + loadr_a < end_k) { + buf_a[(loadc_a + l) * (BK+1) + loadr_a] = FLOAT_TYPE(data_a[pos_a + (loadc_a + l) * p.stride_a + loadr_a]); } else { - buf_a[(loadc + l) * (BK+1) + loadr] = FLOAT_TYPE(0.0f); + buf_a[(loadc_a + l) * (BK+1) + loadr_a] = FLOAT_TYPE(0.0f); } #endif +""" + +mulmat_load_q4_0 = """ + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + + const uint ib = idx / 16; + const uint iqs = idx & 0xF; + + const float d = float(data_a[ib].d); + const uint vui = uint(data_a[ib].qs[iqs]); + const vec2 v = (vec2(vui & 0xF, vui >> 4) - 8.0f) * d; + + buf_a[buf_idx ] = FLOAT_TYPE(v.x); + buf_a[buf_idx + 16] = FLOAT_TYPE(v.y);""" + +mulmat_load_q4_1 = """ + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + + const uint ib = idx / 16; + const uint iqs = idx & 0xF; + + const float d = float(data_a[ib].d); + const float m = float(data_a[ib].m); + const uint vui = uint(data_a[ib].qs[iqs]); + const vec2 v = vec2(vui & 0xF, vui >> 4) * d + m; + + buf_a[buf_idx ] = FLOAT_TYPE(v.x); 
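+    // each quant byte packs two values that sit 16 positions apart in the 32-wide block, so v.y lands 16 columns after v.x in buf_a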
+ buf_a[buf_idx + 16] = FLOAT_TYPE(v.y);""" + +mulmat_load_q5_0 = """ + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + + const uint ib = idx / 16; + const uint iqs = idx & 0xF; + + const float d = float(data_a[ib].d); + const uint uint_qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; + const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); + const uint vui = uint(data_a[ib].qs[iqs]); + const vec2 v = (vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y) - 16.0f) * d; + + buf_a[buf_idx ] = FLOAT_TYPE(v.x); + buf_a[buf_idx + 16] = FLOAT_TYPE(v.y);""" + +mulmat_load_q5_1 = """ + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a; + + const uint ib = idx / 16; + const uint iqs = idx & 0xF; + + const float d = float(data_a[ib].d); + const float m = float(data_a[ib].m); + const uint uint_qh = data_a[ib].qh; + const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); + const uint vui = uint(data_a[ib].qs[iqs]); + const vec2 v = vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y) * d + m; + + buf_a[buf_idx ] = FLOAT_TYPE(v.x); + buf_a[buf_idx + 16] = FLOAT_TYPE(v.y);""" + +mulmat_load_q8_0 = """ + const uint idx = pos_a + (loadc_a + l) * p.stride_a / LOAD_VEC_A + loadr_a; + const uint buf_idx = (loadc_a + l) * (BK+1) + loadr_a * LOAD_VEC_A; + + const uint ib = idx / 16; + const uint iqs = (idx & 0xF) * 2; + + const float d = float(data_a[ib].d); + const vec2 v = vec2(int(data_a[ib].qs[iqs]), int(data_a[ib].qs[iqs + 1])) * d; + + buf_a[buf_idx ] = FLOAT_TYPE(v.x); + buf_a[buf_idx + 1] = FLOAT_TYPE(v.y);""" + +mulmat_body2 = """ } - [[unroll]] for (uint l = 0; l < BN; l += loadstride) { -#if LOAD_VEC == 8 - const uint idx = pos_b + (loadc + l) * p.stride_b / LOAD_VEC + loadr; - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 0] = FLOAT_TYPE(data_b[idx][0].x); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 1] = FLOAT_TYPE(data_b[idx][0].y); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 2] = FLOAT_TYPE(data_b[idx][0].z); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 3] = FLOAT_TYPE(data_b[idx][0].w); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 4] = FLOAT_TYPE(data_b[idx][1].x); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 5] = FLOAT_TYPE(data_b[idx][1].y); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 6] = FLOAT_TYPE(data_b[idx][1].z); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 7] = FLOAT_TYPE(data_b[idx][1].w); -#elif LOAD_VEC == 4 - const uint idx = pos_b + (loadc + l) * p.stride_b / LOAD_VEC + loadr; - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 0] = FLOAT_TYPE(data_b[idx].x); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 1] = FLOAT_TYPE(data_b[idx].y); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 2] = FLOAT_TYPE(data_b[idx].z); - buf_b[(loadc + l) * (BK+1) + loadr * LOAD_VEC + 3] = FLOAT_TYPE(data_b[idx].w); + [[unroll]] for (uint l = 0; l < BN; l += loadstride_b) { +#if LOAD_VEC_B == 8 + const uint idx = pos_b + (loadc_b + l) * p.stride_b / LOAD_VEC_B + loadr_b; + const uint buf_idx = (loadc_b + l) * (BK+1) + loadr_b * LOAD_VEC_B; + buf_b[buf_idx + 0] = FLOAT_TYPE(data_b[idx][0].x); + buf_b[buf_idx + 1] = FLOAT_TYPE(data_b[idx][0].y); + buf_b[buf_idx + 2] = FLOAT_TYPE(data_b[idx][0].z); + buf_b[buf_idx + 3] = FLOAT_TYPE(data_b[idx][0].w); + buf_b[buf_idx + 4] = FLOAT_TYPE(data_b[idx][1].x); + buf_b[buf_idx + 5] = 
FLOAT_TYPE(data_b[idx][1].y); + buf_b[buf_idx + 6] = FLOAT_TYPE(data_b[idx][1].z); + buf_b[buf_idx + 7] = FLOAT_TYPE(data_b[idx][1].w); +#elif LOAD_VEC_B == 4 + const uint idx = pos_b + (loadc_b + l) * p.stride_b / LOAD_VEC_B + loadr_b; + const uint buf_idx = (loadc_b + l) * (BK+1) + loadr_b * LOAD_VEC_B; + buf_b[buf_idx + 0] = FLOAT_TYPE(data_b[idx].x); + buf_b[buf_idx + 1] = FLOAT_TYPE(data_b[idx].y); + buf_b[buf_idx + 2] = FLOAT_TYPE(data_b[idx].z); + buf_b[buf_idx + 3] = FLOAT_TYPE(data_b[idx].w); #else - if (ic * BN + loadc + l < p.N && block + loadr < end_k) { - buf_b[(loadc + l) * (BK+1) + loadr] = FLOAT_TYPE(data_b[pos_b + (loadc + l) * p.stride_b + loadr]); + if (ic * BN + loadc_b + l < p.N && block + loadr_b < end_k) { + buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(data_b[pos_b + (loadc_b + l) * p.stride_b + loadr_b]); } else { - buf_b[(loadc + l) * (BK+1) + loadr] = FLOAT_TYPE(0.0f); + buf_b[(loadc_b + l) * (BK+1) + loadr_b] = FLOAT_TYPE(0.0f); } #endif } barrier(); - pos_a += BK / LOAD_VEC; - pos_b += BK / LOAD_VEC; + pos_a += BK / LOAD_VEC_A; + pos_b += BK / LOAD_VEC_B; for (uint i = 0; i < BK; i++) { // Load from shared into cache @@ -438,45 +530,191 @@ dequant_head = """#version 450 #extension GL_EXT_control_flow_attributes : require #extension GL_EXT_shader_16bit_storage : require -""" - -dequant_body = """ -layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; -layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; layout (push_constant) uniform parameter { - int M; - int K; - int stride_a; - int stride_b; + uint M; + uint K; + uint stride_a; + uint stride_b; + uint nel; } p; +""" + +dequant_f32_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {float data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; void main() { - const int i = int(gl_GlobalInvocationID.x); + const uint i = gl_GlobalInvocationID.x * 16; - // Transposed - const int row = i % (p.K / QUANT_K); - const int col = i / (p.K / QUANT_K); - - if (row * QUANT_K >= p.K || col >= p.M) { + if (i >= p.nel) { return; } - const int stride_a = p.stride_a / QUANT_K; + [[unroll]] for (uint l = 0; l < 16; l++) { + data_b[i + l] = D_TYPE(data_a[i + l]); + } +} +""" - const int ib = col * stride_a + row; +dequant_q4_0_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; - const int y_offset = QUANT_R == 1 ? 1 : QUANT_K/2; - const int step = QUANT_R == 1 ? 
2 : 1; +layout (binding = 0) readonly buffer A {block_q4_0 data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; - [[unroll]] for (int iqs = 0; iqs < QUANT_K/QUANT_R; iqs += step) { - DEQUANT_FUNC +void main() { + const uint i = gl_WorkGroupID.x * 4 + gl_LocalInvocationID.x / 64; - data_b[col * p.stride_b + row*QUANT_K + iqs + 0 ] = D_TYPE(v.x); - data_b[col * p.stride_b + row*QUANT_K + iqs + y_offset] = D_TYPE(v.y); + const uint tid = gl_LocalInvocationID.x % 64; + const uint il = tid/32; + const uint ir = tid%32; + const uint ib = 32*i + ir; + if (ib >= p.nel / 32) { + return; + } + + const uint b_idx = 1024*i + 32*ir + 8*il; + + const float d = float(data_a[ib].d); + const float dm = -8.0f * d; + + const uint q_idx = 8*il; + + [[unroll]] for (uint l = 0; l < 8; ++l) { + data_b[b_idx + l + 0] = D_TYPE(d * (data_a[ib].qs[q_idx + l] & 0xF) + dm); + data_b[b_idx + l + 16] = D_TYPE(d * (data_a[ib].qs[q_idx + l] >> 4) + dm); + } +} +""" + +dequant_q4_1_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {block_q4_1 data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; + +void main() { + const uint i = gl_WorkGroupID.x * 4 + gl_LocalInvocationID.x / 64; + + const uint tid = gl_LocalInvocationID.x % 64; + const uint il = tid/32; + const uint ir = tid%32; + const uint ib = 32*i + ir; + if (ib >= p.nel / 32) { + return; + } + + const uint b_idx = 1024*i + 32*ir + 8*il; + + const float d = float(data_a[ib].d); + const float m = float(data_a[ib].m); + + const uint q_idx = 8*il; + + [[unroll]] for (uint l = 0; l < 8; ++l) { + data_b[b_idx + l + 0] = D_TYPE(d * (data_a[ib].qs[q_idx + l] & 0xF) + m); + data_b[b_idx + l + 16] = D_TYPE(d * (data_a[ib].qs[q_idx + l] >> 4) + m); + } +} +""" + +dequant_q5_0_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {block_q5_0 data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; + +void main() { + const uint i = gl_WorkGroupID.x * 4 + gl_LocalInvocationID.x / 64; + + const uint tid = gl_LocalInvocationID.x % 64; + const uint il = tid/32; + const uint ir = tid%32; + const uint ib = 32*i + ir; + if (ib >= p.nel / 32) { + return; + } + + const uint b_idx = 1024*i + 32*ir + 8*il; + + const float d = float(data_a[ib].d); + const uint qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; + + const uint q_idx = 8*il; + + [[unroll]] for (uint l = 0; l < 8; ++l) { + const uint iqs = q_idx + l; + const uint vui = uint(data_a[ib].qs[iqs]); + data_b[b_idx + l + 0] = D_TYPE(d * (((vui & 0xF) | (((qh >> iqs) << 4) & 0x10)) - 16.0f)); + data_b[b_idx + l + 16] = D_TYPE(d * (((vui >> 4) | ((qh >> (iqs + 12)) & 0x10)) - 16.0f)); + } +} +""" + +dequant_q5_1_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {block_q5_1 data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; + +void main() { + const uint i = gl_WorkGroupID.x * 4 + gl_LocalInvocationID.x / 64; + + const uint tid = gl_LocalInvocationID.x % 64; + const uint il = tid/32; + const uint ir = tid%32; + const uint ib = 32*i + ir; + if (ib >= p.nel / 32) { + return; + } + + const uint b_idx = 1024*i + 32*ir + 8*il; + + const float d = float(data_a[ib].d); + const float m = float(data_a[ib].m); + const uint qh = data_a[ib].qh; + + const uint q_idx = 8*il; + + [[unroll]] for (uint l = 0; l < 8; ++l) { + const uint iqs = q_idx + l; + const uint vui = 
uint(data_a[ib].qs[iqs]); + data_b[b_idx + l + 0] = D_TYPE(d * (((vui & 0xF) | (((qh >> iqs) << 4) & 0x10))) + m); + data_b[b_idx + l + 16] = D_TYPE(d * (((vui >> 4) | ((qh >> (iqs + 12)) & 0x10))) + m); + } +} +""" + +dequant_q8_0_body = """ +layout(local_size_x = 256, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {block_q8_0 data_a[];}; +layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; + +void main() { + const uint i = gl_WorkGroupID.x * 4 + gl_LocalInvocationID.x / 64; + + const uint tid = gl_LocalInvocationID.x % 64; + const uint il = tid/32; + const uint ir = tid%32; + const uint ib = 32*i + ir; + if (ib >= p.nel / 32) { + return; + } + + const uint b_idx = 1024*i + 32*ir + 16*il; + + const float d = float(data_a[ib].d); + + const uint q_idx = 16*il; + + [[unroll]] for (uint l = 0; l < 16; l += 2) { + data_b[b_idx + l ] = D_TYPE(d * data_a[ib].qs[q_idx + l ]); + data_b[b_idx + l + 1] = D_TYPE(d * data_a[ib].qs[q_idx + l + 1]); } } """ @@ -488,29 +726,21 @@ layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; -layout (push_constant) uniform parameter -{ - int M; - int K; - int stride_a; - int stride_b; -} p; - void main() { - [[unroll]] for (int wgy = 0; wgy < 256; wgy++) { - const int i = int(gl_WorkGroupID.x * 256 + wgy); + [[unroll]] for (uint wgy = 0; wgy < 256; wgy++) { + const uint i = gl_WorkGroupID.x * 256 + wgy; if (i >= p.M * p.K / QUANT_K) { return; } - const int tid = int(gl_LocalInvocationID.x); - const int ip = tid / 32; - const int il = tid - 32 * ip; - const int is = 8 * ip + il / 16; + const uint tid = gl_LocalInvocationID.x; + const uint ip = tid / 32; + const uint il = tid - 32 * ip; + const uint is = 8 * ip + il / 16; - const int y_idx = i * QUANT_K + 128 * ip + il; + const uint y_idx = i * QUANT_K + 128 * ip + il; - const int ql_idx = 32 * ip + il; + const uint ql_idx = 32 * ip + il; const uint8_t qs = data_a[i].qs[32 * ip + il]; FLOAT_TYPE dall = FLOAT_TYPE(data_a[i].d.x); @@ -528,31 +758,23 @@ layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; -layout (push_constant) uniform parameter -{ - int M; - int K; - int stride_a; - int stride_b; -} p; - void main() { - [[unroll]] for (int wgy = 0; wgy < 256; wgy++) { - const int i = int(gl_WorkGroupID.x * 256 + wgy); + [[unroll]] for (uint wgy = 0; wgy < 256; wgy++) { + const uint i = uint(gl_WorkGroupID.x * 256 + wgy); if (i >= p.M * p.K / QUANT_K) { return; } - const int r = int(gl_LocalInvocationID.x) / 4; - const int tid = r / 2; - const int is0 = r % 2; - const int l0 = 16 * is0 + 4 * (int(gl_LocalInvocationID.x) % 4); - const int n = tid / 4; - const int j = tid - 4*n; + const uint r = gl_LocalInvocationID.x / 4; + const uint tid = r / 2; + const uint is0 = r % 2; + const uint l0 = 16 * is0 + 4 * (gl_LocalInvocationID.x % 4); + const uint n = tid / 4; + const uint j = tid - 4*n; const uint8_t m = uint8_t(1 << (4*n + j)); - const int is = 8*n + 2*j + is0; - const int shift = 2*j; + const uint is = 8*n + 2*j + is0; + const uint shift = 2*j; const int8_t us = int8_t(is < 4 ? (data_a[i].scales[is-0] & 0xF) | (((data_a[i].scales[is+8] >> 0) & 3) << 4) : is < 8 ? 
(data_a[i].scales[is-0] & 0xF) | (((data_a[i].scales[is+4] >> 2) & 3) << 4) : @@ -561,10 +783,10 @@ void main() { const FLOAT_TYPE d_all = FLOAT_TYPE(data_a[i].d); const FLOAT_TYPE dl = d_all * FLOAT_TYPE(us - 32); - const int y_idx = i * QUANT_K + 128 * n + 32 * j; - const int qs_idx = 32*n; + const uint y_idx = i * QUANT_K + 128 * n + 32 * j; + const uint qs_idx = 32*n; - for (int l = l0; l < l0 + 4; ++l) { + for (uint l = l0; l < l0 + 4; ++l) { data_b[y_idx + l] = D_TYPE(dl * FLOAT_TYPE(int8_t((data_a[i].qs[qs_idx + l] >> shift) & 3) - (((data_a[i].hmask[l] & m) != 0) ? 0 : 4))); } } @@ -576,32 +798,24 @@ layout(local_size_x = 32, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; -layout (push_constant) uniform parameter -{ - int M; - int K; - int stride_a; - int stride_b; -} p; - void main() { - [[unroll]] for (int wgy = 0; wgy < 256; wgy++) { - const int i = int(gl_WorkGroupID.x * 256 + wgy); + [[unroll]] for (uint wgy = 0; wgy < 256; wgy++) { + const uint i = gl_WorkGroupID.x * 256 + wgy; if (i >= p.M * p.K / QUANT_K) { return; } - const int tid = int(gl_LocalInvocationID.x); - const int il = tid / 8; - const int ir = tid % 8; - const int is = 2 * il; - const int n = 4; + const uint tid = gl_LocalInvocationID.x; + const uint il = tid / 8; + const uint ir = tid % 8; + const uint is = 2 * il; + const uint n = 4; const FLOAT_TYPE dall = FLOAT_TYPE(data_a[i].d.x); const FLOAT_TYPE dmin = FLOAT_TYPE(data_a[i].d.y); - const int y_idx = i * QUANT_K + 64 * il + n * ir; - const int qs_idx = 32*il + n * ir; + const uint y_idx = i * QUANT_K + 64 * il + n * ir; + const uint qs_idx = 32*il + n * ir; uint8_t sc; uint8_t m; @@ -625,7 +839,7 @@ void main() { const FLOAT_TYPE d2 = dall * sc; const FLOAT_TYPE m2 = dmin * m; - [[unroll]] for (int l = 0; l < n; ++l) { + [[unroll]] for (uint l = 0; l < n; ++l) { data_b[y_idx + l ] = D_TYPE(d1 * FLOAT_TYPE(data_a[i].qs[qs_idx + l] & 0xF) - m1); data_b[y_idx + l + 32] = D_TYPE(d2 * FLOAT_TYPE(data_a[i].qs[qs_idx + l] >> 4) - m2); } @@ -638,32 +852,24 @@ layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; -layout (push_constant) uniform parameter -{ - int M; - int K; - int stride_a; - int stride_b; -} p; - void main() { - [[unroll]] for (int wgy = 0; wgy < 256; wgy++) { - const int i = int(gl_WorkGroupID.x * 256 + wgy); + [[unroll]] for (uint wgy = 0; wgy < 256; wgy++) { + const uint i = gl_WorkGroupID.x * 256 + wgy; if (i >= p.M * p.K / QUANT_K) { return; } - const int tid = int(gl_LocalInvocationID.x); - const int il = tid / 16; - const int ir = tid % 16; - const int is = 2 * il; + const uint tid = gl_LocalInvocationID.x; + const uint il = tid / 16; + const uint ir = tid % 16; + const uint is = 2 * il; const FLOAT_TYPE dall = FLOAT_TYPE(data_a[i].d.x); const FLOAT_TYPE dmin = FLOAT_TYPE(data_a[i].d.y); - const int y_idx = i * QUANT_K + 64 * il + 2 * ir; - const int qs_idx = 32*il + 2 * ir; - const int qh_idx = 2 * ir; + const uint y_idx = i * QUANT_K + 64 * il + 2 * ir; + const uint qs_idx = 32*il + 2 * ir; + const uint qh_idx = 2 * ir; uint8_t sc; uint8_t m; @@ -702,28 +908,20 @@ layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_b[];}; -layout (push_constant) uniform parameter -{ - int M; - 
int K; - int stride_a; - int stride_b; -} p; - void main() { - [[unroll]] for (int wgy = 0; wgy < 256; wgy++) { - const int i = int(gl_WorkGroupID.x * 256 + wgy); + [[unroll]] for (uint wgy = 0; wgy < 256; wgy++) { + const uint i = gl_WorkGroupID.x * 256 + wgy; if (i >= p.M * p.K / QUANT_K) { return; } - const int tid = int(gl_LocalInvocationID.x); - const int ip = tid / 32; - const int il = tid - 32 * ip; - const int is = 8 * ip + il / 16; + const uint tid = gl_LocalInvocationID.x; + const uint ip = tid / 32; + const uint il = tid - 32 * ip; + const uint is = 8 * ip + il / 16; - const int y_idx = i * QUANT_K + 128 * ip + il; + const uint y_idx = i * QUANT_K + 128 * ip + il; - const int ql_idx = 64 * ip + il; + const uint ql_idx = 64 * ip + il; const uint8_t qh = data_a[i].qh[32 * ip + il]; const FLOAT_TYPE d = FLOAT_TYPE(data_a[i].d); @@ -742,49 +940,50 @@ mul_mat_vec_head = """#version 450 #extension GL_EXT_control_flow_attributes : enable #extension GL_EXT_shader_16bit_storage : require #extension GL_EXT_shader_8bit_storage : require + +layout (push_constant) uniform parameter +{ + uint ncols; + uint b_offset; + uint d_offset; +} p; """ mul_mat_vec_body = """ -layout(local_size_x = QUANT_K, local_size_y = 1, local_size_z = 1) in; +layout(local_size_x_id = 0, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; +layout (constant_id = 0) const uint BLOCK_SIZE = 32; -shared FLOAT_TYPE tmp[QUANT_K]; +shared FLOAT_TYPE tmp[BLOCK_SIZE]; void main() { - const int block_size = int(gl_WorkGroupSize.x); - const int row = int(gl_WorkGroupID.x); - const int tid = int(gl_LocalInvocationID.x); + const uint row = gl_WorkGroupID.x; + const uint tid = gl_LocalInvocationID.x; - const int y_offset = QUANT_R == 1 ? 1 : QUANT_K/2; + const uint y_offset = QUANT_R == 1 ? 
1 : QUANT_K/2; tmp[tid] = FLOAT_TYPE(0.0f); - [[unroll]] for (int i = 0; i < p.ncols/block_size; i += 2) { - const int col = i*block_size + 2*tid; - const int ib = (row*p.ncols + col)/QUANT_K; // block index - const int iqs = (col%QUANT_K)/QUANT_R; // quant index - const int iybs = col - col%QUANT_K; // y block start index + [[unroll]] for (uint i = 0; i < p.ncols/BLOCK_SIZE; i += 2) { + const uint col = i*BLOCK_SIZE + 2*tid; + const uint ib = (row*p.ncols + col)/QUANT_K; // block index + const uint iqs = (col%QUANT_K)/QUANT_R; // quant index + const uint iybs = col - col%QUANT_K; // y block start index DEQUANT_FUNC // matrix multiplication - tmp[tid] += FLOAT_TYPE(v.x) * FLOAT_TYPE(data_b[p.b_offset + iybs + iqs + 0]); - tmp[tid] += FLOAT_TYPE(v.y) * FLOAT_TYPE(data_b[p.b_offset + iybs + iqs + y_offset]); + tmp[tid] += FLOAT_TYPE(v.x) * FLOAT_TYPE(data_b[p.b_offset + iybs + iqs + 0]) + + FLOAT_TYPE(v.y) * FLOAT_TYPE(data_b[p.b_offset + iybs + iqs + y_offset]); } // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = block_size/2; s > 0; s >>= 1) { + [[unroll]] for (uint s = BLOCK_SIZE/2; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -804,38 +1003,31 @@ layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; - shared FLOAT_TYPE tmp[32]; void main() { - const int row = int(gl_WorkGroupID.x); + const uint row = gl_WorkGroupID.x; - const int num_blocks_per_row = p.ncols / QUANT_K; - const int ib0 = row*num_blocks_per_row; + const uint num_blocks_per_row = p.ncols / QUANT_K; + const uint ib0 = row*num_blocks_per_row; - const int tid = int(gl_LocalInvocationID.x)/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const int ix = int(gl_LocalInvocationID.x)%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 + const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 - const int step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 - const int v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const int v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... 
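+    // v_im picks the low or high 128 values of the 256-value superblock; v_in (next line) is this thread's offset within that half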
+ const uint v_in = tid - step*v_im; // 0...15 or 0...7 - const int l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 - const int q_offset = 32*v_im + l0; - const int s_offset = 8*v_im; - const int y_offset = 128*v_im + l0; + const uint l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 + const uint q_offset = 32*v_im + l0; + const uint s_offset = 8*v_im; + const uint y_offset = 128*v_im + l0; tmp[16 * ix + tid] = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (int i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { - const int y_idx = i * QUANT_K + y_offset; + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + const uint y_idx = i * QUANT_K + y_offset; const FLOAT_TYPE dall = FLOAT_TYPE(data_a[ib0 + i].d.x); const FLOAT_TYPE dmin = FLOAT_TYPE(data_a[ib0 + i].d.y); @@ -865,7 +1057,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = 16; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -883,41 +1075,34 @@ layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; - shared FLOAT_TYPE tmp[32]; void main() { - const int row = int(gl_WorkGroupID.x); + const uint row = gl_WorkGroupID.x; - const int num_blocks_per_row = p.ncols / QUANT_K; - const int ib0 = row*num_blocks_per_row; + const uint num_blocks_per_row = p.ncols / QUANT_K; + const uint ib0 = row*num_blocks_per_row; - const int tid = int(gl_LocalInvocationID.x)/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const int ix = int(gl_LocalInvocationID.x)%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 + const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 - const int step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 - const int v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const int v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... 
+ const uint v_in = tid - step*v_im; // 0...15 or 0...7 const uint8_t m = uint8_t(1 << (4 * v_im)); - const int l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 - const int q_offset = 32*v_im + l0; - const int y_offset = 128*v_im + l0; + const uint l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 + const uint q_offset = 32*v_im + l0; + const uint y_offset = 128*v_im + l0; tmp[16 * ix + tid] = FLOAT_TYPE(0.0); // partial sum for thread in warp const uint s_shift = 4 * v_im; - [[unroll]] for (int i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { - const int y_idx = i * QUANT_K + y_offset; + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + const uint y_idx = i * QUANT_K + y_offset; const FLOAT_TYPE d = FLOAT_TYPE(data_a[ib0 + i].d); @@ -937,7 +1122,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = 16; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -955,42 +1140,35 @@ layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; - shared FLOAT_TYPE tmp[32]; void main() { - const int row = int(gl_WorkGroupID.x); + const uint row = gl_WorkGroupID.x; - const int num_blocks_per_row = p.ncols / QUANT_K; - const int ib0 = row*num_blocks_per_row; + const uint num_blocks_per_row = p.ncols / QUANT_K; + const uint ib0 = row*num_blocks_per_row; - const int tid = int(gl_LocalInvocationID.x)/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const int ix = int(gl_LocalInvocationID.x)%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 + const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 - const int step = 8/K_QUANTS_PER_ITERATION; // 8 or 4 + const uint step = 8/K_QUANTS_PER_ITERATION; // 8 or 4 - const int il = tid/step; // 0...3 - const int ir = tid - step*il; // 0...7 or 0...3 - const int n = 2 * K_QUANTS_PER_ITERATION; // 2 or 4 + const uint il = tid/step; // 0...3 + const uint ir = tid - step*il; // 0...7 or 0...3 + const uint n = 2 * K_QUANTS_PER_ITERATION; // 2 or 4 - const int v_im = il / 2; // 0 or 1. 0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 - const int v_in = il % 2; + const uint v_im = il / 2; // 0 or 1. 
0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 + const uint v_in = il % 2; - const int l0 = n * (2 * ir + v_in); // 0...15 - const int q_offset = 32*v_im + l0; - const int y_offset = 64*v_im + l0; + const uint l0 = n * (2 * ir + v_in); // 0...15 + const uint q_offset = 32*v_im + l0; + const uint y_offset = 64*v_im + l0; tmp[16 * ix + tid] = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (int i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { - const int y1_idx = i * QUANT_K + y_offset; - const int y2_idx = y1_idx + 128; + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + const uint y1_idx = i * QUANT_K + y_offset; + const uint y2_idx = y1_idx + 128; const FLOAT_TYPE dall = FLOAT_TYPE(data_a[ib0 + i].d.x); const FLOAT_TYPE dmin = FLOAT_TYPE(data_a[ib0 + i].d.y); @@ -1058,7 +1236,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = 16; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -1076,42 +1254,35 @@ layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; - shared FLOAT_TYPE tmp[32]; void main() { - const int row = int(gl_WorkGroupID.x); + const uint row = gl_WorkGroupID.x; - const int num_blocks_per_row = p.ncols / QUANT_K; - const int ib0 = row*num_blocks_per_row; + const uint num_blocks_per_row = p.ncols / QUANT_K; + const uint ib0 = row*num_blocks_per_row; - const int tid = int(gl_LocalInvocationID.x)/2; // 0...31 or 0...16 - const int ix = int(gl_LocalInvocationID.x)%2; // 0 or 0, 1 + const uint tid = gl_LocalInvocationID.x/2; // 0...31 or 0...16 + const uint ix = gl_LocalInvocationID.x%2; // 0 or 0, 1 - const int il = tid/4; // 0...3 - const int ir = tid - 4*il; // 0...7 or 0...3 + const uint il = tid/4; // 0...3 + const uint ir = tid - 4*il; // 0...7 or 0...3 - const int v_im = il / 2; // 0 or 1. 0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 - const int v_in = il % 2; + const uint v_im = il / 2; // 0 or 1. 
0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224 + const uint v_in = il % 2; - const int l0 = 4*ir + 2*v_in; // 0...15 - const int q_offset = 32*v_im + l0; - const int y_offset = 64*v_im + l0; + const uint l0 = 4*ir + 2*v_in; // 0...15 + const uint q_offset = 32*v_im + l0; + const uint y_offset = 64*v_im + l0; const uint8_t hm1 = uint8_t(1 << (2*v_im)); const uint8_t hm2 = uint8_t(hm1 << 4); tmp[16 * ix + tid] = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (int i = ix; i < num_blocks_per_row; i += 2) { - const int y1_idx = i * QUANT_K + y_offset; - const int y2_idx = y1_idx + 128; + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += 2) { + const uint y1_idx = i * QUANT_K + y_offset; + const uint y2_idx = y1_idx + 128; const FLOAT_TYPE dall = FLOAT_TYPE(data_a[ib0 + i].d.x); const FLOAT_TYPE dmin = FLOAT_TYPE(data_a[ib0 + i].d.y); @@ -1175,7 +1346,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = 16; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -1193,46 +1364,40 @@ layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) readonly buffer B {B_TYPE data_b[];}; layout (binding = 2) writeonly buffer D {D_TYPE dst[];}; -layout (push_constant) uniform parameter -{ - int ncols; - int b_offset; - int d_offset; -} p; - shared FLOAT_TYPE tmp[32]; void main() { - const int row = int(gl_WorkGroupID.x); + const uint block_size = gl_WorkGroupSize.x; + const uint row = gl_WorkGroupID.x; - const int num_blocks_per_row = p.ncols / QUANT_K; - const int ib0 = row*num_blocks_per_row; + const uint num_blocks_per_row = p.ncols / QUANT_K; + const uint ib0 = row*num_blocks_per_row; - const int tid = int(gl_LocalInvocationID.x)/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 - const int ix = int(gl_LocalInvocationID.x)%K_QUANTS_PER_ITERATION; // 0 or 0, 1 + const uint tid = gl_LocalInvocationID.x/K_QUANTS_PER_ITERATION; // 0...31 or 0...16 + const uint ix = gl_LocalInvocationID.x%K_QUANTS_PER_ITERATION; // 0 or 0, 1 - const int step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 + const uint step = 16/K_QUANTS_PER_ITERATION; // 16 or 8 - const int v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... - const int v_in = tid - step*v_im; // 0...15 or 0...7 + const uint v_im = tid/step; // 0 or 1. 0 computes 0..., 1 computes 128... 
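+    // q6_K: v_im and v_in feed the ql, qh and scale offsets computed just below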
+ const uint v_in = tid - step*v_im; // 0...15 or 0...7 #if K_QUANTS_PER_ITERATION == 1 - const int l0 = K_QUANTS_PER_ITERATION*v_in; // 0...15 - const int is = 0; + const uint l0 = v_in; // 0...15 + const uint is = 0; #else - const int l0 = 4 * v_in; // 0, 4, 8, ..., 28 - const int is = v_in / 4; + const uint l0 = 4 * v_in; // 0, 4, 8, ..., 28 + const uint is = v_in / 4; #endif - const int ql_offset = 64*v_im + l0; - const int qh_offset = 32*v_im + l0; - const int s_offset = 8*v_im + is; - const int y_offset = 128*v_im + l0; + const uint ql_offset = 64*v_im + l0; + const uint qh_offset = 32*v_im + l0; + const uint s_offset = 8*v_im + is; + const uint y_offset = 128*v_im + l0; tmp[16 * ix + tid] = FLOAT_TYPE(0.0); // partial sum for thread in warp - [[unroll]] for (int i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { - const int y_idx = i * QUANT_K + y_offset; + [[unroll]] for (uint i = ix; i < num_blocks_per_row; i += K_QUANTS_PER_ITERATION) { + const uint y_idx = i * QUANT_K + y_offset; const FLOAT_TYPE d = FLOAT_TYPE(data_a[ib0 + i].d); @@ -1260,7 +1425,7 @@ void main() { // sum up partial sums and write back result barrier(); - [[unroll]] for (int s = 16; s > 0; s >>= 1) { + [[unroll]] for (uint s = 16; s > 0; s >>= 1) { if (tid < s) { tmp[tid] += tmp[tid + s]; } @@ -1421,34 +1586,6 @@ void main() { } """ -# F16 to F32 -f32_to_f16_src = """#version 450 - -#extension GL_EXT_shader_16bit_storage : require - -layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer A {float data_a[];}; -layout (binding = 1) writeonly buffer D {float16_t data_b[];}; - -layout (push_constant) uniform parameter -{ - int M; - int K; - int stride_a; - int stride_b; -} p; - -void main() { - const int row = int(gl_GlobalInvocationID.x % p.K); - const int col = int(gl_GlobalInvocationID.x / p.K); - - if (row < p.K && col < p.M) { - data_b[col * p.stride_b + row] = float16_t(data_a[col * p.stride_a + row]); - } -} -""" - generic_head = """ #version 450 @@ -1463,107 +1600,17 @@ layout (push_constant) uniform parameter } p; """ -# MUL F32 -mul_body = """layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) readonly buffer Y {B_TYPE data_b[];}; -layout (binding = 2) writeonly buffer D {D_TYPE data_d[];}; - -void main() { - const uint idx = gl_GlobalInvocationID.x; - - if (idx >= p.KX) { - return; - } - - data_d[idx] = D_TYPE(FLOAT_TYPE(data_a[idx]) * FLOAT_TYPE(data_b[idx % p.KY])); -} -""" - -# ADD -add_body = """ -layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) readonly buffer Y {B_TYPE data_b[];}; -layout (binding = 2) writeonly buffer D {D_TYPE data_d[];}; - -void main() { - const uint idx = gl_GlobalInvocationID.x; - - if (idx >= p.KX) { - return; - } - - data_d[idx] = D_TYPE(FLOAT_TYPE(data_a[idx]) + FLOAT_TYPE(data_b[idx % p.KY])); -} -""" - -# SCALE -scale_body = """layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) writeonly buffer D {D_TYPE data_d[];}; - -void main() { - const uint idx = gl_GlobalInvocationID.x; - - if (idx >= p.KX) { - return; - } - - data_d[idx] = D_TYPE(FLOAT_TYPE(data_a[idx]) * FLOAT_TYPE(p.param1)); -} -""" - -# SQR -sqr_body = """layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly 
buffer X {A_TYPE data_a[];}; -layout (binding = 1) writeonly buffer D {D_TYPE data_d[];}; - -void main() { - const uint idx = gl_GlobalInvocationID.x; - - if (idx >= p.KX) { - return; - } - - const FLOAT_TYPE val = FLOAT_TYPE(data_a[idx]); - data_d[idx] = D_TYPE(val * val); -} -""" - -# CLAMP -clamp_body = """layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; - -layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; -layout (binding = 1) writeonly buffer D {D_TYPE data_d[];}; - -void main() { - const uint idx = gl_GlobalInvocationID.x; - - if (idx >= p.KX) { - return; - } - - const FLOAT_TYPE val = FLOAT_TYPE(data_a[idx]); - data_d[idx] = D_TYPE(val < p.param1 ? p.param1 : (val > p.param2 ? p.param2 : val)); -} -""" - -# CPY -cpy_src = """#version 450 +generic_unary_op_head = """#version 450 #extension GL_EXT_shader_16bit_storage : require layout (push_constant) uniform parameter { uint ne; - uint ne00; uint ne01; uint nb00; uint nb01; uint nb02; - uint ne10; uint ne11; uint nb10; uint nb11; uint nb12; + uint ne00; uint ne01; uint ne02; uint ne03; uint nb00; uint nb01; uint nb02; uint nb03; + uint ne10; uint ne11; uint ne12; uint ne13; uint nb10; uint nb11; uint nb12; uint nb13; uint d_offset; + float param1; float param2; } p; layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; @@ -1571,28 +1618,129 @@ layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; layout (binding = 1) writeonly buffer D {D_TYPE data_d[];}; +uint src0_idx(uint idx) { + const uint i03 = idx / (p.ne02*p.ne01*p.ne00); + const uint i03_offset = i03 * p.ne02*p.ne01*p.ne00; + const uint i02 = (idx - i03_offset) / (p.ne01*p.ne00); + const uint i02_offset = i02*p.ne01*p.ne00; + const uint i01 = (idx - i03_offset - i02_offset) / p.ne00; + const uint i00 = idx - i03_offset - i02_offset - i01*p.ne00; + return i03*p.nb03 + i02*p.nb02 + i01*p.nb01 + i00*p.nb00; +} + +uint dst_idx(uint idx) { + const uint i13 = idx / (p.ne12*p.ne11*p.ne10); + const uint i13_offset = i13 * p.ne12*p.ne11*p.ne10; + const uint i12 = (idx - i13_offset) / (p.ne11*p.ne10); + const uint i12_offset = i12*p.ne11*p.ne10; + const uint i11 = (idx - i13_offset - i12_offset) / p.ne10; + const uint i10 = idx - i13_offset - i12_offset - i11*p.ne10; + return i13*p.nb13 + i12*p.nb12 + i11*p.nb11 + i10*p.nb10; +} + void main() { if (gl_GlobalInvocationID.x >= p.ne) { return; } - - const uint i02 = gl_GlobalInvocationID.x / (p.ne00*p.ne01); - const uint i01 = (gl_GlobalInvocationID.x - i02*p.ne01*p.ne00) / p.ne00; - const uint i00 = gl_GlobalInvocationID.x - i02*p.ne01*p.ne00 - i01*p.ne00; - const uint a_idx = i00*p.nb00 + i01*p.nb01 + i02*p.nb02; - - const uint i12 = gl_GlobalInvocationID.x / (p.ne10*p.ne11); - const uint i11 = (gl_GlobalInvocationID.x - i12*p.ne11*p.ne10) / p.ne10; - const uint i10 = gl_GlobalInvocationID.x - i12*p.ne11*p.ne10 - i11*p.ne10; - const uint d_idx = i10*p.nb10 + i11*p.nb11 + i12*p.nb12; """ + +generic_binary_op_head = """#version 450 + +#extension GL_EXT_shader_16bit_storage : require + +layout (push_constant) uniform parameter +{ + uint ne; + uint ne00; uint ne01; uint ne02; uint ne03; uint nb00; uint nb01; uint nb02; uint nb03; + uint ne10; uint ne11; uint ne12; uint ne13; uint nb10; uint nb11; uint nb12; uint nb13; + uint ne20; uint ne21; uint ne22; uint ne23; uint nb20; uint nb21; uint nb22; uint nb23; + uint d_offset; + uint param1; uint param2; +} p; + +layout(local_size_x = 512, local_size_y = 1, local_size_z = 1) in; + 
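+// the src0_idx/src1_idx/dst_idx helpers below unflatten the global index into (i3,i2,i1,i0) and apply each tensor's strides;
+// src1_idx additionally wraps every coordinate with % ne1x, which is what implements broadcasting (e.g. a [4096,1,1,1] src1 row is re-read for every row of a [4096,32,1,1] src0)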
+layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; +layout (binding = 1) readonly buffer B {A_TYPE data_b[];}; +layout (binding = 2) writeonly buffer D {D_TYPE data_d[];}; + +uint src0_idx(uint idx) { + const uint i03 = idx / (p.ne02*p.ne01*p.ne00); + const uint i03_offset = i03 * p.ne02*p.ne01*p.ne00; + const uint i02 = (idx - i03_offset) / (p.ne01*p.ne00); + const uint i02_offset = i02*p.ne01*p.ne00; + const uint i01 = (idx - i03_offset - i02_offset) / p.ne00; + const uint i00 = idx - i03_offset - i02_offset - i01*p.ne00; + return i03*p.nb03 + i02*p.nb02 + i01*p.nb01 + i00*p.nb00; +} + +uint src1_idx(uint idx) { + const uint i03 = idx / (p.ne02*p.ne01*p.ne00); + const uint i03_offset = i03 * p.ne02*p.ne01*p.ne00; + const uint i02 = (idx - i03_offset) / (p.ne01*p.ne00); + const uint i02_offset = i02*p.ne01*p.ne00; + const uint i01 = (idx - i03_offset - i02_offset) / p.ne00; + const uint i00 = idx - i03_offset - i02_offset - i01*p.ne00; + + return (i03 % p.ne13)*p.nb13 + (i02 % p.ne12)*p.nb12 + (i01 % p.ne11)*p.nb11 + (i00 % p.ne10)*p.nb10; +} + +uint dst_idx(uint idx) { + const uint i23 = idx / (p.ne22*p.ne21*p.ne20); + const uint i23_offset = i23 * p.ne22*p.ne21*p.ne20; + const uint i22 = (idx - i23_offset) / (p.ne21*p.ne20); + const uint i22_offset = i22*p.ne21*p.ne20; + const uint i21 = (idx - i23_offset - i22_offset) / p.ne20; + const uint i20 = idx - i23_offset - i22_offset - i21*p.ne20; + return i23*p.nb23 + i22*p.nb22 + i21*p.nb21 + i20*p.nb20; +} + +void main() { + if (gl_GlobalInvocationID.x >= p.ne) { + return; + } +""" + +# MUL F32 +mul_body = """ + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(FLOAT_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]) * FLOAT_TYPE(data_b[src1_idx(gl_GlobalInvocationID.x)])); +} +""" + +# ADD +add_body = """ + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(FLOAT_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]) + FLOAT_TYPE(data_b[src1_idx(gl_GlobalInvocationID.x)])); +} +""" + +# SCALE +scale_body = """ + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(FLOAT_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]) + FLOAT_TYPE(p.param1)); +} +""" + +# SQR +sqr_body = """ + const FLOAT_TYPE val = FLOAT_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]); + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(val * val); +} +""" + +# CLAMP +clamp_body = """ + const FLOAT_TYPE val = FLOAT_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]); + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(val < p.param1 ? p.param1 : (val > p.param2 ? 
p.param2 : val)); +} +""" + +# CPY cpy_end = """ - data_d[p.d_offset + d_idx] = D_TYPE(data_a[a_idx]); + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = D_TYPE(data_a[src0_idx(gl_GlobalInvocationID.x)]); } """ # Causes an optimization error otherwise cpy_f16_f16_end = """ - data_d[p.d_offset + d_idx] = data_a[a_idx]; + data_d[p.d_offset + dst_idx(gl_GlobalInvocationID.x)] = data_a[src0_idx(gl_GlobalInvocationID.x)]; } """ @@ -1815,6 +1963,24 @@ void main() { """ # SOFT_MAX +soft_max_head = """ +#version 450 + +#extension GL_EXT_shader_16bit_storage : require + +layout (push_constant) uniform parameter +{ + uint KX; + uint KY; + uint KZ; + float scale; + float max_bias; + float m0; + float m1; + uint n_head_log2; +} p; +""" + soft_max_body = """ #extension GL_EXT_control_flow_attributes : enable #define BLOCK_SIZE 512 @@ -1823,7 +1989,8 @@ layout(local_size_x = BLOCK_SIZE, local_size_y = 1, local_size_z = 1) in; layout (binding = 0) readonly buffer X {A_TYPE data_a[];}; layout (binding = 1) readonly buffer Y {B_TYPE data_b[];}; -layout (binding = 2) buffer D {D_TYPE data_d[];}; +layout (binding = 2) readonly buffer Z {C_TYPE data_c[];}; +layout (binding = 3) buffer D {D_TYPE data_d[];}; shared FLOAT_TYPE vals[BLOCK_SIZE]; @@ -1832,11 +1999,23 @@ void main() { const uint rowx = gl_WorkGroupID.x; const uint rowy = rowx % p.KY; + float slope = 0.0f; + + // ALiBi + if (p.max_bias > 0.0f) { + const uint h = rowx/p.KY; // head index + + const float base = h < p.n_head_log2 ? p.m0 : p.m1; + const uint exp = h < p.n_head_log2 ? h + 1 : 2*(h - p.n_head_log2) + 1; + + slope = pow(base, exp); + } + // Find max vals[tid] = uintBitsToFloat(0xFF800000); [[unroll]] for (uint col = tid; col < p.KX; col += BLOCK_SIZE) { - vals[tid] = max(vals[tid], FLOAT_TYPE(data_a[rowx * p.KX + col]) * p.param1 + (p.KY > 0 ? FLOAT_TYPE(data_b[rowy * p.KX + col]) : FLOAT_TYPE(0.0f))); + vals[tid] = max(vals[tid], FLOAT_TYPE(data_a[rowx * p.KX + col]) * p.scale + (p.KY > 0 ? FLOAT_TYPE(data_b[rowy * p.KX + col]) : FLOAT_TYPE(0.0f)) + (p.KZ > 0 ? slope * data_c[col] : 0.0f)); } barrier(); @@ -1855,7 +2034,7 @@ void main() { [[unroll]] for (uint col = tid; col < p.KX; col += BLOCK_SIZE) { const uint i = rowx * p.KX + col; - const FLOAT_TYPE val = exp(FLOAT_TYPE(data_a[i]) * p.param1 + (p.KY > 0 ? FLOAT_TYPE(data_b[rowy * p.KX + col]) : FLOAT_TYPE(0.0f)) - max_val); + const FLOAT_TYPE val = exp(FLOAT_TYPE(data_a[i]) * p.scale + (p.KY > 0 ? 
FLOAT_TYPE(data_b[rowy * p.KX + col]) : FLOAT_TYPE(0.0f)) - max_val); vals[tid] += val; data_d[i] = D_TYPE(val); } @@ -2028,6 +2207,65 @@ void main() { } """ +argsort_src = """ +#version 450 + +#extension GL_EXT_shader_16bit_storage : require + +layout(local_size_x = 1024, local_size_y = 1, local_size_z = 1) in; + +layout (binding = 0) readonly buffer A {A_TYPE data_a[];}; +layout (binding = 1) buffer D {int data_d[];}; + +layout (push_constant) uniform parameter { + uint ncols; + bool ascending; +} p; + +void swap(uint idx0, uint idx1) { + int tmp = data_d[idx0]; + data_d[idx0] = data_d[idx1]; + data_d[idx1] = tmp; +} + +void main() { + // bitonic sort + const int col = int(gl_LocalInvocationID.x); + const uint row = gl_WorkGroupID.y; + + if (col >= p.ncols) { + return; + } + + const uint a_idx = row * p.ncols; + const uint d_idx = row * p.ncols; + + // initialize indices + if (col < p.ncols) { + data_d[col] = col; + } + barrier(); + + for (uint k = 2; k <= p.ncols; k *= 2) { + for (uint j = k / 2; j > 0; j /= 2) { + const uint ixj = col ^ j; + if (ixj > col) { + if ((col & k) == 0) { + if (p.ascending ? data_a[a_idx + data_d[d_idx + col]] > data_a[a_idx + data_d[d_idx + ixj]] : data_a[a_idx + data_d[d_idx + col]] < data_a[a_idx + data_d[d_idx + ixj]]) { + swap(d_idx + col, d_idx + ixj); + } + } else { + if (p.ascending ? data_a[a_idx + data_d[d_idx + col]] < data_a[a_idx + data_d[d_idx + ixj]] : data_a[a_idx + data_d[d_idx + col]] > data_a[a_idx + data_d[d_idx + ixj]]) { + swap(d_idx + col, d_idx + ixj); + } + } + } + barrier(); + } + } +} +""" + GLSLC = "glslc" VK_NUM_TYPES = 16 @@ -2129,6 +2367,8 @@ async def main(): tasks = [] + stream = [] + for fp16 in (False, True): # mulmat if fp16: @@ -2142,28 +2382,41 @@ async def main(): vec_type_f16 = "f16vec4" vec_type = "vec4" - stream = [] - stream.extend((mulmat_head, shader_float_type, mulmat_body)) - tasks.append(string_to_spv("matmul_f32_l", "".join(stream), {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f32_m", "".join(stream), {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f32_s", "".join(stream), {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f32_aligned_l", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f32_aligned_m", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f32_aligned_s", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + stream.clear() + stream.extend((mulmat_head, shader_float_type, mulmat_body1, mulmat_load_scalar, mulmat_body2)) + tasks.append(string_to_spv("matmul_f32", "".join(stream), {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_f32_aligned", "".join(stream), {"LOAD_VEC_A": load_vec, "LOAD_VEC_B": load_vec, "A_TYPE": vec_type, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_l", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float16_t", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_m", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float16_t", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_s", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float16_t", 
"D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_aligned_l", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type_f16, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_aligned_m", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type_f16, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_aligned_s", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type_f16, "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_f16", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float16_t", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_f16_aligned", "".join(stream), {"LOAD_VEC_A": load_vec, "LOAD_VEC_B": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type_f16, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_l", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_m", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_s", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_aligned_l", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_aligned_m", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - tasks.append(string_to_spv("matmul_f16_f32_aligned_s", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_f16_f32", "".join(stream), {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_f16_f32_aligned", "".join(stream), {"LOAD_VEC_A": load_vec, "LOAD_VEC_B": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + + stream.clear() + stream.extend((mulmat_head, shader_int8_ext, shader_float_type, shader_q4_0_defines, mulmat_body1, mulmat_load_q4_0, mulmat_body2)) + tasks.append(string_to_spv("matmul_q4_0_f32", "".join(stream), {"LOAD_VEC_A": 2, "A_TYPE": "block_q4_0", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_q4_0_f32_aligned", "".join(stream), {"LOAD_VEC_A": 2, "LOAD_VEC_B": load_vec, "A_TYPE": "block_q4_0", "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + + stream.clear() + stream.extend((mulmat_head, shader_int8_ext, shader_float_type, shader_q4_1_defines, mulmat_body1, mulmat_load_q4_1, mulmat_body2)) + tasks.append(string_to_spv("matmul_q4_1_f32", "".join(stream), {"LOAD_VEC_A": 2, "A_TYPE": "block_q4_1", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_q4_1_f32_aligned", "".join(stream), {"LOAD_VEC_A": 2, "LOAD_VEC_B": load_vec, "A_TYPE": "block_q4_1", "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + + stream.clear() + stream.extend((mulmat_head, shader_int8_ext, shader_float_type, shader_q5_0_defines, mulmat_body1, mulmat_load_q5_0, mulmat_body2)) + tasks.append(string_to_spv("matmul_q5_0_f32", "".join(stream), {"LOAD_VEC_A": 2, "A_TYPE": "block_q5_0", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_q5_0_f32_aligned", "".join(stream), {"LOAD_VEC_A": 2, "LOAD_VEC_B": load_vec, "A_TYPE": "block_q5_0", "B_TYPE": vec_type, "D_TYPE": 
"float"}, fp16)) + + stream.clear() + stream.extend((mulmat_head, shader_int8_ext, shader_float_type, shader_q5_1_defines, mulmat_body1, mulmat_load_q5_1, mulmat_body2)) + tasks.append(string_to_spv("matmul_q5_1_f32", "".join(stream), {"LOAD_VEC_A": 2, "A_TYPE": "block_q5_1", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_q5_1_f32_aligned", "".join(stream), {"LOAD_VEC_A": 2, "LOAD_VEC_B": load_vec, "A_TYPE": "block_q5_1", "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) + + stream.clear() + stream.extend((mulmat_head, shader_int8_ext, shader_float_type, shader_q8_0_defines, mulmat_body1, mulmat_load_q8_0, mulmat_body2)) + tasks.append(string_to_spv("matmul_q8_0_f32", "".join(stream), {"LOAD_VEC_A": 2, "A_TYPE": "block_q8_0", "B_TYPE": "float", "D_TYPE": "float"}, fp16)) + tasks.append(string_to_spv("matmul_q8_0_f32_aligned", "".join(stream), {"LOAD_VEC_A": 2, "LOAD_VEC_B": load_vec, "A_TYPE": "block_q8_0", "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) # Shaders where precision is needed, so no fp16 version @@ -2205,18 +2458,18 @@ async def main(): stream.extend((dequant_head, shader_int8_ext, shader_f32)) - if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func, dequant_body)) + if i == GGML_TYPE_F32: + stream.append(dequant_f32_body) elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, dequant_body)) + stream.extend((shader_q4_0_defines, dequant_q4_0_body)) elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, dequant_body)) + stream.extend((shader_q4_1_defines, dequant_q4_1_body)) elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, dequant_body)) + stream.extend((shader_q5_0_defines, dequant_q5_0_body)) elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, dequant_body)) + stream.extend((shader_q5_1_defines, dequant_q5_1_body)) elif i == GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, dequant_body)) + stream.extend((shader_q8_0_defines, dequant_q8_0_body)) elif i == GGML_TYPE_Q2_K: stream.extend((shader_q2_K_defines, dequant_q2_K_body)) elif i == GGML_TYPE_Q3_K: @@ -2232,8 +2485,6 @@ async def main(): tasks.append(string_to_spv(f"dequant_{type_names[i]}", "".join(stream), {"D_TYPE": "float16_t"})) - tasks.append(string_to_spv("f32_to_f16", f32_to_f16_src, {})) - # get_rows for i in range(0, VK_NUM_TYPES): stream.clear() @@ -2264,20 +2515,20 @@ async def main(): tasks.append(string_to_spv("norm_f32", f"{generic_head}\n{shader_f32}\n{norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rms_norm_f32", f"{generic_head}\n{shader_f32}\n{rms_norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("cpy_f32_f32", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("cpy_f32_f16", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"})) - tasks.append(string_to_spv("cpy_f16_f16", f"{cpy_src}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("cpy_f32_f32", f"{generic_unary_op_head}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("cpy_f32_f16", f"{generic_unary_op_head}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("cpy_f16_f16", f"{generic_unary_op_head}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - 
tasks.append(string_to_spv("add_f32", f"{generic_head}\n{shader_f32}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("add_f32", f"{generic_binary_op_head}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float", "FLOAT_TYPE": "float"})) tasks.append(string_to_spv("split_k_reduce", mulmat_split_k_reduce_src, {})) - tasks.append(string_to_spv("mul_f32", f"{generic_head}\n{shader_f32}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("mul_f32", f"{generic_binary_op_head}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float", "FLOAT_TYPE": "float"})) - tasks.append(string_to_spv("scale_f32", f"{generic_head}\n{shader_f32}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("scale_f32", f"{generic_unary_op_head}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float", "FLOAT_TYPE": "float"})) - tasks.append(string_to_spv("sqr_f32", f"{generic_head}\n{shader_f32}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("sqr_f32", f"{generic_unary_op_head}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float", "FLOAT_TYPE": "float"})) - tasks.append(string_to_spv("clamp_f32", f"{generic_head}\n{shader_f32}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("clamp_f32", f"{generic_unary_op_head}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float", "FLOAT_TYPE": "float"})) tasks.append(string_to_spv("gelu_f32", f"{generic_head}\n{shader_f32}\n{gelu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("silu_f32", f"{generic_head}\n{shader_f32}\n{silu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) @@ -2285,7 +2536,7 @@ async def main(): tasks.append(string_to_spv("diag_mask_inf_f32", f"{diag_mask_inf_head}\n{shader_f32}\n{diag_mask_inf_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("soft_max_f32", f"{generic_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("soft_max_f32", f"{soft_max_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "C_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) @@ -2293,6 +2544,8 @@ async def main(): tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"})) tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("argsort_f32", argsort_src, {"A_TYPE": "float"})) + # Helper to decorate tasks with semaphore acquisition. async def withSemaphore(sem, task): async with sem: diff --git a/llama.cpp b/llama.cpp index 76afcbc13..e9192b4fa 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5014,8 +5014,8 @@ static struct ggml_tensor * llm_build_kqv( ggml_mul_mat_set_prec(kq, GGML_PREC_F32); } -#if defined(GGML_USE_VULKAN) || defined(GGML_USE_KOMPUTE) -#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Vulkan, and Kompute") +#if defined(GGML_USE_KOMPUTE) +#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Kompute") #pragma message(" Falling back to ggml_alibi(). 
Will become an error in Mar 2024") #pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5488") if (hparams.f_max_alibi_bias > 0.0f) { From b1a4e994fde929300d4aeb1deb8320c59cb6edec Mon Sep 17 00:00:00 2001 From: ExtReMLapin <3909752+ExtReMLapin@users.noreply.github.com> Date: Tue, 5 Mar 2024 14:44:29 +0100 Subject: [PATCH 792/811] grammars : don't allow to output unescaped new line in string (#5885) * Don't allow grammar json array to output unescaped new line in string * Don't allow new line in json object string --- grammars/json.gbnf | 2 +- grammars/json_arr.gbnf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/json.gbnf b/grammars/json.gbnf index a9537cdf9..34e014d55 100644 --- a/grammars/json.gbnf +++ b/grammars/json.gbnf @@ -15,7 +15,7 @@ array ::= string ::= "\"" ( - [^"\\] | + [^"\\\n] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws diff --git a/grammars/json_arr.gbnf b/grammars/json_arr.gbnf index ef53e77a0..fd5740eeb 100644 --- a/grammars/json_arr.gbnf +++ b/grammars/json_arr.gbnf @@ -24,7 +24,7 @@ array ::= string ::= "\"" ( - [^"\\] | + [^"\\\n] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws From 82cb31eb93fd19b74115e0f0133225d1dfdbfdbc Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 5 Mar 2024 15:56:24 +0200 Subject: [PATCH 793/811] Revert "grammars : don't allow to output unescaped new line in string (#5885)" This reverts commit b1a4e994fde929300d4aeb1deb8320c59cb6edec. --- grammars/json.gbnf | 2 +- grammars/json_arr.gbnf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/json.gbnf b/grammars/json.gbnf index 34e014d55..a9537cdf9 100644 --- a/grammars/json.gbnf +++ b/grammars/json.gbnf @@ -15,7 +15,7 @@ array ::= string ::= "\"" ( - [^"\\\n] | + [^"\\] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws diff --git a/grammars/json_arr.gbnf b/grammars/json_arr.gbnf index fd5740eeb..ef53e77a0 100644 --- a/grammars/json_arr.gbnf +++ b/grammars/json_arr.gbnf @@ -24,7 +24,7 @@ array ::= string ::= "\"" ( - [^"\\\n] | + [^"\\] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws From 3de31677d36aa4f82d4d99898902d7bcf398e666 Mon Sep 17 00:00:00 2001 From: ExtReMLapin <3909752+ExtReMLapin@users.noreply.github.com> Date: Tue, 5 Mar 2024 17:33:08 +0100 Subject: [PATCH 794/811] grammars : blacklists character control set (#5888) * Prevent control characters from being served in json string * Prevent control characters from being served in json string (array) --- grammars/json.gbnf | 2 +- grammars/json_arr.gbnf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grammars/json.gbnf b/grammars/json.gbnf index a9537cdf9..a8a80752e 100644 --- a/grammars/json.gbnf +++ b/grammars/json.gbnf @@ -15,7 +15,7 @@ array ::= string ::= "\"" ( - [^"\\] | + [^"\\\x7F\x00-\x1F] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws diff --git a/grammars/json_arr.gbnf b/grammars/json_arr.gbnf index ef53e77a0..31a3202f8 100644 --- a/grammars/json_arr.gbnf +++ b/grammars/json_arr.gbnf @@ -24,7 +24,7 @@ array ::= string ::= "\"" ( - [^"\\] | + [^"\\\x7F\x00-\x1F] | "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes )* "\"" ws From bd836944f826f07e19b7edcf994a78728da49c1c Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 5 Mar 2024 11:56:37 -0500 Subject: 
[PATCH 795/811] quants : use MM256_SET_M128I consistently to fix gcc 7 build (#5889) --- ggml-quants.c | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 2a8881d73..e0c125d43 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -51,6 +51,7 @@ #define UNUSED GGML_UNUSED +// some compilers don't provide _mm256_set_m128i, e.g. gcc 7 #define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) #if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) @@ -9563,7 +9564,7 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * const __m128i odd_bits = _mm_shuffle_epi8(bit_helper, partial_sign_bits_for_counting); const __m128i full_sign_bits = _mm_or_si128(partial_sign_bits, odd_bits); - const __m256i full_signs = _mm256_set_m128i(full_sign_bits, full_sign_bits); + const __m256i full_signs = MM256_SET_M128I(full_sign_bits, full_sign_bits); const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)y[i].qs); const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)(y[i].qs+32)); @@ -9585,8 +9586,8 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); - const __m256i sc1 = _mm256_set_m128i(_mm_set1_epi16(2*(x[i].scales[0] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[0] & 0xf)+1)); - const __m256i sc2 = _mm256_set_m128i(_mm_set1_epi16(2*(x[i].scales[1] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[1] & 0xf)+1)); + const __m256i sc1 = MM256_SET_M128I(_mm_set1_epi16(2*(x[i].scales[0] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[0] & 0xf)+1)); + const __m256i sc2 = MM256_SET_M128I(_mm_set1_epi16(2*(x[i].scales[1] >> 4)+1), _mm_set1_epi16(2*(x[i].scales[1] & 0xf)+1)); const __m256i sum = _mm256_add_epi32(_mm256_madd_epi16(sc1, dot1), _mm256_madd_epi16(sc2, dot2)); @@ -9653,8 +9654,8 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * const __m128i full_signs_l = _mm256_castsi256_si128(full_sign_bits); const __m128i full_signs_h = _mm256_extractf128_si256(full_sign_bits, 1); - const __m256i full_signs_1 = _mm256_set_m128i(full_signs_l, full_signs_l); - const __m256i full_signs_2 = _mm256_set_m128i(full_signs_h, full_signs_h); + const __m256i full_signs_1 = MM256_SET_M128I(full_signs_l, full_signs_l); + const __m256i full_signs_2 = MM256_SET_M128I(full_signs_h, full_signs_h); __m256i signs; signs = _mm256_shuffle_epi8(full_signs_1, block_sign_shuffle_1); @@ -10551,10 +10552,10 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)x[1].qs); const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)y[0].qs); const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)y[1].qs); - const __m256i q4b_1 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), - _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); - const __m256i q4b_2 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), - _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i q4b_1 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = MM256_SET_M128I(_mm_shuffle_epi8(values128, 
_mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); const __m256i p_1 = _mm256_madd_epi16(p16_1, mone); @@ -10661,10 +10662,10 @@ void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)qs); qs += 16; const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; - const __m256i q4b_1 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), - _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); - const __m256i q4b_2 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), - _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i q4b_1 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); const int16_t ls1 = ((x[ibl].scales_l[ib/2] & 0xf) | ((sh << 4) & 0x30)) - 32; From 652ca2bded3c818320d92c70d2b67f64bdbff5e5 Mon Sep 17 00:00:00 2001 From: slaren Date: Tue, 5 Mar 2024 22:27:29 +0100 Subject: [PATCH 796/811] compare-llama-bench.py : remove mul_mat_q (#5892) --- scripts/compare-llama-bench.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py index 39c3e52e5..54a7771d8 100755 --- a/scripts/compare-llama-bench.py +++ b/scripts/compare-llama-bench.py @@ -18,7 +18,7 @@ except ImportError as e: KEY_PROPERTIES = [ "cpu_info", "gpu_info", "n_gpu_layers", "main_gpu", "cuda", "opencl", "metal", "gpu_blas", "blas", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", - "type_k", "type_v", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" + "type_k", "type_v", "no_kv_offload", "tensor_split", "n_prompt", "n_gen" ] # Properties that are boolean and are converted to Yes/No for the table: From 8ced9f7e3225adb8501e9821ed1bbd92e3a5c7ae Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 6 Mar 2024 12:08:32 +0800 Subject: [PATCH 797/811] add wait() to make code stable (#5895) --- ci/run.sh | 3 ++- ggml-sycl.cpp | 59 +++++++++++++++++++++++++++++++++++++++------------ 2 files changed, 48 insertions(+), 14 deletions(-) diff --git a/ci/run.sh b/ci/run.sh index 35eb3c7aa..51f4c74cc 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -45,7 +45,8 @@ fi if [ ! 
-z ${GG_BUILD_SYCL} ]; then if [ -z ${ONEAPI_ROOT} ]; then - echo "Not detected ONEAPI_ROOT, please install oneAPI base toolkit and enable it by:\n source /opt/intel/oneapi/setvars.sh" + echo "Not detected ONEAPI_ROOT, please install oneAPI base toolkit and enable it by:" + echo "source /opt/intel/oneapi/setvars.sh" exit 1 fi diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 477f5cb02..ddd951dd6 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -3769,8 +3769,42 @@ void log_ggml_var_device(const char*name, float *src, size_t total_elements, boo std::ofstream logfile; logfile.open(filename); for(size_t i=0; imemcpy(local_buf, src, total_size).wait(); + } + else { + local_buf = (sycl::half *)src; + } + + std::ofstream logfile; + logfile.open(filename); + for(size_t i=0; iwait(); const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(GGML_TYPE_F16); to_fp32_sycl(dst_f16.get(), dst_dd_i, row_diff*src1_ncols, stream); } @@ -14159,6 +14193,7 @@ inline void ggml_sycl_op_mul_mat_sycl( dpct::get_value(&alpha, *g_sycl_handles[id]), src0_ddf_i, ne00, src1_ddf1_i, ne10, dpct::get_value(&beta, *g_sycl_handles[id]), dst_dd_i, ldc))); + g_sycl_handles[id]->wait(); } (void) dst; (void) src1_ddq_i; @@ -15295,8 +15330,8 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, sycl_pool_alloc dst_f16; char * dst_t; - dpct::library_data_t cu_compute_type = dpct::library_data_t::real_half; - dpct::library_data_t cu_data_type = dpct::library_data_t::real_half; + dpct::library_data_t cu_compute_type = dpct::library_data_t::real_float; + dpct::library_data_t cu_data_type = dpct::library_data_t::real_float; // dst strides size_t nbd2 = dst->nb[2]; @@ -15308,15 +15343,13 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, const float alpha_f32 = 1.0f; const float beta_f32 = 0.0f; - const void * alpha = &alpha_f16; - const void * beta = &beta_f16; + const void * alpha = &alpha_f32; + const void * beta = &beta_f32; // TODO: Renable (dst->op_params[0] =! 
GGML_PREC_DEFAULT) pathway - // once oneMKL open source supports half, half, float, float: datatypes - dst_t = (char *) dst_f16.alloc(ne_dst); + // oneMKL open source supports half, half, float, float: datatypes - nbd2 /= sizeof(float) / sizeof(sycl::half); - nbd3 /= sizeof(float) / sizeof(sycl::half); + dst_t = (char *) dst_ddf; GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -15356,6 +15389,7 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, nb11 / nb10, nb12 / nb10, beta, (char *)dst_t, cu_data_type, ne01, nb2 / nb0, ne12 * ne13, cu_compute_type))); + g_sycl_handles[g_main_device]->wait(); } else { const int ne23 = ne12*ne13; @@ -15386,7 +15420,7 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, nb02, nb03, nb12_scaled, nb13_scaled, nbd2, nbd3, r2, r3, item_ct1); }); - }); + }).wait(); } SYCL_CHECK(CHECK_TRY_ERROR(dpct::gemm_batch( *g_sycl_handles[g_main_device], oneapi::mkl::transpose::trans, @@ -15397,11 +15431,10 @@ static void ggml_sycl_mul_mat_batched_sycl(const ggml_tensor *src0, dpct::library_data_t::real_half, nb11 / nb10, beta, (void **)(ptrs_dst.get() + 0 * ne23), cu_data_type, ne01, ne23, cu_compute_type))); + g_sycl_handles[g_main_device]->wait(); } #endif - const to_fp32_sycl_t to_fp32_sycl = ggml_get_to_fp32_sycl(GGML_TYPE_F16); - to_fp32_sycl(dst_f16.get(), dst_ddf, ne_dst, main_stream); } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ From 1e35d619a6fb0b9c5e3dc955345980ff056ddbaf Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 6 Mar 2024 09:12:25 +0200 Subject: [PATCH 798/811] convert : remove AWQ remnants (#5768) --- convert.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/convert.py b/convert.py index 6e3a0319b..c15f8c47e 100755 --- a/convert.py +++ b/convert.py @@ -1377,7 +1377,6 @@ def main(args_in: list[str] | None = None) -> None: # We currently only support Q8_0 output on little endian systems. 
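        # (Q8_0 tensors are emitted as raw packed blocks, so producing them for a
        #  big-endian target would also require byte-swapping the fp16 scale stored
        #  inside each block)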
output_choices.append("q8_0") parser = argparse.ArgumentParser(description="Convert a LLaMA model to a GGML compatible file") - parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") @@ -1393,18 +1392,6 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--skip-unknown", action="store_true", help="skip unknown tensor names instead of failing") args = parser.parse_args(args_in) - if args.awq_path: - sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) - from awq.apply_awq import add_scale_weights # type: ignore[import-not-found] - tmp_model_path = args.model / "weighted_model" - if tmp_model_path.is_dir(): - print(f"{tmp_model_path} exists as a weighted model.") - else: - tmp_model_path.mkdir(parents=True, exist_ok=True) - print("Saving new weighted model ...") - add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) - print(f"Saved weighted model at {tmp_model_path}.") - args.model = tmp_model_path if args.dump_single: model_plus = lazy_load_file(args.model) From e25fb4b18fcedb9bed6be4585cf842e9a669b28b Mon Sep 17 00:00:00 2001 From: bobqianic <129547291+bobqianic@users.noreply.github.com> Date: Wed, 6 Mar 2024 07:35:07 +0000 Subject: [PATCH 799/811] ggml : use `uint8x16_t` return type for `ggml_vqtbl1q_u8` (#5894) * use uint8x16_t * Update ggml-quants.c --- ggml-quants.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index e0c125d43..9dcb76def 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -464,8 +464,8 @@ inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { } // NOTE: not tested -inline static int8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { - int8x16_t res; +inline static uint8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { + uint8x16_t res; res[ 0] = a[b[ 0]]; res[ 1] = a[b[ 1]]; From e04e04f8fad549bb0b3ec1c91f0413aeb08baf29 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Wed, 6 Mar 2024 15:42:23 -0500 Subject: [PATCH 800/811] ggml : use SYS_get_cpu if SYS_getcpu is not defined (#5906) Fixes #5694 Fixes ggerganov/whisper.cpp#1894 --- ggml.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index 6a10bbcb4..92b17ee6e 100644 --- a/ggml.c +++ b/ggml.c @@ -2154,7 +2154,10 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) { getcpu_ret = getcpu(¤t_cpu, &g_state.numa.current_node); #else // old glibc doesn't have a wrapper for this call. 
Fall back on direct syscall - getcpu_ret = syscall(SYS_getcpu,¤t_cpu,&g_state.numa.current_node); +# if !defined(SYS_getcpu) && defined(SYS_get_cpu) +# define SYS_getcpu SYS_get_cpu // some older glibc versions use this name +# endif + getcpu_ret = syscall(SYS_getcpu, ¤t_cpu, &g_state.numa.current_node); #endif if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) { From ceca1aef0738b57951cd12c603c3477e75312dec Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Thu, 7 Mar 2024 16:34:31 +0800 Subject: [PATCH 801/811] [SYCL] fix error when set main gpu to non-zero (#5901) * fix error when set main gpu to non-zero * fix delete condition --- ggml-sycl.cpp | 178 ++++++++++++++++++++++++++++++-------------------- ggml-sycl.h | 1 + llama.cpp | 16 +++-- 3 files changed, 119 insertions(+), 76 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index ddd951dd6..221d67b8d 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -3559,12 +3559,31 @@ class sycl_gpu_mgr { int work_group_size = 0; std::string gpus_list = ""; + /* + Use all GPU with same top max compute units + */ sycl_gpu_mgr() { detect_sycl_gpu_list_with_max_cu(); get_allow_gpus(); create_context_with_gpus(); } + /* + Use the assigned GPU as only one + */ + sycl_gpu_mgr(int main_gpu_id) { + sycl::device device = dpct::dev_mgr::instance().get_device(main_gpu_id); + dpct::device_info prop; + dpct::get_device_info(prop, device); + gpus.push_back(main_gpu_id); + devices.push_back(device); + work_group_size = prop.get_max_work_group_size(); + max_compute_units = prop.get_max_compute_units(); + + get_allow_gpus(); + create_context_with_gpus(); + } + void create_context_with_gpus() { sycl::context ctx = sycl::context(devices); assert(gpus.size() > 0); @@ -3580,7 +3599,7 @@ class sycl_gpu_mgr { gpus_list += std::to_string(gpus[i]); gpus_list += ","; } - if (gpus_list.length() > 2) { + if (gpus_list.length() > 1) { gpus_list.pop_back(); } } @@ -3629,8 +3648,8 @@ class sycl_gpu_mgr { if (gpus[i] == id) return i; } - assert(false); - return -1; + printf("miss to get device index by id=%d\n", id); + GGML_ASSERT(false); } int get_next_index(int id) { @@ -3639,8 +3658,7 @@ class sycl_gpu_mgr { if (gpus[i] == id) return i; } - assert(false); - return -1; + GGML_ASSERT(false); } }; @@ -3649,6 +3667,7 @@ static int g_device_count = -1; static int g_all_sycl_device_count = -1; static int g_main_device = -1; static int g_main_device_id = -1; +static bool g_ggml_backend_sycl_buffer_type_initialized = false; static std::array g_default_tensor_split = {}; @@ -13225,7 +13244,7 @@ void ggml_backend_sycl_print_sycl_devices() { } void print_gpu_device_list() { - fprintf(stderr, "detect %d SYCL GPUs: [%s] with Max compute units:%d\n", + fprintf(stderr, "detect %d SYCL GPUs: [%s] with top Max compute units:%d\n", g_sycl_gpu_mgr->get_gpu_count(), g_sycl_gpu_mgr->gpus_list.c_str(), g_sycl_gpu_mgr->max_compute_units); @@ -13264,23 +13283,6 @@ void ggml_init_sycl() try { #else fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); #endif - if (CHECK_TRY_ERROR(g_all_sycl_device_count = - dpct::dev_mgr::instance().device_count()) != 0) { - initialized = true; - g_sycl_loaded = false; - return; - } - GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); - ggml_backend_sycl_print_sycl_devices(); - - if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); - - g_device_count = g_sycl_gpu_mgr->get_gpu_count(); - g_work_group_size = g_sycl_gpu_mgr->work_group_size; - - print_gpu_device_list(); - - int64_t total_vram = 0; /* NOT REMOVE, keep 
it for next optimize for XMX. #if defined(SYCL_USE_XMX) @@ -13289,52 +13291,71 @@ void ggml_init_sycl() try { fprintf(stderr, "%s: SYCL_USE_XMX: no\n", __func__); #endif */ - for (int id = 0; id < GGML_SYCL_MAX_DEVICES; ++id) { - g_device_caps[id].vmm = 0; - g_device_caps[id].device_id = -1; - g_device_caps[id].cc = 0; - g_tensor_split[id] = 0; - g_default_tensor_split[id] = 0; + + if (CHECK_TRY_ERROR(g_all_sycl_device_count = + dpct::dev_mgr::instance().device_count()) != 0) { + initialized = true; + g_sycl_loaded = false; + return; } - - for (int i = 0; i < g_device_count; ++i) { - int device_id = g_sycl_gpu_mgr->gpus[i]; - g_device_caps[i].vmm = 0; - - dpct::device_info prop; - SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(device_id)))); - - g_default_tensor_split[i] = total_vram; - total_vram += prop.get_global_mem_size(); - - g_device_caps[i].cc = - 100 * prop.get_major_version() + 10 * prop.get_minor_version(); - } - - for (int i = 0; i < g_device_count; ++i) { - g_default_tensor_split[i] /= total_vram; - } - - for (int i = 0; i < g_device_count; ++i) { - SYCL_CHECK(ggml_sycl_set_device(i)); - - // create sycl streams - for (int is = 0; is < MAX_STREAMS; ++is) { - SYCL_CHECK(CHECK_TRY_ERROR( - g_syclStreams[i][is] = - dpct::get_current_device().create_queue( - g_sycl_gpu_mgr->get_co_ctx(), dpct::get_current_device()))); - } - - const dpct::queue_ptr stream = g_syclStreams[i][0]; - // create sycl handle - SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[i] = stream)); - } - + GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); + ggml_backend_sycl_print_sycl_devices(); + if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); + print_gpu_device_list(); initialized = true; g_sycl_loaded = true; } + + + + g_device_count = g_sycl_gpu_mgr->get_gpu_count(); + g_work_group_size = g_sycl_gpu_mgr->work_group_size; + + int64_t total_vram = 0; + + + for (int id = 0; id < GGML_SYCL_MAX_DEVICES; ++id) { + g_device_caps[id].vmm = 0; + g_device_caps[id].device_id = -1; + g_device_caps[id].cc = 0; + g_tensor_split[id] = 0; + g_default_tensor_split[id] = 0; + } + + for (int i = 0; i < g_device_count; ++i) { + int device_id = g_sycl_gpu_mgr->gpus[i]; + g_device_caps[i].vmm = 0; + + dpct::device_info prop; + SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( + prop, dpct::dev_mgr::instance().get_device(device_id)))); + + g_default_tensor_split[i] = total_vram; + total_vram += prop.get_global_mem_size(); + + g_device_caps[i].cc = + 100 * prop.get_major_version() + 10 * prop.get_minor_version(); + } + + for (int i = 0; i < g_device_count; ++i) { + g_default_tensor_split[i] /= total_vram; + } + + for (int i = 0; i < g_device_count; ++i) { + SYCL_CHECK(ggml_sycl_set_device(i)); + + // create sycl streams + for (int is = 0; is < MAX_STREAMS; ++is) { + SYCL_CHECK(CHECK_TRY_ERROR( + g_syclStreams[i][is] = + dpct::get_current_device().create_queue( + g_sycl_gpu_mgr->get_co_ctx(), dpct::get_current_device()))); + } + + const dpct::queue_ptr stream = g_syclStreams[i][0]; + // create sycl handle + SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[i] = stream)); + } } catch (sycl::exception const &exc) { std::cerr << exc.what() << "Exception caught at file:" << __FILE__ @@ -16732,22 +16753,24 @@ static ggml_backend_buffer_type_i ggml_backend_sycl_buffer_type_interface = { /* .is_host = */ nullptr, }; -ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device) { +ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device_index) { + if 
(device_index>=g_device_count or device_index<0) { + printf("ggml_backend_sycl_buffer_type error: device_index:%d is out of range [0, %d], miss to call ggml_backend_sycl_set_single_device()\n", + device_index, g_device_count-1); + GGML_ASSERT(device_indexgpus[i])}, }; } - ggml_backend_sycl_buffer_type_initialized = true; + g_ggml_backend_sycl_buffer_type_initialized = true; } - - return &ggml_backend_sycl_buffer_types[device]; + return &ggml_backend_sycl_buffer_types[device_index]; } // sycl split buffer type @@ -17496,6 +17519,17 @@ GGML_API GGML_CALL int ggml_backend_sycl_get_device_index(int device_id) { return g_sycl_gpu_mgr->get_index(device_id); } +GGML_API GGML_CALL void ggml_backend_sycl_set_single_device(int main_gpu_id) { + GGML_ASSERT(main_gpu_idbackends.push_back(backend); } else { // LLAMA_SPLIT_LAYER requires a backend for each GPU - int id_list[GGML_SYCL_MAX_DEVICES]; - ggml_sycl_get_gpu_list(id_list, GGML_SYCL_MAX_DEVICES); + for (int i = 0; i < ggml_backend_sycl_get_device_count(); ++i) { - int device_id = id_list[i]; ggml_backend_t backend = ggml_backend_sycl_init(i); if (backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, device_id, i); + int id_list[GGML_SYCL_MAX_DEVICES]; + ggml_sycl_get_gpu_list(id_list, GGML_SYCL_MAX_DEVICES); + LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, id_list[i], i); llama_free(ctx); return nullptr; } From 2002bc96bf2cbf5ab981a17d7e994d817c9801f5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 7 Mar 2024 11:41:53 +0200 Subject: [PATCH 802/811] server : refactor (#5882) * server : refactoring (wip) * server : remove llava/clip objects from build * server : fix empty prompt handling + all slots idle logic * server : normalize id vars * server : code style * server : simplify model chat template validation * server : code style * server : minor * llama : llama_chat_apply_template support null buf * server : do not process embedding requests when disabled * server : reorganize structs and enums + naming fixes * server : merge oai.hpp in utils.hpp * server : refactor system prompt update at start * server : disable cached prompts with self-extend * server : do not process more than n_batch tokens per iter * server: tests: embeddings use a real embeddings model (#5908) * server, tests : bump batch to fit 1 embedding prompt * server: tests: embeddings fix build type Debug is randomly failing (#5911) * server: tests: embeddings, use different KV Cache size * server: tests: embeddings, fixed prompt do not exceed n_batch, increase embedding timeout, reduce number of concurrent embeddings * server: tests: embeddings, no need to wait for server idle as it can timout * server: refactor: clean up http code (#5912) * server : avoid n_available var ggml-ci * server: refactor: better http codes * server : simplify json parsing + add comment about t_last * server : rename server structs * server : allow to override FQDN in tests ggml-ci * server : add comments --------- Co-authored-by: Pierrick Hymbert --- .github/workflows/server.yml | 3 +- Makefile | 5 +- examples/server-embd.py | 2 +- examples/server/CMakeLists.txt | 4 +- examples/server/README.md | 2 +- examples/server/oai.hpp | 225 - examples/server/server.cpp | 3895 ++++++++--------- .../server/tests/features/embeddings.feature | 94 + .../server/tests/features/parallel.feature | 46 - examples/server/tests/features/server.feature | 28 - examples/server/tests/features/steps/steps.py | 86 +- 
examples/server/tests/requirements.txt | 1 + examples/server/utils.hpp | 703 ++- llama.cpp | 6 +- 14 files changed, 2327 insertions(+), 2773 deletions(-) delete mode 100644 examples/server/oai.hpp create mode 100644 examples/server/tests/features/embeddings.feature diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml index 04e3fc0c1..f9aeefaa8 100644 --- a/.github/workflows/server.yml +++ b/.github/workflows/server.yml @@ -58,7 +58,8 @@ jobs: cmake \ python3-pip \ wget \ - psmisc + psmisc \ + language-pack-en - name: Build id: cmake_build diff --git a/Makefile b/Makefile index 4f26c0463..efce10bb8 100644 --- a/Makefile +++ b/Makefile @@ -724,10 +724,9 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h examples/llava/llava.h examples/llava/llava.cpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +server: examples/server/server.cpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) - $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual - $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2) + $(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2) gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) diff --git a/examples/server-embd.py b/examples/server-embd.py index c5c4ea87b..118e04271 100644 --- a/examples/server-embd.py +++ b/examples/server-embd.py @@ -13,7 +13,7 @@ async def main(): model_url = "http://127.0.0.1:6900" responses: list[requests.Response] = await asyncio.gather(*[requests_post_async( url= f"{model_url}/embedding", - json= {"content": str(i)*1024} + json= {"content": str(0)*1024} ) for i in range(n)]) for response in responses: diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index cc13b2d63..c21eba634 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -1,12 +1,12 @@ set(TARGET server) option(LLAMA_SERVER_VERBOSE "Build verbose logging option for Server" ON) include_directories(${CMAKE_CURRENT_SOURCE_DIR}) -add_executable(${TARGET} server.cpp oai.hpp utils.hpp json.hpp httplib.h) +add_executable(${TARGET} server.cpp utils.hpp json.hpp httplib.h) install(TARGETS ${TARGET} RUNTIME) target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$ ) -target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common ${CMAKE_THREAD_LIBS_INIT}) if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() diff --git a/examples/server/README.md b/examples/server/README.md index 21da7a0a0..591f748f8 100644 --- 
a/examples/server/README.md +++ b/examples/server/README.md @@ -436,7 +436,7 @@ Notice that each `probs` is an array of length `n_probs`. "next_token": { "has_next_token": true, "n_remain": -1, - "num_tokens_predicted": 0, + "n_decoded": 0, "stopped_eos": false, "stopped_limit": false, "stopped_word": false, diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp deleted file mode 100644 index ff4ad6994..000000000 --- a/examples/server/oai.hpp +++ /dev/null @@ -1,225 +0,0 @@ -#pragma once - -#include -#include -#include -#include -#include -#include - -#include "json.hpp" -#include "utils.hpp" - -#define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613" - -using json = nlohmann::json; - -inline static json oaicompat_completion_params_parse( - const struct llama_model * model, - const json &body, /* openai api json semantics */ - const std::string &chat_template) -{ - json llama_params; - - llama_params["__oaicompat"] = true; - - // Map OpenAI parameters to llama.cpp parameters - // - // For parameters that are defined by the OpenAI documentation (e.g. - // temperature), we explicitly specify OpenAI's intended default; we - // need to do that because sometimes OpenAI disagrees with llama.cpp - // - // https://platform.openai.com/docs/api-reference/chat/create - llama_sampling_params default_sparams; - llama_params["model"] = json_value(body, "model", std::string("unknown")); - llama_params["prompt"] = format_chat(model, chat_template, body["messages"]); - llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); - llama_params["temperature"] = json_value(body, "temperature", 0.0); - llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); - llama_params["top_p"] = json_value(body, "top_p", 1.0); - llama_params["n_predict"] = json_value(body, "max_tokens", -1); - llama_params["logit_bias"] = json_value(body, "logit_bias",json::object()); - llama_params["frequency_penalty"] = json_value(body, "frequency_penalty", 0.0); - llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0); - llama_params["seed"] = json_value(body, "seed", LLAMA_DEFAULT_SEED); - llama_params["stream"] = json_value(body, "stream", false); - llama_params["mirostat"] = json_value(body, "mirostat", default_sparams.mirostat); - llama_params["mirostat_tau"] = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); - llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); - llama_params["penalize_nl"] = json_value(body, "penalize_nl", default_sparams.penalize_nl); - llama_params["typical_p"] = json_value(body, "typical_p", default_sparams.typical_p); - llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); - llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); - llama_params["tfs_z"] = json_value(body, "tfs_z", default_sparams.tfs_z); - - if (body.count("grammar") != 0) { - llama_params["grammar"] = json_value(body, "grammar", json::object()); - } - - // Handle 'stop' field - if (body.contains("stop") && body["stop"].is_string()) { - llama_params["stop"] = json::array({body["stop"].get()}); - } else { - llama_params["stop"] = json_value(body, "stop", json::array()); - } - - // Ensure there is ChatML-specific end sequence among stop words - llama_params["stop"].push_back("<|im_end|>"); - - return llama_params; -} - -inline static json format_final_response_oaicompat(const json &request, const task_result &response, bool streaming = false) -{ - json result = 
response.result_json; - - bool stopped_word = result.count("stopped_word") != 0; - bool stopped_eos = json_value(result, "stopped_eos", false); - int num_tokens_predicted = json_value(result, "tokens_predicted", 0); - int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); - std::string content = json_value(result, "content", std::string("")); - - std::string finish_reason = "length"; - if (stopped_word || stopped_eos) { - finish_reason = "stop"; - } - - json choices = - streaming ? json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"delta", json::object()}}}) - : json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"message", json{{"content", content}, - {"role", "assistant"}}}}}); - - std::time_t t = std::time(0); - - json res = - json{{"choices", choices}, - {"created", t}, - {"model", - json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, - {"object", streaming ? "chat.completion.chunk" : "chat.completion"}, - {"usage", - json{{"completion_tokens", num_tokens_predicted}, - {"prompt_tokens", num_prompt_tokens}, - {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, - {"id", gen_chatcmplid()}}; - - if (server_verbose) { - res["__verbose"] = result; - } - - if (result.contains("completion_probabilities")) { - res["completion_probabilities"] = json_value(result, "completion_probabilities", json::array()); - } - - return res; -} - -// return value is vector as there is one case where we might need to generate two responses -inline static std::vector format_partial_response_oaicompat(const task_result &response) { - json result = response.result_json; - - if (!result.contains("model") || !result.contains("oaicompat_token_ctr")) { - return std::vector({response.result_json}); - } - - bool first = json_value(result, "oaicompat_token_ctr", 0) == 0; - std::string modelname = json_value(result, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); - - bool stopped_word = json_value(result, "stopped_word", false); - bool stopped_eos = json_value(result, "stopped_eos", false); - bool stopped_limit = json_value(result, "stopped_limit", false); - std::string content = json_value(result, "content", std::string("")); - - std::string finish_reason; - if (stopped_word || stopped_eos) { - finish_reason = "stop"; - } - if (stopped_limit) { - finish_reason = "length"; - } - - std::time_t t = std::time(0); - - json choices; - - if (!finish_reason.empty()) { - choices = json::array({json{{"finish_reason", finish_reason}, - {"index", 0}, - {"delta", json::object()}}}); - } else { - if (first) { - if (content.empty()) { - choices = json::array({json{{"finish_reason", nullptr}, - {"index", 0}, - {"delta", json{{"role", "assistant"}}}}}); - } else { - // We have to send this as two updates to conform to openai behavior - json initial_ret = json{{"choices", json::array({json{ - {"finish_reason", nullptr}, - {"index", 0}, - {"delta", json{ - {"role", "assistant"} - }}}})}, - {"created", t}, - {"id", gen_chatcmplid()}, - {"model", modelname}, - {"object", "chat.completion.chunk"}}; - - json second_ret = json{ - {"choices", json::array({json{{"finish_reason", nullptr}, - {"index", 0}, - {"delta", json{ - {"content", content}}} - }})}, - {"created", t}, - {"id", gen_chatcmplid()}, - {"model", modelname}, - {"object", "chat.completion.chunk"}}; - - return std::vector({initial_ret, second_ret}); - } - } else { - // Some idiosyncrasy in task processing logic makes several trailing calls - // with empty content, we ignore these at the calee site. 
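            // (returning a single empty object here means these trailing calls add
            //  nothing to the stream; non-empty content falls through to a regular
            //  "delta" content chunk below)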
- if (content.empty()) { - return std::vector({json::object()}); - } - - choices = json::array({json{ - {"finish_reason", nullptr}, - {"index", 0}, - {"delta", - json{ - {"content", content}, - }}, - }}); - } - } - - json ret = json{{"choices", choices}, - {"created", t}, - {"id", gen_chatcmplid()}, - {"model", modelname}, - {"object", "chat.completion.chunk"}}; - - return std::vector({ret}); -} - -inline static json format_embeddings_response_oaicompat(const json &request, const json &embeddings) -{ - json res = - json{ - {"model", json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, - {"object", "list"}, - {"usage", - json{{"prompt_tokens", 0}, - {"total_tokens", 0}}}, - {"data", embeddings} - }; - return res; -} - diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8fe5e0b19..3bdbde954 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1,13 +1,8 @@ +#include "utils.hpp" + #include "common.h" #include "llama.h" #include "grammar-parser.h" -#include "utils.hpp" -#include "oai.hpp" - -#include "../llava/clip.h" -#include "../llava/llava.h" - -#include "stb_image.h" #ifndef NDEBUG // crash the server in debug mode, otherwise send an http 500 error @@ -24,46 +19,76 @@ #include "completion.js.hpp" #include "json-schema-to-grammar.mjs.hpp" -#include -#include +#include #include #include -#include +#include +#include +#include +#include #include using json = nlohmann::json; -struct server_params { - std::string hostname = "127.0.0.1"; - std::vector api_keys; - std::string public_path = "examples/server/public"; - std::string chat_template = ""; - int32_t port = 8080; - int32_t read_timeout = 600; - int32_t write_timeout = 600; - bool slots_endpoint = true; - bool metrics_endpoint = false; - int n_threads_http = -1; -}; - bool server_verbose = false; bool server_log_json = true; enum stop_type { - STOP_FULL, - STOP_PARTIAL, + STOP_TYPE_FULL, + STOP_TYPE_PARTIAL, }; -// TODO: can become bool if we can't find use of more states enum slot_state { - IDLE, - PROCESSING, + SLOT_STATE_IDLE, + SLOT_STATE_PROCESSING, }; enum slot_command { - NONE, - LOAD_PROMPT, - RELEASE, + SLOT_COMMAND_NONE, + SLOT_COMMAND_LOAD_PROMPT, + SLOT_COMMAND_RELEASE, +}; + +enum server_state { + SERVER_STATE_LOADING_MODEL, // Server is starting up, model not fully loaded yet + SERVER_STATE_READY, // Server is ready and model is loaded + SERVER_STATE_ERROR // An error occurred, load_model failed +}; + +enum server_task_type { + SERVER_TASK_TYPE_COMPLETION, + SERVER_TASK_TYPE_CANCEL, + SERVER_TASK_TYPE_NEXT_RESPONSE, + SERVER_TASK_TYPE_METRICS +}; + +struct server_task { + int id = -1; // to be filled by server_queue + int id_multi = -1; + int id_target = -1; + + server_task_type type; + json data; + + bool infill = false; + bool embedding = false; +}; + +struct server_task_result { + int id = -1; + int id_multi = -1; + + json data; + + bool stop; + bool error; +}; + +struct server_task_multi { + int id = -1; + + std::set subtasks_remaining; + std::vector results; }; struct slot_params { @@ -80,26 +105,32 @@ struct slot_params { json input_suffix; }; -struct slot_image { - int32_t id; +struct server_params { + int32_t port = 8080; + int32_t read_timeout = 600; + int32_t write_timeout = 600; + int32_t n_threads_http = -1; - bool request_encode_image = false; - float * image_embedding = nullptr; - int32_t image_tokens = 0; + std::string hostname = "127.0.0.1"; + std::string public_path = "examples/server/public"; + std::string chat_template = ""; + std::string 
system_prompt = ""; - clip_image_u8 * img_data; + std::vector api_keys; - std::string prefix_prompt; // before of this image + bool slots_endpoint = true; + bool metrics_endpoint = false; }; struct server_slot { int id; - int task_id = -1; + int id_task = -1; + int id_multi = -1; struct slot_params params; - slot_state state = IDLE; - slot_command command = NONE; + slot_state state = SLOT_STATE_IDLE; + slot_command command = SLOT_COMMAND_NONE; // used to determine the slot that has been used the longest int64_t t_last_used = -1; @@ -116,27 +147,31 @@ struct server_slot { int32_t n_prompt_tokens_processed = 0; json prompt; + + // when a task is submitted, we first tokenize the prompt and store it here + std::vector prompt_tokens; + std::string generated_text; - llama_token sampled; std::vector cache_tokens; std::vector generated_token_probs; - bool infill = false; - bool embedding = false; + bool infill = false; + bool embedding = false; bool has_next_token = true; - bool truncated = false; - bool stopped_eos = false; - bool stopped_word = false; - bool stopped_limit = false; + bool truncated = false; + bool stopped_eos = false; + bool stopped_word = false; + bool stopped_limit = false; bool oaicompat = false; - std::string oaicompat_model; + std::string oaicompat_model; std::string stopping_word; // sampling + llama_token sampled; struct llama_sampling_params sparams; - llama_sampling_context *ctx_sampling = nullptr; + llama_sampling_context * ctx_sampling = nullptr; int32_t ga_i = 0; // group-attention state int32_t ga_n = 1; // group-attention factor @@ -144,48 +179,32 @@ struct server_slot { int32_t n_past_se = 0; // self-extend - // multimodal - std::vector images; - // stats size_t n_sent_text = 0; // number of sent text character size_t n_sent_token_probs = 0; int64_t t_start_process_prompt; - int64_t t_start_genereration; + int64_t t_start_generation; double t_prompt_processing; // ms double t_token_generation; // ms - // multitasks - int multitask_id = -1; - void reset() { - n_prompt_tokens = 0; - generated_text = ""; - truncated = false; - stopped_eos = false; - stopped_word = false; - stopped_limit = false; - stopping_word = ""; - n_past = 0; - n_sent_text = 0; - n_sent_token_probs = 0; - infill = false; - ga_i = 0; - n_past_se = 0; + n_prompt_tokens = 0; + generated_text = ""; + truncated = false; + stopped_eos = false; + stopped_word = false; + stopped_limit = false; + stopping_word = ""; + n_past = 0; + n_sent_text = 0; + n_sent_token_probs = 0; + infill = false; + ga_i = 0; + n_past_se = 0; generated_token_probs.clear(); - - for (slot_image & img : images) { - free(img.image_embedding); - if (img.img_data) { - clip_image_u8_free(img.img_data); - } - img.prefix_prompt = ""; - } - - images.clear(); } bool has_budget(gpt_params &global_params) { @@ -205,32 +224,29 @@ struct server_slot { } bool available() const { - return state == IDLE && command == NONE; + return state == SLOT_STATE_IDLE && command == SLOT_COMMAND_NONE; } bool is_processing() const { - return (state == IDLE && command == LOAD_PROMPT) || state == PROCESSING; + return (state == SLOT_STATE_IDLE && command == SLOT_COMMAND_LOAD_PROMPT) || state == SLOT_STATE_PROCESSING; } - void add_token_string(const completion_token_output &token) { - if (command == RELEASE) { + void add_token_string(const completion_token_output & token) { + if (command == SLOT_COMMAND_RELEASE) { return; } - cache_tokens.push_back(token.tok); generated_token_probs.push_back(token); } void release() { - if (state == PROCESSING) - { - 
t_token_generation = (ggml_time_us() - t_start_genereration) / 1e3; - command = RELEASE; + if (state == SLOT_STATE_PROCESSING) { + t_token_generation = (ggml_time_us() - t_start_generation) / 1e3; + command = SLOT_COMMAND_RELEASE; } } - json get_formated_timings() { - return json - { + json get_formated_timings() const { + return json { {"prompt_n", n_prompt_tokens_processed}, {"prompt_ms", t_prompt_processing}, {"prompt_per_token_ms", t_prompt_processing / n_prompt_tokens_processed}, @@ -243,16 +259,47 @@ struct server_slot { }; } + size_t find_stopping_strings(const std::string & text, const size_t last_token_size, const stop_type type) { + size_t stop_pos = std::string::npos; + + for (const std::string & word : params.antiprompt) { + size_t pos; + + if (type == STOP_TYPE_FULL) { + const size_t tmp = word.size() + last_token_size; + const size_t from_pos = text.size() > tmp ? text.size() - tmp : 0; + + pos = text.find(word, from_pos); + } else { + pos = find_partial_stop_string(word, text); + } + + if (pos != std::string::npos && (stop_pos == std::string::npos || pos < stop_pos)) { + if (type == STOP_TYPE_FULL) { + stopped_word = true; + stopping_word = word; + has_next_token = false; + } + stop_pos = pos; + } + } + + return stop_pos; + } + void print_timings() const { - char buffer[512]; + char buffer[512]; + double t_token = t_prompt_processing / n_prompt_tokens_processed; double n_tokens_second = 1e3 / t_prompt_processing * n_prompt_tokens_processed; - sprintf(buffer, "prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)", + + snprintf(buffer, 512, "prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)", t_prompt_processing, n_prompt_tokens_processed, t_token, n_tokens_second); + LOG_INFO(buffer, { - {"slot_id", id}, - {"task_id", task_id}, + {"id_slot", id}, + {"id_task", id_task}, {"t_prompt_processing", t_prompt_processing}, {"n_prompt_tokens_processed", n_prompt_tokens_processed}, {"t_token", t_token}, @@ -261,22 +308,25 @@ struct server_slot { t_token = t_token_generation / n_decoded; n_tokens_second = 1e3 / t_token_generation * n_decoded; - sprintf(buffer, "generation eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)", + + snprintf(buffer, 512, "generation eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)", t_token_generation, n_decoded, t_token, n_tokens_second); + LOG_INFO(buffer, { - {"slot_id", id}, - {"task_id", task_id}, + {"id_slot", id}, + {"id_task", id_task}, {"t_token_generation", t_token_generation}, {"n_decoded", n_decoded}, {"t_token", t_token}, {"n_tokens_second", n_tokens_second}, }); - sprintf(buffer, " total time = %10.2f ms", t_prompt_processing + t_token_generation); + snprintf(buffer, 512, " total time = %10.2f ms", t_prompt_processing + t_token_generation); + LOG_INFO(buffer, { - {"slot_id", id}, - {"task_id", task_id}, + {"id_slot", id}, + {"id_task", id_task}, {"t_prompt_processing", t_prompt_processing}, {"t_token_generation", t_token_generation}, {"t_total", t_prompt_processing + t_token_generation}, @@ -291,9 +341,8 @@ struct server_metrics { uint64_t n_prompt_tokens_processed = 0; uint64_t t_prompt_processing = 0; - uint64_t n_tokens_predicted = 0; - uint64_t t_tokens_generation = 0; - + uint64_t n_tokens_predicted = 0; + uint64_t t_tokens_generation = 0; void on_prompt_eval(const server_slot &slot) { n_prompt_tokens_processed_total += slot.n_prompt_tokens_processed; @@ -315,23 +364,261 @@ struct server_metrics { } }; -struct 
llama_server_context -{ - llama_model *model = nullptr; - llama_context *ctx = nullptr; +struct server_queue { + int id = 0; + bool running; - clip_ctx *clp_ctx = nullptr; + // queues + std::vector queue_tasks; + std::vector queue_tasks_deferred; + + std::vector queue_multitasks; + + std::mutex mutex_tasks; + std::condition_variable condition_tasks; + + // callback functions + std::function callback_new_task; + std::function callback_finish_multitask; + std::function callback_run_slots; + + // Add a new task to the end of the queue + int post(server_task task) { + std::unique_lock lock(mutex_tasks); + if (task.id == -1) { + task.id = id++; + LOG_VERBOSE("new task id", {{"new_id", task.id}}); + } + queue_tasks.push_back(std::move(task)); + condition_tasks.notify_one(); + return task.id; + } + + // Add a new task, but defer until one slot is available + void defer(server_task task) { + std::unique_lock lock(mutex_tasks); + queue_tasks_deferred.push_back(std::move(task)); + } + + // Get the next id for creating anew task + int get_new_id() { + std::unique_lock lock(mutex_tasks); + int new_id = id++; + LOG_VERBOSE("new task id", {{"new_id", new_id}}); + return new_id; + } + + // Register function to process a new task + void on_new_task(std::function callback) { + callback_new_task = std::move(callback); + } + + // Register function to process a multitask when it is finished + void on_finish_multitask(std::function callback) { + callback_finish_multitask = std::move(callback); + } + + // Register the function to be called when all slots data is ready to be processed + void on_run_slots(std::function callback) { + callback_run_slots = std::move(callback); + } + + // Call when the state of one slot is changed + void notify_slot_changed() { + // move deferred tasks back to main loop + std::unique_lock lock(mutex_tasks); + for (auto & task : queue_tasks_deferred) { + queue_tasks.push_back(std::move(task)); + } + queue_tasks_deferred.clear(); + } + + // end the start_loop routine + void terminate() { + std::unique_lock lock(mutex_tasks); + running = false; + condition_tasks.notify_all(); + } + + /** + * Main loop consists of these steps: + * - Wait until a new task arrives + * - Process the task (i.e. 
maybe copy data into slot) + * - Check if multitask is finished + * - Run all slots + */ + void start_loop() { + running = true; + + while (true) { + LOG_VERBOSE("new task may arrive", {}); + + while (true) { + std::unique_lock lock(mutex_tasks); + if (queue_tasks.empty()) { + lock.unlock(); + break; + } + server_task task = queue_tasks.front(); + queue_tasks.erase(queue_tasks.begin()); + lock.unlock(); + LOG_VERBOSE("callback_new_task", {{"id_task", task.id}}); + callback_new_task(task); + } + + LOG_VERBOSE("update_multitasks", {}); + + // check if we have any finished multitasks + auto queue_iterator = queue_multitasks.begin(); + while (queue_iterator != queue_multitasks.end()) { + if (queue_iterator->subtasks_remaining.empty()) { + // all subtasks done == multitask is done + server_task_multi current_multitask = *queue_iterator; + callback_finish_multitask(current_multitask); + // remove this multitask + queue_iterator = queue_multitasks.erase(queue_iterator); + } else { + ++queue_iterator; + } + } + + // all tasks in the current loop is processed, slots data is now ready + LOG_VERBOSE("callback_run_slots", {}); + + callback_run_slots(); + + LOG_VERBOSE("wait for new task", {}); + { + std::unique_lock lock(mutex_tasks); + if (queue_tasks.empty()) { + if (!running) { + LOG_VERBOSE("ending start_loop", {}); + return; + } + condition_tasks.wait(lock, [&]{ + return (!queue_tasks.empty() || !running); + }); + } + } + } + } + + // + // functions to manage multitasks + // + + // add a multitask by specifying the id of all subtask (subtask is a server_task) + void add_multitask(int id_multi, std::vector & sub_ids) { + std::lock_guard lock(mutex_tasks); + server_task_multi multi; + multi.id = id_multi; + std::copy(sub_ids.begin(), sub_ids.end(), std::inserter(multi.subtasks_remaining, multi.subtasks_remaining.end())); + queue_multitasks.push_back(multi); + } + + // updatethe remaining subtasks, while appending results to multitask + void update_multitask(int id_multi, int id_sub, server_task_result & result) { + std::lock_guard lock(mutex_tasks); + for (auto & multitask : queue_multitasks) { + if (multitask.id == id_multi) { + multitask.subtasks_remaining.erase(id_sub); + multitask.results.push_back(result); + } + } + } +}; + +struct server_response { + typedef std::function callback_multitask_t; + callback_multitask_t callback_update_multitask; + + // for keeping track of all tasks waiting for the result + std::set waiting_task_ids; + + // the main result queue + std::vector queue_results; + + std::mutex mutex_results; + std::condition_variable condition_results; + + // add the id_task to the list of tasks waiting for response + void add_waiting_task_id(int id_task) { + LOG_VERBOSE("waiting for task id", {{"id_task", id_task}}); + + std::unique_lock lock(mutex_results); + waiting_task_ids.insert(id_task); + } + + // when the request is finished, we can remove task associated with it + void remove_waiting_task_id(int id_task) { + LOG_VERBOSE("remove waiting for task id", {{"id_task", id_task}}); + + std::unique_lock lock(mutex_results); + waiting_task_ids.erase(id_task); + } + + // This function blocks the thread until there is a response for this id_task + server_task_result recv(int id_task) { + while (true) { + std::unique_lock lock(mutex_results); + condition_results.wait(lock, [&]{ + return !queue_results.empty(); + }); + + for (int i = 0; i < (int) queue_results.size(); i++) { + if (queue_results[i].id == id_task) { + assert(queue_results[i].id_multi == -1); + server_task_result res = 
queue_results[i]; + queue_results.erase(queue_results.begin() + i); + return res; + } + } + } + + // should never reach here + } + + // Register the function to update multitask + void on_multitask_update(callback_multitask_t callback) { + callback_update_multitask = std::move(callback); + } + + // Send a new result to a waiting id_task + void send(server_task_result result) { + LOG_VERBOSE("send new result", {{"id_task", result.id}}); + + std::unique_lock lock(mutex_results); + for (const auto & id_task : waiting_task_ids) { + // LOG_TEE("waiting task id %i \n", id_task); + // for now, tasks that have associated parent multitasks just get erased once multitask picks up the result + if (result.id_multi == id_task) { + LOG_VERBOSE("callback_update_multitask", {{"id_task", id_task}}); + callback_update_multitask(id_task, result.id, result); + continue; + } + + if (result.id == id_task) { + LOG_VERBOSE("queue_results.push_back", {{"id_task", id_task}}); + queue_results.push_back(result); + condition_results.notify_all(); + return; + } + } + } +}; + +struct server_context { + llama_model * model = nullptr; + llama_context * ctx = nullptr; gpt_params params; llama_batch batch; - bool multimodal = false; - bool clean_kv_cache = true; - bool all_slots_are_idle = false; - bool add_bos_token = true; + bool clean_kv_cache = true; + bool add_bos_token = true; - int32_t n_ctx; // total context for all clients / slots + int32_t n_ctx; // total context for all clients / slots // system prompt bool system_need_update = false; @@ -346,60 +633,32 @@ struct llama_server_context std::vector slots; json default_generation_settings_for_props; - llama_server_queue queue_tasks; - llama_server_response queue_results; + server_queue queue_tasks; + server_response queue_results; server_metrics metrics; - ~llama_server_context() - { - if (ctx) - { + ~server_context() { + if (ctx) { llama_free(ctx); ctx = nullptr; } - if (model) - { + + if (model) { llama_free_model(model); model = nullptr; } } - bool load_model(const gpt_params ¶ms_) - { + bool load_model(const gpt_params & params_) { params = params_; - if (!params.mmproj.empty()) { - multimodal = true; - LOG_INFO("Multi Modal Mode Enabled", {}); - clp_ctx = clip_model_load(params.mmproj.c_str(), /*verbosity=*/ 1); - if(clp_ctx == nullptr) { - LOG_ERROR("unable to load clip model", {{"model", params.mmproj}}); - return false; - } - - if (params.n_ctx < 2048) { // request larger context for the image embedding - params.n_ctx = 2048; - } - } std::tie(model, ctx) = llama_init_from_gpt_params(params); - if (model == nullptr) - { + if (model == nullptr) { LOG_ERROR("unable to load model", {{"model", params.model}}); return false; } - if (multimodal) { - const int n_embd_clip = clip_n_mmproj_embd(clp_ctx); - const int n_embd_llm = llama_n_embd(model); - if (n_embd_clip != n_embd_llm) { - LOG_TEE("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). 
Make sure that you use the correct mmproj file.\n", __func__, n_embd_clip, n_embd_llm); - llama_free(ctx); - llama_free_model(model); - return false; - } - } - n_ctx = llama_n_ctx(ctx); add_bos_token = llama_should_add_bos_token(model); @@ -407,25 +666,19 @@ struct llama_server_context return true; } - void validate_model_chat_template(server_params & sparams) { + bool validate_model_chat_template() const { llama_chat_message chat[] = {{"user", "test"}}; - std::vector buf(1); - int res = llama_chat_apply_template(model, nullptr, chat, 1, true, buf.data(), buf.size()); - if (res < 0) { - LOG_ERROR("The chat template comes with this model is not yet supported, falling back to chatml. This may cause the model to output suboptimal responses", {}); - sparams.chat_template = "chatml"; - } + + const int res = llama_chat_apply_template(model, nullptr, chat, 1, true, nullptr, 0); + + return res > 0; } void initialize() { - // create slots - all_slots_are_idle = true; - const int32_t n_ctx_slot = n_ctx / params.n_parallel; LOG_INFO("initializing slots", {{"n_slots", params.n_parallel}}); - for (int i = 0; i < params.n_parallel; i++) - { + for (int i = 0; i < params.n_parallel; i++) { server_slot slot; slot.id = i; @@ -433,7 +686,7 @@ struct llama_server_context slot.n_predict = params.n_predict; LOG_INFO("new slot", { - {"slot_id", slot.id}, + {"id_slot", slot.id}, {"n_ctx_slot", slot.n_ctx} }); @@ -447,9 +700,9 @@ struct llama_server_context //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * ga_n"); // NOLINT LOG_INFO("slot self-extend", { - {"slot_id", slot.id}, - {"ga_n", ga_n}, - {"ga_w", ga_w} + {"id_slot", slot.id}, + {"ga_n", ga_n}, + {"ga_w", ga_w} }); } @@ -468,8 +721,7 @@ struct llama_server_context batch = llama_batch_init(n_ctx, 0, params.n_parallel); } - std::vector tokenize(const json & json_prompt, bool add_bos) const - { + std::vector tokenize(const json & json_prompt, bool add_bos) const { // TODO: currently, we tokenize using special tokens by default // this is not always correct (see https://github.com/ggerganov/llama.cpp/pull/4160#issuecomment-1824826216) // but it's better compared to completely ignoring ChatML and other chat templates @@ -479,38 +731,30 @@ struct llama_server_context // or the first element of the json_prompt array is a string. 
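        // For reference, the prompt shapes handled below (token ids are arbitrary example values):
        //   "prompt": "Once upon a time"          - a single string
        //   "prompt": ["Once upon", " a time"]    - an array of strings, tokenized in order
        //   "prompt": [5613, 2501, 931]           - an array of token ids, used as-is
        //   "prompt": ["Once upon", 2501, 931]    - a mix of strings and token ids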
std::vector prompt_tokens; - if (json_prompt.is_array()) - { + if (json_prompt.is_array()) { bool first = true; - for (const auto& p : json_prompt) - { - if (p.is_string()) - { + for (const auto & p : json_prompt) { + if (p.is_string()) { auto s = p.template get(); + std::vector p; - if (first) - { + if (first) { p = ::llama_tokenize(ctx, s, add_bos, TMP_FORCE_SPECIAL); first = false; - } - else - { + } else { p = ::llama_tokenize(ctx, s, false, TMP_FORCE_SPECIAL); } + prompt_tokens.insert(prompt_tokens.end(), p.begin(), p.end()); - } - else - { - if (first) - { + } else { + if (first) { first = false; } + prompt_tokens.push_back(p.template get()); } } - } - else - { + } else { auto s = json_prompt.template get(); prompt_tokens = ::llama_tokenize(ctx, s, add_bos, TMP_FORCE_SPECIAL); } @@ -518,19 +762,18 @@ struct llama_server_context return prompt_tokens; } - server_slot* get_slot(int id) { + server_slot * get_slot(int id) { int64_t t_last = ggml_time_us(); - server_slot *last_used = nullptr; - for (server_slot & slot : slots) - { - if (slot.id == id && slot.available()) - { + server_slot * last_used = nullptr; + + for (server_slot & slot : slots) { + if (slot.id == id && slot.available()) { return &slot; } - if (slot.available() && slot.t_last_used < t_last) - { + // among all available slots, find the one that has been least recently used + if (slot.available() && slot.t_last_used < t_last) { last_used = &slot; t_last = slot.t_last_used; } @@ -539,295 +782,204 @@ struct llama_server_context return last_used; } - bool launch_slot_with_data(server_slot* &slot, json data) { + bool launch_slot_with_data(server_slot & slot, json data) const { slot_params default_params; llama_sampling_params default_sparams; if (data.count("__oaicompat") != 0) { - slot->oaicompat = true; - slot->oaicompat_model = json_value(data, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); + slot.oaicompat = true; + slot.oaicompat_model = json_value(data, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); } else { - slot->oaicompat = false; - slot->oaicompat_model = ""; + slot.oaicompat = false; + slot.oaicompat_model = ""; } - slot->params.stream = json_value(data, "stream", false); - slot->params.cache_prompt = json_value(data, "cache_prompt", false); - slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); - slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); - slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); - slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p); - slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); - slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); - slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); - slot->sparams.dynatemp_range = json_value(data, "dynatemp_range", default_sparams.dynatemp_range); - slot->sparams.dynatemp_exponent = json_value(data, "dynatemp_exponent", default_sparams.dynatemp_exponent); - slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); - slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); - slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); - slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); - slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); - slot->sparams.mirostat_tau = json_value(data, 
"mirostat_tau", default_sparams.mirostat_tau); - slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); - slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); - slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); - slot->params.seed = json_value(data, "seed", default_params.seed); - slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); - slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); - slot->sparams.min_keep = json_value(data, "min_keep", default_sparams.min_keep); + slot.params.stream = json_value(data, "stream", false); + slot.params.cache_prompt = json_value(data, "cache_prompt", false); + slot.params.n_predict = json_value(data, "n_predict", default_params.n_predict); + slot.sparams.top_k = json_value(data, "top_k", default_sparams.top_k); + slot.sparams.top_p = json_value(data, "top_p", default_sparams.top_p); + slot.sparams.min_p = json_value(data, "min_p", default_sparams.min_p); + slot.sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); + slot.sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); + slot.sparams.temp = json_value(data, "temperature", default_sparams.temp); + slot.sparams.dynatemp_range = json_value(data, "dynatemp_range", default_sparams.dynatemp_range); + slot.sparams.dynatemp_exponent = json_value(data, "dynatemp_exponent", default_sparams.dynatemp_exponent); + slot.sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); + slot.sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); + slot.sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); + slot.sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); + slot.sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); + slot.sparams.mirostat_tau = json_value(data, "mirostat_tau", default_sparams.mirostat_tau); + slot.sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); + slot.sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); + slot.params.n_keep = json_value(data, "n_keep", slot.params.n_keep); + slot.params.seed = json_value(data, "seed", default_params.seed); + slot.sparams.grammar = json_value(data, "grammar", default_sparams.grammar); + slot.sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + slot.sparams.min_keep = json_value(data, "min_keep", default_sparams.min_keep); - if (slot->n_predict > 0 && slot->params.n_predict > slot->n_predict) { + if (slot.params.cache_prompt && slot.ga_n != 1) { + LOG_WARNING("cache_prompt is not supported with group-attention", {}); + slot.params.cache_prompt = false; + } + + if (slot.n_predict > 0 && slot.params.n_predict > slot.n_predict) { // Might be better to reject the request with a 400 ? 
LOG_WARNING("Max tokens to predict exceeds server configuration", { - {"params.n_predict", slot->params.n_predict}, - {"slot.n_predict", slot->n_predict}, + {"params.n_predict", slot.params.n_predict}, + {"slot.n_predict", slot.n_predict}, }); - slot->params.n_predict = slot->n_predict; + slot.params.n_predict = slot.n_predict; } // infill - if (data.count("input_prefix") != 0) - { - slot->params.input_prefix = data["input_prefix"]; - } - else - { - slot->params.input_prefix = ""; - } + slot.params.input_prefix = json_value(data, "input_prefix", default_params.input_prefix); + slot.params.input_suffix = json_value(data, "input_suffix", default_params.input_suffix); + slot.prompt = json_value(data, "prompt", std::string("")); - if (data.count("input_suffix") != 0) + // penalize user-provided tokens { - slot->params.input_suffix = data["input_suffix"]; - } - else - { - slot->params.input_suffix = ""; - } + slot.sparams.penalty_prompt_tokens.clear(); + slot.sparams.use_penalty_prompt_tokens = false; - if (data.count("prompt") != 0) - { - slot->prompt = data["prompt"]; - } - else - { - slot->prompt = ""; - } + const auto & penalty_prompt = data.find("penalty_prompt"); - slot->sparams.penalty_prompt_tokens.clear(); - slot->sparams.use_penalty_prompt_tokens = false; - const auto &penalty_prompt = data.find("penalty_prompt"); - if (penalty_prompt != data.end()) - { - if (penalty_prompt->is_string()) - { - const auto penalty_prompt_string = penalty_prompt->get(); - auto penalty_tokens = llama_tokenize(model, penalty_prompt_string, false); - slot->sparams.penalty_prompt_tokens.swap(penalty_tokens); - if (slot->params.n_predict > 0) - { - slot->sparams.penalty_prompt_tokens.reserve(slot->sparams.penalty_prompt_tokens.size() + slot->params.n_predict); - } - slot->sparams.use_penalty_prompt_tokens = true; - } - else if (penalty_prompt->is_array()) - { - const auto n_tokens = penalty_prompt->size(); - slot->sparams.penalty_prompt_tokens.reserve(n_tokens + std::max(0, slot->params.n_predict)); - const int n_vocab = llama_n_vocab(model); - for (const auto &penalty_token : *penalty_prompt) - { - if (penalty_token.is_number_integer()) - { - const auto tok = penalty_token.get(); - if (tok >= 0 && tok < n_vocab) - { - slot->sparams.penalty_prompt_tokens.push_back(tok); - } + if (penalty_prompt != data.end()) { + if (penalty_prompt->is_string()) { + const auto penalty_prompt_string = penalty_prompt->get(); + slot.sparams.penalty_prompt_tokens = llama_tokenize(model, penalty_prompt_string, false); + + if (slot.params.n_predict > 0) { + slot.sparams.penalty_prompt_tokens.reserve(slot.sparams.penalty_prompt_tokens.size() + slot.params.n_predict); } - } - slot->sparams.use_penalty_prompt_tokens = true; - } - } + slot.sparams.use_penalty_prompt_tokens = true; - slot->sparams.logit_bias.clear(); - - if (json_value(data, "ignore_eos", false)) - { - slot->sparams.logit_bias[llama_token_eos(model)] = -INFINITY; - } - - const auto &logit_bias = data.find("logit_bias"); - if (logit_bias != data.end() && logit_bias->is_array()) - { - const int n_vocab = llama_n_vocab(model); - for (const auto &el : *logit_bias) - { - if (el.is_array() && el.size() == 2) - { - float bias; - if (el[1].is_number()) - { - bias = el[1].get(); - } - else if (el[1].is_boolean() && !el[1].get()) - { - bias = -INFINITY; - } - else - { - continue; - } - - if (el[0].is_number_integer()) - { - llama_token tok = el[0].get(); - if (tok >= 0 && tok < n_vocab) - { - slot->sparams.logit_bias[tok] = bias; - } - } - else if (el[0].is_string()) - { - auto 
toks = llama_tokenize(model, el[0].get(), false); - for (auto tok : toks) - { - slot->sparams.logit_bias[tok] = bias; - } - } - } - } - } - - slot->params.antiprompt.clear(); - - const auto &stop = data.find("stop"); - if (stop != data.end() && stop->is_array()) - { - for (const auto &word : *stop) - { - if (!word.empty()) - { - slot->params.antiprompt.push_back(word); - } - } - } - - const auto &samplers_sequence = data.find("samplers"); - if (samplers_sequence != data.end() && samplers_sequence->is_array()) - { - std::vector sampler_names; - for (const auto &sampler_name : *samplers_sequence) - { - if (sampler_name.is_string()) - { - sampler_names.emplace_back(sampler_name); - } - } - slot->sparams.samplers_sequence = sampler_types_from_names(sampler_names, false); - } - else - { - slot->sparams.samplers_sequence = default_sparams.samplers_sequence; - } - - if (multimodal) - { - const auto &images_data = data.find("image_data"); - if (images_data != data.end() && images_data->is_array()) - { - for (const auto &img : *images_data) - { - const std::vector image_buffer = base64_decode(img["data"].get()); - - slot_image img_sl; - img_sl.id = img.count("id") != 0 ? img["id"].get() : slot->images.size(); - img_sl.img_data = clip_image_u8_init(); - if (!clip_image_load_from_bytes(image_buffer.data(), image_buffer.size(), img_sl.img_data)) - { - LOG_ERROR("failed to load image", { - {"slot_id", slot->id}, - {"img_sl_id", img_sl.id} - }); - return false; - } - LOG_VERBOSE("image loaded", { - {"slot_id", slot->id}, - {"img_sl_id", img_sl.id} + LOG_VERBOSE("penalty_prompt_tokens", { + {"id_slot", slot.id}, + {"tokens", slot.sparams.penalty_prompt_tokens}, }); - img_sl.request_encode_image = true; - slot->images.push_back(img_sl); } - // process prompt - // example: system prompt [img-102] user [img-103] describe [img-134] -> [{id: 102, prefix: 'system prompt '}, {id: 103, prefix: ' user '}, {id: 134, prefix: ' describe '}]} - if (slot->images.size() > 0 && !slot->prompt.is_array()) - { - std::string prompt = slot->prompt.get(); - size_t pos = 0, begin_prefix = 0; - std::string pattern = "[img-"; - while ((pos = prompt.find(pattern, pos)) != std::string::npos) { - size_t end_prefix = pos; - pos += pattern.length(); - size_t end_pos = prompt.find(']', pos); - if (end_pos != std::string::npos) - { - std::string image_id = prompt.substr(pos, end_pos - pos); - try - { - int img_id = std::stoi(image_id); - bool found = false; - for (slot_image &img : slot->images) - { - if (img.id == img_id) { - found = true; - img.prefix_prompt = prompt.substr(begin_prefix, end_prefix - begin_prefix); - begin_prefix = end_pos + 1; - break; - } - } - if (!found) { - LOG_TEE("ERROR: Image with id: %i, not found.\n", img_id); - slot->images.clear(); - return false; - } - } catch (const std::invalid_argument& e) { - LOG_TEE("Invalid image number id in prompt\n"); - slot->images.clear(); - return false; + else if (penalty_prompt->is_array()) { + const auto n_tokens = penalty_prompt->size(); + slot.sparams.penalty_prompt_tokens.reserve(n_tokens + std::max(0, slot.params.n_predict)); + + const int n_vocab = llama_n_vocab(model); + for (const auto & penalty_token : *penalty_prompt) { + if (penalty_token.is_number_integer()) { + const auto tok = penalty_token.get(); + if (tok >= 0 && tok < n_vocab) { + slot.sparams.penalty_prompt_tokens.push_back(tok); } } } - slot->prompt = ""; - slot->params.input_suffix = prompt.substr(begin_prefix); - slot->params.cache_prompt = false; // multimodal doesn't support cache prompt + 
slot.sparams.use_penalty_prompt_tokens = true; + + LOG_VERBOSE("penalty_prompt_tokens", { + {"id_slot", slot.id}, + {"tokens", slot.sparams.penalty_prompt_tokens}, + }); } } } - if (slot->ctx_sampling != nullptr) { - llama_sampling_free(slot->ctx_sampling); - } - slot->ctx_sampling = llama_sampling_init(slot->sparams); - llama_set_rng_seed(ctx, slot->params.seed); - slot->command = LOAD_PROMPT; + slot.sparams.logit_bias.clear(); - all_slots_are_idle = false; + if (json_value(data, "ignore_eos", false)) { + slot.sparams.logit_bias[llama_token_eos(model)] = -INFINITY; + } + + const auto & logit_bias = data.find("logit_bias"); + if (logit_bias != data.end() && logit_bias->is_array()) { + const int n_vocab = llama_n_vocab(model); + for (const auto & el : *logit_bias) { + if (el.is_array() && el.size() == 2) { + float bias; + if (el[1].is_number()) { + bias = el[1].get(); + } else if (el[1].is_boolean() && !el[1].get()) { + bias = -INFINITY; + } else { + continue; + } + + if (el[0].is_number_integer()) { + llama_token tok = el[0].get(); + if (tok >= 0 && tok < n_vocab) { + slot.sparams.logit_bias[tok] = bias; + } + } else if (el[0].is_string()) { + auto toks = llama_tokenize(model, el[0].get(), false); + for (auto tok : toks) { + slot.sparams.logit_bias[tok] = bias; + } + } + } + } + } + } + + { + slot.params.antiprompt.clear(); + + const auto & stop = data.find("stop"); + if (stop != data.end() && stop->is_array()) { + for (const auto & word : *stop) { + if (!word.empty()) { + slot.params.antiprompt.push_back(word); + } + } + } + } + + { + const auto & samplers_sequence = data.find("samplers"); + if (samplers_sequence != data.end() && samplers_sequence->is_array()) { + std::vector sampler_names; + for (const auto & sampler_name : *samplers_sequence) { + if (sampler_name.is_string()) { + sampler_names.emplace_back(sampler_name); + } + } + slot.sparams.samplers_sequence = sampler_types_from_names(sampler_names, false); + } else { + slot.sparams.samplers_sequence = default_sparams.samplers_sequence; + } + } + + { + if (slot.ctx_sampling != nullptr) { + llama_sampling_free(slot.ctx_sampling); + } + slot.ctx_sampling = llama_sampling_init(slot.sparams); + llama_set_rng_seed(ctx, slot.params.seed); + } + + slot.command = SLOT_COMMAND_LOAD_PROMPT; + slot.prompt_tokens.clear(); LOG_INFO("slot is processing task", { - {"slot_id", slot->id}, - {"task_id", slot->task_id}, + {"id_slot", slot.id}, + {"id_task", slot.id_task}, }); return true; } void kv_cache_clear() { + LOG_VERBOSE("clearing KV cache", {}); + // clear the entire KV cache llama_kv_cache_clear(ctx); clean_kv_cache = false; } void system_prompt_update() { + LOG_VERBOSE("system prompt update", { + {"system_prompt", system_prompt}, + }); + kv_cache_clear(); system_tokens.clear(); @@ -836,13 +988,11 @@ struct llama_server_context llama_batch_clear(batch); - for (int i = 0; i < (int)system_tokens.size(); ++i) - { + for (int i = 0; i < (int)system_tokens.size(); ++i) { llama_batch_add(batch, system_tokens[i], i, { 0 }, false); } - for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += params.n_batch) - { + for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += params.n_batch) { const int32_t n_tokens = std::min(params.n_batch, (int32_t) (batch.n_tokens - i)); llama_batch batch_view = { n_tokens, @@ -854,78 +1004,42 @@ struct llama_server_context batch.logits + i, 0, 0, 0, // unused }; - if (llama_decode(ctx, batch_view) != 0) - { + + if (llama_decode(ctx, batch_view) != 0) { LOG_TEE("%s: llama_decode() failed\n", __func__); return; } } // assign 
the system KV cache to all parallel sequences - for (int32_t i = 1; i < params.n_parallel; ++i) - { + for (int32_t i = 1; i < params.n_parallel; ++i) { llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); } } - LOG_TEE("system prompt updated\n"); system_need_update = false; } - void system_prompt_notify() { + void system_prompt_set(const json & sys_props) { + system_prompt = sys_props.value("prompt", ""); + name_user = sys_props.value("anti_prompt", ""); + name_assistant = sys_props.value("assistant_name", ""); + + LOG_VERBOSE("system prompt process", { + {"system_prompt", system_prompt}, + {"name_user", name_user}, + {"name_assistant", name_assistant}, + }); + // release all slots - for (server_slot &slot : slots) - { + for (server_slot & slot : slots) { slot.release(); } system_need_update = true; } - void system_prompt_process(const json &sys_props) { - system_prompt = sys_props.value("prompt", ""); - name_user = sys_props.value("anti_prompt", ""); - name_assistant = sys_props.value("assistant_name", ""); - - - system_prompt_notify(); - } - - static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, - const stop_type type, server_slot &slot) - { - size_t stop_pos = std::string::npos; - - for (const std::string &word : slot.params.antiprompt) - { - size_t pos; - if (type == STOP_FULL) - { - const size_t tmp = word.size() + last_token_size; - const size_t from_pos = text.size() > tmp ? text.size() - tmp : 0; - pos = text.find(word, from_pos); - } - else - { - pos = find_partial_stop_string(word, text); - } - if (pos != std::string::npos && - (stop_pos == std::string::npos || pos < stop_pos)) - { - if (type == STOP_FULL) - { - slot.stopped_word = true; - slot.stopping_word = word; - slot.has_next_token = false; - } - stop_pos = pos; - } - } - - return stop_pos; - } - - bool process_token(completion_token_output &result, server_slot &slot) { + bool process_token(completion_token_output & result, server_slot & slot) { // remember which tokens were sampled - used for repetition penalties during sampling const std::string token_str = llama_token_to_piece(ctx, result.tok); slot.sampled = result.tok; @@ -934,34 +1048,26 @@ struct llama_server_context slot.generated_text += token_str; slot.has_next_token = true; - if (slot.ctx_sampling->params.use_penalty_prompt_tokens && result.tok != -1) - { + if (slot.ctx_sampling->params.use_penalty_prompt_tokens && result.tok != -1) { // we can change penalty_prompt_tokens because it is always created from scratch each request slot.ctx_sampling->params.penalty_prompt_tokens.push_back(result.tok); } // check if there is incomplete UTF-8 character at the end bool incomplete = false; - for (unsigned i = 1; i < 5 && i <= slot.generated_text.size(); ++i) - { + for (unsigned i = 1; i < 5 && i <= slot.generated_text.size(); ++i) { unsigned char c = slot.generated_text[slot.generated_text.size() - i]; - if ((c & 0xC0) == 0x80) - { + if ((c & 0xC0) == 0x80) { // continuation byte: 10xxxxxx continue; } - if ((c & 0xE0) == 0xC0) - { + if ((c & 0xE0) == 0xC0) { // 2-byte character: 110xxxxx ... incomplete = i < 2; - } - else if ((c & 0xF0) == 0xE0) - { + } else if ((c & 0xF0) == 0xE0) { // 3-byte character: 1110xxxx ... incomplete = i < 3; - } - else if ((c & 0xF8) == 0xF0) - { + } else if ((c & 0xF8) == 0xF0) { // 4-byte character: 11110xxx ... 
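                // a 0b11110xxx lead byte announces a 4-byte sequence, so the trailing
                // character is incomplete unless at least 4 of its bytes are present;
                // incomplete characters are held back and not sent to the client yet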
incomplete = i < 4; } @@ -969,206 +1075,181 @@ struct llama_server_context break; } - if (!incomplete) - { + if (!incomplete) { size_t pos = std::min(slot.n_sent_text, slot.generated_text.size()); + const std::string str_test = slot.generated_text.substr(pos); bool is_stop_full = false; - size_t stop_pos = find_stopping_strings(str_test, token_str.size(), STOP_FULL, slot); - if (stop_pos != std::string::npos) - { + + size_t stop_pos = slot.find_stopping_strings(str_test, token_str.size(), STOP_TYPE_FULL); + if (stop_pos != std::string::npos) { is_stop_full = true; slot.generated_text.erase( slot.generated_text.begin() + pos + stop_pos, slot.generated_text.end()); pos = std::min(slot.n_sent_text, slot.generated_text.size()); - } - else - { + } else { is_stop_full = false; - stop_pos = find_stopping_strings(str_test, token_str.size(), STOP_PARTIAL, slot); + stop_pos = slot.find_stopping_strings(str_test, token_str.size(), STOP_TYPE_PARTIAL); } // check if there is any token to predict - if (stop_pos == std::string::npos || (!slot.has_next_token && !is_stop_full && stop_pos > 0)) - { + if (stop_pos == std::string::npos || (!slot.has_next_token && !is_stop_full && stop_pos > 0)) { // no send the stop word in the response result.text_to_send = slot.generated_text.substr(pos, std::string::npos); slot.n_sent_text += result.text_to_send.size(); // add the token to slot queue and cache } + slot.add_token_string(result); - if (slot.params.stream) - { + if (slot.params.stream) { send_partial_response(slot, result); } } - if (incomplete) - { + if (incomplete) { slot.has_next_token = true; } // check the limits - if (slot.n_decoded > 0 && slot.has_next_token && !slot.has_budget(params)) - { - slot.stopped_limit = true; + if (slot.n_decoded > 0 && slot.has_next_token && !slot.has_budget(params)) { + slot.stopped_limit = true; slot.has_next_token = false; + + LOG_VERBOSE("stopped by limit", { + {"id_slot", slot.id}, + {"n_decoded", slot.n_decoded}, + {"n_predict", slot.params.n_predict}, + }); } - if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(model)) - { - slot.stopped_eos = true; + if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(model)) { + slot.stopped_eos = true; slot.has_next_token = false; + LOG_VERBOSE("eos token found", {}); } LOG_VERBOSE("next token", { - {"token", result.tok}, - {"token_text", tokens_to_output_formatted_string(ctx, result.tok)}, - {"has_next_token", slot.has_next_token}, - {"n_remain", slot.n_remaining}, - {"num_tokens_predicted", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - }); + {"token", result.tok}, + {"token_text", tokens_to_output_formatted_string(ctx, result.tok)}, + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"n_decoded", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }); return slot.has_next_token; // continue } - bool process_images(server_slot &slot) const - { - for (slot_image &img : slot.images) - { - if (!img.request_encode_image) - { - continue; - } - - if (!llava_image_embed_make_with_clip_img(clp_ctx, params.n_threads, img.img_data, &img.image_embedding, &img.image_tokens)) { - LOG_TEE("Error processing the given image"); - return false; - } - - - img.request_encode_image = false; - } - - return slot.images.size() > 0; - } - - void 
send_error(task_server& task, const std::string &error) - { - LOG_TEE("task %i - error: %s\n", task.id, error.c_str()); - task_result res; - res.id = task.id; - res.multitask_id = task.multitask_id; - res.stop = false; - res.error = true; - res.result_json = { { "content", error } }; - queue_results.send(res); - } - - json get_formated_generation(server_slot &slot) - { + json get_formated_generation(const server_slot & slot) const { const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); - const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && - eos_bias->second < 0.0f && std::isinf(eos_bias->second); + const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && eos_bias->second < 0.0f && std::isinf(eos_bias->second); + std::vector samplers_sequence; - for (const auto &sampler_type : slot.sparams.samplers_sequence) - { + samplers_sequence.reserve(slot.sparams.samplers_sequence.size()); + for (const auto & sampler_type : slot.sparams.samplers_sequence) { samplers_sequence.emplace_back(sampler_type_to_name_string(sampler_type)); } return json { - {"n_ctx", slot.n_ctx}, - {"n_predict", slot.n_predict}, - {"model", params.model_alias}, - {"seed", slot.params.seed}, - {"temperature", slot.sparams.temp}, - {"dynatemp_range", slot.sparams.dynatemp_range}, - {"dynatemp_exponent", slot.sparams.dynatemp_exponent}, - {"top_k", slot.sparams.top_k}, - {"top_p", slot.sparams.top_p}, - {"min_p", slot.sparams.min_p}, - {"tfs_z", slot.sparams.tfs_z}, - {"typical_p", slot.sparams.typical_p}, - {"repeat_last_n", slot.sparams.penalty_last_n}, - {"repeat_penalty", slot.sparams.penalty_repeat}, - {"presence_penalty", slot.sparams.penalty_present}, - {"frequency_penalty", slot.sparams.penalty_freq}, - {"penalty_prompt_tokens", slot.sparams.penalty_prompt_tokens}, + {"n_ctx", slot.n_ctx}, + {"n_predict", slot.n_predict}, + {"model", params.model_alias}, + {"seed", slot.params.seed}, + {"temperature", slot.sparams.temp}, + {"dynatemp_range", slot.sparams.dynatemp_range}, + {"dynatemp_exponent", slot.sparams.dynatemp_exponent}, + {"top_k", slot.sparams.top_k}, + {"top_p", slot.sparams.top_p}, + {"min_p", slot.sparams.min_p}, + {"tfs_z", slot.sparams.tfs_z}, + {"typical_p", slot.sparams.typical_p}, + {"repeat_last_n", slot.sparams.penalty_last_n}, + {"repeat_penalty", slot.sparams.penalty_repeat}, + {"presence_penalty", slot.sparams.penalty_present}, + {"frequency_penalty", slot.sparams.penalty_freq}, + {"penalty_prompt_tokens", slot.sparams.penalty_prompt_tokens}, {"use_penalty_prompt_tokens", slot.sparams.use_penalty_prompt_tokens}, - {"mirostat", slot.sparams.mirostat}, - {"mirostat_tau", slot.sparams.mirostat_tau}, - {"mirostat_eta", slot.sparams.mirostat_eta}, - {"penalize_nl", slot.sparams.penalize_nl}, - {"stop", slot.params.antiprompt}, - {"n_predict", slot.params.n_predict}, - {"n_keep", params.n_keep}, - {"ignore_eos", ignore_eos}, - {"stream", slot.params.stream}, - {"logit_bias", slot.sparams.logit_bias}, - {"n_probs", slot.sparams.n_probs}, - {"min_keep", slot.sparams.min_keep}, - {"grammar", slot.sparams.grammar}, - {"samplers", samplers_sequence} + {"mirostat", slot.sparams.mirostat}, + {"mirostat_tau", slot.sparams.mirostat_tau}, + {"mirostat_eta", slot.sparams.mirostat_eta}, + {"penalize_nl", slot.sparams.penalize_nl}, + {"stop", slot.params.antiprompt}, + {"n_predict", slot.params.n_predict}, + {"n_keep", params.n_keep}, + {"ignore_eos", ignore_eos}, + {"stream", slot.params.stream}, + {"logit_bias", slot.sparams.logit_bias}, + {"n_probs", 
slot.sparams.n_probs}, + {"min_keep", slot.sparams.min_keep}, + {"grammar", slot.sparams.grammar}, + {"samplers", samplers_sequence} }; } - void send_partial_response(server_slot &slot, completion_token_output tkn) - { - task_result res; - res.id = slot.task_id; - res.multitask_id = slot.multitask_id; - res.error = false; - res.stop = false; + void send_error(const server_task & task, const std::string & error) { + LOG_TEE("task %i - error: %s\n", task.id, error.c_str()); - res.result_json = json - { + server_task_result res; + res.id = task.id; + res.id_multi = task.id_multi; + res.stop = false; + res.error = true; + res.data = { { "content", error } }; + + queue_results.send(res); + } + + void send_partial_response(server_slot & slot, completion_token_output tkn) { + server_task_result res; + res.id = slot.id_task; + res.id_multi = slot.id_multi; + res.error = false; + res.stop = false; + res.data = json { {"content", tkn.text_to_send}, {"stop", false}, - {"slot_id", slot.id}, - {"multimodal", multimodal} + {"id_slot", slot.id}, + {"multimodal", false} }; - if (slot.sparams.n_probs > 0) - { - std::vector probs_output = {}; + if (slot.sparams.n_probs > 0) { const std::vector to_send_toks = llama_tokenize(ctx, tkn.text_to_send, false); - size_t probs_pos = std::min(slot.n_sent_token_probs, slot.generated_token_probs.size()); - size_t probs_stop_pos = std::min(slot.n_sent_token_probs + to_send_toks.size(), slot.generated_token_probs.size()); - if (probs_pos < probs_stop_pos) - { - probs_output = std::vector(slot.generated_token_probs.begin() + probs_pos, slot.generated_token_probs.begin() + probs_stop_pos); + const size_t probs_pos = std::min(slot.n_sent_token_probs, slot.generated_token_probs.size()); + const size_t probs_stop_pos = std::min(slot.n_sent_token_probs + to_send_toks.size(), slot.generated_token_probs.size()); + + std::vector probs_output; + if (probs_pos < probs_stop_pos) { + probs_output = std::vector( + slot.generated_token_probs.begin() + probs_pos, + slot.generated_token_probs.begin() + probs_stop_pos); } slot.n_sent_token_probs = probs_stop_pos; - res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); + + res.data["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); } - if (slot.oaicompat) - { - res.result_json["oaicompat_token_ctr"] = slot.n_decoded; - res.result_json["model"] = slot.oaicompat_model; + if (slot.oaicompat) { + res.data["oaicompat_token_ctr"] = slot.n_decoded; + res.data["model"] = slot.oaicompat_model; } queue_results.send(res); } - void send_final_response(server_slot &slot) - { - task_result res; - res.id = slot.task_id; - res.multitask_id = slot.multitask_id; - res.error = false; - res.stop = true; - - res.result_json = json - { + void send_final_response(const server_slot & slot) { + server_task_result res; + res.id = slot.id_task; + res.id_multi = slot.id_multi; + res.error = false; + res.stop = true; + res.data = json { {"content", !slot.params.stream ? 
slot.generated_text : ""}, - {"slot_id", slot.id}, + {"id_slot", slot.id}, {"stop", true}, {"model", params.model_alias}, {"tokens_predicted", slot.n_decoded}, @@ -1184,96 +1265,87 @@ struct llama_server_context {"timings", slot.get_formated_timings()} }; - if (slot.sparams.n_probs > 0) - { - std::vector probs = {}; - if (!slot.params.stream && slot.stopped_word) - { + if (slot.sparams.n_probs > 0) { + std::vector probs; + if (!slot.params.stream && slot.stopped_word) { const std::vector stop_word_toks = llama_tokenize(ctx, slot.stopping_word, false); - probs = std::vector(slot.generated_token_probs.begin(), slot.generated_token_probs.end() - stop_word_toks.size()); - } - else - { + probs = std::vector( - slot.generated_token_probs.begin(), - slot.generated_token_probs.end()); + slot.generated_token_probs.begin(), + slot.generated_token_probs.end() - stop_word_toks.size()); + } else { + probs = std::vector( + slot.generated_token_probs.begin(), + slot.generated_token_probs.end()); } - res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs); + + res.data["completion_probabilities"] = probs_vector_to_json(ctx, probs); } - if (slot.oaicompat) - { - res.result_json["oaicompat_token_ctr"] = slot.n_decoded; - res.result_json["model"] = slot.oaicompat_model; + if (slot.oaicompat) { + res.data["oaicompat_token_ctr"] = slot.n_decoded; + res.data["model"] = slot.oaicompat_model; } queue_results.send(res); } - void send_embedding(server_slot & slot, const llama_batch & batch) - { - task_result res; - res.id = slot.task_id; - res.multitask_id = slot.multitask_id; - res.error = false; - res.stop = true; + void send_embedding(const server_slot & slot, const llama_batch & batch) { + server_task_result res; + res.id = slot.id_task; + res.id_multi = slot.id_multi; + res.error = false; + res.stop = true; const int n_embd = llama_n_embd(model); - if (!params.embedding) - { - LOG_WARNING("embedding disabled", {{"params.embedding", params.embedding}}); - res.result_json = json - { - {"embedding", std::vector(n_embd, 0.0f)}, + for (int i = 0; i < batch.n_tokens; ++i) { + if (!batch.logits[i] || batch.seq_id[i][0] != slot.id) { + continue; + } + + const float * embd = llama_get_embeddings_seq(ctx, batch.seq_id[i][0]); + if (embd == NULL) { + embd = llama_get_embeddings_ith(ctx, i); + } + + if (embd == NULL) { + LOG_ERROR("failed to get embeddings", { + {"token", batch.token [i]}, + {"seq_id", batch.seq_id[i][0]} + }); + + res.data = json { + {"embedding", std::vector(n_embd, 0.0f)}, + }; + + continue; + } + + res.data = json { + {"embedding", std::vector(embd, embd + n_embd)}, }; } - else - { - for (int i = 0; i < batch.n_tokens; ++i) { - if (!batch.logits[i] || batch.seq_id[i][0] != slot.id) { - continue; - } - const float * embd = llama_get_embeddings_seq(ctx, batch.seq_id[i][0]); - if (embd == NULL) { - embd = llama_get_embeddings_ith(ctx, i); - if (embd == NULL) { - LOG_ERROR("failed to get embeddings for token", {{"token", batch.token[i]}, {"seq_id", batch.seq_id[i][0]}}); - res.result_json = json - { - {"embedding", std::vector(n_embd, 0.0f)}, - }; - continue; - } - } - - res.result_json = json - { - {"embedding", std::vector(embd, embd + n_embd)}, - }; - } - } queue_results.send(res); } - void request_completion(int task_id, json data, bool infill, bool embedding, int multitask_id) - { - task_server task; - task.id = task_id; - task.target_id = 0; - task.data = std::move(data); - task.infill_mode = infill; - task.embedding_mode = embedding; - task.type = TASK_TYPE_COMPLETION; - 
task.multitask_id = multitask_id; + void request_completion(int id_task, int id_multi, json data, bool infill, bool embedding) { + server_task task; + task.id = id_task; + task.id_multi = id_multi; + task.id_target = 0; + task.data = std::move(data); + task.infill = infill; + task.embedding = embedding; + task.type = SERVER_TASK_TYPE_COMPLETION; // when a completion task's prompt array is not a singleton, we split it into multiple requests // otherwise, it's a single-prompt task, we actually queue it // if there's numbers in the prompt array it will be treated as an array of tokens if (task.data.count("prompt") != 0 && task.data.at("prompt").size() > 1) { bool numbers = false; - for (const auto& e : task.data.at("prompt")) { + for (const auto & e : task.data.at("prompt")) { if (e.is_number()) { numbers = true; break; @@ -1288,106 +1360,23 @@ struct llama_server_context if (numbers) { queue_tasks.post(task); } else { - split_multiprompt_task(task_id, task); + split_multiprompt_task(id_task, task); } } else { - // an empty prompt can make slot become buggy - if (task.data.contains("prompt") && task.data["prompt"].is_string() && task.data["prompt"].get().empty()) { - task.data["prompt"] = " "; // add a space so that we have one token - } queue_tasks.post(task); } } - // for multiple images processing - bool ingest_images(server_slot &slot, int n_batch) - { - int image_idx = 0; + void request_cancel(int id_task) { + server_task task; + task.type = SERVER_TASK_TYPE_CANCEL; + task.id_target = id_task; - while (image_idx < (int) slot.images.size()) - { - slot_image &img = slot.images[image_idx]; - - // process prefix prompt - for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) - { - const int32_t n_tokens = std::min(n_batch, (int32_t) (batch.n_tokens - i)); - llama_batch batch_view = { - n_tokens, - batch.token + i, - nullptr, - batch.pos + i, - batch.n_seq_id + i, - batch.seq_id + i, - batch.logits + i, - 0, 0, 0, // unused - }; - if (llama_decode(ctx, batch_view)) - { - LOG_TEE("%s : failed to eval\n", __func__); - return false; - } - } - - // process image with llm - for (int i = 0; i < img.image_tokens; i += n_batch) - { - int n_eval = img.image_tokens - i; - if (n_eval > n_batch) - { - n_eval = n_batch; - } - - const int n_embd = llama_n_embd(model); - llama_batch batch_img = { - n_eval, - nullptr, - (img.image_embedding + i * n_embd), - nullptr, - nullptr, - nullptr, - nullptr, - slot.n_past, - 1, 0 - }; - if (llama_decode(ctx, batch_img)) - { - LOG_TEE("%s : failed to eval image\n", __func__); - return false; - } - slot.n_past += n_eval; - } - image_idx++; - - llama_batch_clear(batch); - - // append prefix of next image - const auto json_prompt = (image_idx >= (int) slot.images.size()) ? 
- slot.params.input_suffix : // no more images, then process suffix prompt - (json)(slot.images[image_idx].prefix_prompt); - - std::vector append_tokens = tokenize(json_prompt, false); // has next image - for (int i = 0; i < (int) append_tokens.size(); ++i) - { - llama_batch_add(batch, append_tokens[i], system_tokens.size() + slot.n_past, { slot.id }, true); - slot.n_past += 1; - } - } - - return true; - } - - void request_cancel(int task_id) - { - task_server task; - task.type = TASK_TYPE_CANCEL; - task.target_id = task_id; queue_tasks.post(task); } - void split_multiprompt_task(int multitask_id, task_server& multiprompt_task) - { - int prompt_count = multiprompt_task.data.at("prompt").size(); + void split_multiprompt_task(int id_multi, const server_task & multiprompt_task) { + const int prompt_count = multiprompt_task.data.at("prompt").size(); if (prompt_count <= 1) { send_error(multiprompt_task, "error while handling multiple prompts"); return; @@ -1395,127 +1384,121 @@ struct llama_server_context // generate all the ID for subtask std::vector subtask_ids(prompt_count); - for (int i = 0; i < prompt_count; i++) - { + for (int i = 0; i < prompt_count; i++) { subtask_ids[i] = queue_tasks.get_new_id(); } // queue up the multitask so we can track its subtask progression - queue_tasks.add_multitask(multitask_id, subtask_ids); + queue_tasks.add_multitask(id_multi, subtask_ids); // add subtasks - for (int i = 0; i < prompt_count; i++) - { + for (int i = 0; i < prompt_count; i++) { json subtask_data = multiprompt_task.data; subtask_data["prompt"] = subtask_data["prompt"][i]; // subtasks inherit everything else (infill mode, embedding mode, etc.) - request_completion(subtask_ids[i], subtask_data, multiprompt_task.infill_mode, multiprompt_task.embedding_mode, multitask_id); + request_completion(subtask_ids[i], id_multi, subtask_data, multiprompt_task.infill, multiprompt_task.embedding); } } - void process_single_task(task_server& task) - { - switch (task.type) - { - case TASK_TYPE_COMPLETION: { - server_slot *slot = get_slot(json_value(task.data, "slot_id", -1)); - if (slot == nullptr) + void process_single_task(const server_task & task) { + switch (task.type) { + case SERVER_TASK_TYPE_COMPLETION: { - // if no slot is available, we defer this task for processing later - LOG_VERBOSE("no slot is available", {{"task_id", task.id}}); - queue_tasks.defer(task); - break; - } - - if (task.data.contains("system_prompt")) - { - if (!all_slots_are_idle) { - send_error(task, "system prompt can only be updated when all slots are idle"); + server_slot * slot = get_slot(json_value(task.data, "id_slot", -1)); + if (slot == nullptr) { + // if no slot is available, we defer this task for processing later + LOG_VERBOSE("no slot is available", {{"id_task", task.id}}); + queue_tasks.defer(task); break; } - system_prompt_process(task.data["system_prompt"]); - // reset cache_tokens for all slots - for (server_slot &slot : slots) - { - slot.cache_tokens.clear(); - slot.n_past = 0; - slot.n_past_se = 0; + if (task.data.contains("system_prompt")) { + system_prompt_set(task.data["system_prompt"]); + + for (server_slot & slot : slots) { + slot.n_past = 0; + slot.n_past_se = 0; + } } - } - slot->reset(); + slot->reset(); - slot->infill = task.infill_mode; - slot->embedding = task.embedding_mode; - slot->task_id = task.id; - slot->multitask_id = task.multitask_id; + slot->id_task = task.id; + slot->id_multi = task.id_multi; + slot->infill = task.infill; + slot->embedding = task.embedding; - if (!launch_slot_with_data(slot, 
task.data)) - { - // send error result - send_error(task, "internal_error"); - break; - } - } break; - case TASK_TYPE_CANCEL: { // release slot linked with the task id - for (auto & slot : slots) - { - if (slot.task_id == task.target_id) - { - slot.release(); + if (!launch_slot_with_data(*slot, task.data)) { + // send error result + send_error(task, "internal_error"); break; } - } - } break; - case TASK_TYPE_NEXT_RESPONSE: { - // do nothing - } break; - case TASK_TYPE_METRICS: { - json slots_data = json::array(); - int n_idle_slots = 0; - int n_processing_slots = 0; - - for (server_slot &slot: slots) { - json slot_data = get_formated_generation(slot); - slot_data["id"] = slot.id; - slot_data["task_id"] = slot.task_id; - slot_data["state"] = slot.state; - slot_data["prompt"] = slot.prompt; - slot_data["next_token"] = { - {"has_next_token", slot.has_next_token}, - {"n_remain", slot.n_remaining}, - {"num_tokens_predicted", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - }; - if (slot_data["state"] == IDLE) { - n_idle_slots++; - } else { - n_processing_slots++; + } break; + case SERVER_TASK_TYPE_CANCEL: + { + // release slot linked with the task id + for (auto & slot : slots) { + if (slot.id_task == task.id_target) { + slot.release(); + break; + } } - slots_data.push_back(slot_data); - } - LOG_INFO("slot data", { - {"task_id", task.id}, - {"n_idle_slots", n_idle_slots}, - {"n_processing_slots", n_processing_slots} - }); - LOG_VERBOSE("slot data", { - {"task_id", task.id}, - {"n_idle_slots", n_idle_slots}, - {"n_processing_slots", n_processing_slots}, - {"slots", slots_data} - }); - task_result res; - res.id = task.id; - res.multitask_id = task.multitask_id; - res.stop = true; - res.error = false; - res.result_json = { + } break; + case SERVER_TASK_TYPE_NEXT_RESPONSE: + { + // do nothing + } break; + case SERVER_TASK_TYPE_METRICS: + { + json slots_data = json::array(); + + int n_idle_slots = 0; + int n_processing_slots = 0; + + for (server_slot & slot : slots) { + json slot_data = get_formated_generation(slot); + slot_data["id"] = slot.id; + slot_data["id_task"] = slot.id_task; + slot_data["state"] = slot.state; + slot_data["prompt"] = slot.prompt; + slot_data["next_token"] = { + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"n_decoded", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }; + + if (slot_data["state"] == SLOT_STATE_IDLE) { + n_idle_slots++; + } else { + n_processing_slots++; + } + + slots_data.push_back(slot_data); + } + LOG_INFO("slot data", { + {"id_task", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots} + }); + + LOG_VERBOSE("slot data", { + {"id_task", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots}, + {"slots", slots_data} + }); + + server_task_result res; + res.id = task.id; + res.id_multi = task.id_multi; + res.stop = true; + res.error = false; + res.data = { { "idle", n_idle_slots }, { "processing", n_processing_slots }, { "deferred", queue_tasks.queue_tasks_deferred.size() }, @@ -1532,71 +1515,104 @@ struct llama_server_context { "kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, { "slots", slots_data }, - }; - metrics.reset_bucket(); - queue_results.send(res); - } break; + }; + + 
metrics.reset_bucket(); + queue_results.send(res); + } break; } } - void on_finish_multitask(task_multi& multitask) - { + void on_finish_multitask(const server_task_multi & multitask) { // all subtasks done == multitask is done - task_result result; - result.id = multitask.id; - result.stop = true; + server_task_result result; + result.id = multitask.id; + result.stop = true; result.error = false; // collect json results into one json result std::vector result_jsons; - for (auto& subres : multitask.results) - { - result_jsons.push_back(subres.result_json); + for (const auto & subres : multitask.results) { + result_jsons.push_back(subres.data); result.error = result.error && subres.error; } - result.result_json = json{ { "results", result_jsons } }; + result.data = json { + { "results", result_jsons } + }; + queue_results.send(result); } bool update_slots() { - if (system_need_update) - { - LOG_INFO("updating system prompt", {}); + if (system_need_update) { system_prompt_update(); } - llama_batch_clear(batch); + // release slots + for (auto & slot : slots) { + if (slot.command == SLOT_COMMAND_RELEASE) { + slot.state = SLOT_STATE_IDLE; + slot.command = SLOT_COMMAND_NONE; + slot.t_last_used = ggml_time_us(); - if (all_slots_are_idle) - { - if (system_prompt.empty() && clean_kv_cache) - { - LOG_INFO("all slots are idle and system prompt is empty, clear the KV cache", {}); - kv_cache_clear(); + LOG_INFO("slot released", { + {"id_slot", slot.id}, + {"id_task", slot.id_task}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()}, + {"truncated", slot.truncated} + }); + + queue_tasks.notify_slot_changed(); } - return true; } - LOG_VERBOSE("posting NEXT_RESPONSE", {}); - task_server task; - task.type = TASK_TYPE_NEXT_RESPONSE; - task.target_id = -1; - queue_tasks.post(task); - - for (server_slot &slot : slots) + // check if all slots are idle { - if (slot.ga_n == 1) - { - if (slot.is_processing() && system_tokens.size() + slot.cache_tokens.size() >= (size_t) slot.n_ctx) - { + bool all_idle = true; + + for (auto & slot : slots) { + if (slot.state != SLOT_STATE_IDLE || slot.command != SLOT_COMMAND_NONE) { + all_idle = false; + break; + } + } + + if (all_idle) { + LOG_INFO("all slots are idle", {}); + if (system_prompt.empty() && clean_kv_cache) { + kv_cache_clear(); + } + + return true; + } + } + + { + LOG_VERBOSE("posting NEXT_RESPONSE", {}); + + server_task task; + task.type = SERVER_TASK_TYPE_NEXT_RESPONSE; + task.id_target = -1; + + queue_tasks.post(task); + } + + // apply context-shift if needed + // TODO: simplify and improve + for (server_slot & slot : slots) { + if (slot.ga_n == 1) { + if (slot.is_processing() && (int) system_tokens.size() + slot.n_past >= slot.n_ctx - 1) { // Shift context const int n_keep = slot.params.n_keep + add_bos_token; const int n_left = (int) system_tokens.size() + slot.n_past - n_keep; const int n_discard = n_left / 2; LOG_INFO("slot context shift", { - {"slot_id", slot.id}, - {"task_id", slot.task_id}, + {"id_slot", slot.id}, + {"id_task", slot.id_task}, {"n_keep", n_keep}, {"n_left", n_left}, {"n_discard", n_discard}, @@ -1605,15 +1621,17 @@ struct llama_server_context {"n_system_tokens", system_tokens.size()}, {"n_cache_tokens", slot.cache_tokens.size()} }); + llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); llama_kv_cache_seq_add(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); - for (size_t i = n_keep + n_discard; i < 
slot.cache_tokens.size(); i++) - { - slot.cache_tokens[i - n_discard] = slot.cache_tokens[i]; - } + if (slot.params.cache_prompt) { + for (size_t i = n_keep + n_discard; i < slot.cache_tokens.size(); i++) { + slot.cache_tokens[i - n_discard] = slot.cache_tokens[i]; + } - slot.cache_tokens.resize(slot.cache_tokens.size() - n_discard); + slot.cache_tokens.resize(slot.cache_tokens.size() - n_discard); + } slot.n_past -= n_discard; @@ -1622,33 +1640,12 @@ struct llama_server_context } } - // decode any currently ongoing sequences - LOG_VERBOSE("decoding ongoing sequences", {}); - for (auto & slot : slots) - { - // release the slot - if (slot.command == RELEASE) - { - slot.state = IDLE; - slot.command = NONE; - slot.t_last_used = ggml_time_us(); + // start populating the batch for this iteration + llama_batch_clear(batch); - LOG_INFO("slot released", { - {"slot_id", slot.id}, - {"task_id", slot.task_id}, - {"n_ctx", n_ctx}, - {"n_past", slot.n_past}, - {"n_system_tokens", system_tokens.size()}, - {"n_cache_tokens", slot.cache_tokens.size()}, - {"truncated", slot.truncated} - }); - queue_tasks.notify_slot_changed(); - - continue; - } - - if (slot.state == IDLE) - { + // frist, add sampled tokens from any ongoing sequences + for (auto & slot : slots) { + if (slot.state == SLOT_STATE_IDLE) { continue; } @@ -1659,193 +1656,184 @@ struct llama_server_context // TODO: we always have to take into account the "system_tokens" // this is not great and needs to be improved somehow llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); + slot.n_past += 1; + + if (slot.params.cache_prompt) { + slot.cache_tokens.push_back(slot.sampled); + } + + LOG_VERBOSE("slot decode token", { + {"id_slot", slot.id}, + {"id_task", slot.id_task}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()}, + {"truncated", slot.truncated} + }); } // process in chunks of params.n_batch int32_t n_batch = params.n_batch; - // assign workload to the slots - if (params.cont_batching || batch.n_tokens == 0) - { - for (auto & slot : slots) - { - const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()) || !slot.images.empty(); + // next, batch any pending prompts without exceeding n_batch + if (params.cont_batching || batch.n_tokens == 0) { + for (auto & slot : slots) { + const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()); // empty prompt passed -> release the slot and send empty response // note: infill mode allows empty prompt - if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt && !slot.infill) - { + if (slot.state == SLOT_STATE_IDLE && slot.command == SLOT_COMMAND_LOAD_PROMPT && !has_prompt && !slot.infill) { + slot.state = SLOT_STATE_PROCESSING; + slot.command = SLOT_COMMAND_NONE; slot.release(); slot.print_timings(); send_final_response(slot); continue; } - // need process the prompt - if (slot.state == IDLE && slot.command == LOAD_PROMPT) - { - slot.state = PROCESSING; - slot.command = NONE; - std::vector prompt_tokens; - slot.t_start_process_prompt = ggml_time_us(); - slot.t_start_genereration = 0; + // this slot still has a prompt to be processed + if (slot.state == SLOT_STATE_IDLE && slot.command == SLOT_COMMAND_LOAD_PROMPT) { + auto & prompt_tokens = slot.prompt_tokens; - if (slot.infill) - { - bool suff_rm_leading_spc = true; - if (params.input_suffix.find_first_of(' ') == 0 && 
params.input_suffix.size() > 1) - { - params.input_suffix.erase(0, 1); - suff_rm_leading_spc = false; - } - auto prefix_tokens = tokenize(slot.params.input_prefix, false); - auto suffix_tokens = tokenize(slot.params.input_suffix, false); - - const int space_token = 29871; // TODO: this should not be hardcoded - if (suff_rm_leading_spc && !suffix_tokens.empty() && suffix_tokens[0] == space_token) { - suffix_tokens.erase(suffix_tokens.begin()); - } - - prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(model)); - prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(model)); // always add BOS - prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); - prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); - prefix_tokens.push_back(llama_token_middle(model)); - prompt_tokens = prefix_tokens; - } - else - { - prompt_tokens = tokenize(slot.prompt, system_prompt.empty() && add_bos_token); // add BOS if there isn't system prompt - } - - slot.n_prompt_tokens = prompt_tokens.size(); - - if (slot.params.n_keep < 0) - { - slot.params.n_keep = slot.n_prompt_tokens; - } - slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep); - - // if input prompt is too big, truncate it, if group attention self-extend is disabled - if (slot.ga_n == 1 && slot.n_prompt_tokens >= slot.n_ctx) - { - const int n_left = slot.n_ctx - slot.params.n_keep; - const int n_block_size = n_left / 2; - const int erased_blocks = (slot.n_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size; - - std::vector new_tokens( - prompt_tokens.begin(), - prompt_tokens.begin() + slot.params.n_keep); - new_tokens.insert( - new_tokens.end(), - prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, - prompt_tokens.end()); - - LOG_VERBOSE("input truncated", { - {"n_ctx", slot.n_ctx}, - {"n_keep", slot.params.n_keep}, - {"n_left", n_left}, - {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, + // we haven't tokenized the prompt yet - do it now: + if (prompt_tokens.empty()) { + LOG_VERBOSE("tokenizing prompt", { + {"id_slot", slot.id}, + {"id_task", slot.id_task} }); - slot.truncated = true; - prompt_tokens = new_tokens; - slot.n_prompt_tokens = prompt_tokens.size(); - GGML_ASSERT(slot.n_prompt_tokens < slot.n_ctx); - } + slot.t_start_process_prompt = ggml_time_us(); + slot.t_start_generation = 0; - if (!slot.params.cache_prompt) - { - llama_sampling_reset(slot.ctx_sampling); - - slot.n_past = 0; - slot.n_past_se = 0; - slot.ga_i = 0; - slot.n_prompt_tokens_processed = slot.n_prompt_tokens; - } - else - { - // push the prompt into the sampling context (do not apply grammar) - for (auto &token : prompt_tokens) - { - llama_sampling_accept(slot.ctx_sampling, ctx, token, false); - } - - slot.n_past = common_part(slot.cache_tokens, prompt_tokens); - - // the last token of the cache is not in the KV cache until the next call to llama_decode - // (it was sampled, pushed into the "cache_tokens", but not yet put in the context) - if (slot.n_past > 0 && slot.n_past == (int32_t) slot.cache_tokens.size()) - { - slot.n_past -= 1; - } - - slot.n_prompt_tokens_processed = slot.n_prompt_tokens - slot.n_past; - - if (slot.ga_n != 1) - { - int ga_i = 0; - int32_t ga_n = slot.ga_n; - int32_t ga_w = slot.ga_w; - int32_t slot_npast = 0; - for (int k = 0; k < slot.n_past; ++k) - { - while (slot_npast >= ga_i + ga_w) { - const int bd = (ga_w/ga_n)*(ga_n - 1); - slot_npast -= bd; - ga_i += ga_w/ga_n; - } - slot_npast++; + if (slot.infill) { + 
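                            // illustrative sketch of the infill prompt assembled below
                            // ([PRE]/[SUF]/[MID] stand for llama_token_prefix/suffix/middle):
                            //
                            //   [BOS][PRE] <input_prefix tokens> [SUF] <input_suffix tokens> [MID]
                            //
                            // the model is then expected to generate the text that belongs
                            // between the prefix and the suffix.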
bool suff_rm_leading_spc = true; + if (params.input_suffix.find_first_of(' ') == 0 && params.input_suffix.size() > 1) { + params.input_suffix.erase(0, 1); + suff_rm_leading_spc = false; } - slot.n_past_se = slot_npast; - slot.ga_i = ga_i; + + auto prefix_tokens = tokenize(slot.params.input_prefix, false); + auto suffix_tokens = tokenize(slot.params.input_suffix, false); + + const int space_token = 29871; // TODO: this should not be hardcoded + if (suff_rm_leading_spc && !suffix_tokens.empty() && suffix_tokens[0] == space_token) { + suffix_tokens.erase(suffix_tokens.begin()); + } + + prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(model)); + prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(model)); // always add BOS + prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); + prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); + prefix_tokens.push_back(llama_token_middle(model)); + prompt_tokens = prefix_tokens; + } else { + prompt_tokens = tokenize(slot.prompt, system_prompt.empty() && add_bos_token); // add BOS if there isn't system prompt } - LOG_INFO("slot progression", { - { "slot_id", slot.id }, - { "task_id", slot.task_id }, - { "n_past", slot.n_past }, - { "n_past_se", slot.n_past_se }, - { "ga_i", slot.ga_i }, - { "n_prompt_tokens_processed", slot.n_prompt_tokens_processed } - }); + slot.n_past = 0; + slot.n_prompt_tokens = prompt_tokens.size(); + + if (slot.embedding) { + // this prompt is too large to process - discard it + if (slot.n_prompt_tokens > n_batch) { + slot.state = SLOT_STATE_PROCESSING; + slot.command = SLOT_COMMAND_NONE; + slot.release(); + slot.print_timings(); + send_final_response(slot); + continue; + } + } else { + if (slot.params.n_keep < 0) { + slot.params.n_keep = slot.n_prompt_tokens; + } + slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep); + + // if input prompt is too big, truncate it (if group attention self-extend is disabled) + if (slot.ga_n == 1 && slot.n_prompt_tokens >= slot.n_ctx) { + const int n_left = slot.n_ctx - slot.params.n_keep; + + const int n_block_size = n_left / 2; + const int erased_blocks = (slot.n_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size; + + std::vector new_tokens( + prompt_tokens.begin(), + prompt_tokens.begin() + slot.params.n_keep); + + new_tokens.insert( + new_tokens.end(), + prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, + prompt_tokens.end()); + + prompt_tokens = std::move(new_tokens); + + slot.truncated = true; + slot.n_prompt_tokens = prompt_tokens.size(); + + LOG_VERBOSE("input truncated", { + {"n_ctx", slot.n_ctx}, + {"n_keep", slot.params.n_keep}, + {"n_left", n_left}, + {"prompt_tokens", tokens_to_str(ctx, prompt_tokens.cbegin(), prompt_tokens.cend())}, + }); + + GGML_ASSERT(slot.n_prompt_tokens < slot.n_ctx); + } + + llama_sampling_reset(slot.ctx_sampling); + + if (!slot.params.cache_prompt) { + slot.n_past_se = 0; + slot.ga_i = 0; + } else { + GGML_ASSERT(slot.ga_n == 1); + + // reuse any previously computed tokens that are common with the new prompt + slot.n_past = common_part(slot.cache_tokens, prompt_tokens); + + // remove the non-common part from the cache + slot.cache_tokens.resize(slot.n_past); + + // push the prompt into the sampling context (do not apply grammar) + for (int i = 0; i < slot.n_past; ++i) { + llama_sampling_accept(slot.ctx_sampling, ctx, slot.cache_tokens[i], false); + } + } + } + + if (slot.n_past == slot.n_prompt_tokens && slot.n_past > 0) { + // we have to 
evaluate at least 1 token to generate logits. + LOG_INFO("we have to evaluate at least 1 token to generate logits", { + { "id_slot", slot.id }, + { "id_task", slot.id_task } + }); + + slot.n_past--; + if (slot.ga_i > 0) { + slot.n_past_se--; + } + } + + slot.n_prompt_tokens_processed = 0; } - slot.cache_tokens = prompt_tokens; - - if (slot.n_past == slot.n_prompt_tokens && slot.n_past > 0) - { - // we have to evaluate at least 1 token to generate logits. - LOG_INFO("we have to evaluate at least 1 token to generate logits", { - { "slot_id", slot.id }, - { "task_id", slot.task_id } - }); - slot.n_past--; - if (slot.ga_i > 0) - { - slot.n_past_se--; + if (slot.embedding) { + // cannot fit the prompt in the current batch - will try next iter + if (batch.n_tokens + slot.n_prompt_tokens > n_batch) { + continue; } } - int p0 = (int) system_tokens.size() + slot.n_past; - LOG_INFO("kv cache rm [p0, end)", { - { "slot_id", slot.id }, - { "task_id", slot.task_id }, - { "p0", p0 } - }); + const int p0 = (int) system_tokens.size() + slot.n_past; llama_kv_cache_seq_rm(ctx, slot.id, p0, -1); - LOG_VERBOSE("prompt ingested", { - {"n_past", slot.n_past}, - {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, - {"to_eval", tokens_to_str(ctx, slot.cache_tokens.cbegin() + slot.n_past, slot.cache_tokens.cend())}, - }); - - const bool has_images = process_images(slot); - - // process the prefix of first image - std::vector prefix_tokens = has_images ? tokenize(slot.images[0].prefix_prompt, add_bos_token) : prompt_tokens; + LOG_INFO("kv cache rm [p0, end)", { + { "id_slot", slot.id }, + { "id_task", slot.id_task }, + { "p0", p0 } + }); int32_t slot_npast = slot.n_past_se > 0 ? slot.n_past_se : slot.n_past; @@ -1853,61 +1841,82 @@ struct llama_server_context int32_t ga_n = slot.ga_n; int32_t ga_w = slot.ga_w; - for (; slot.n_past < (int) prefix_tokens.size(); ++slot.n_past) - { - if (slot.ga_n != 1) - { + // add prompt tokens for processing in the current batch + // TODO: the self-extend stuff here is a mess - simplify and/or abstract it somehow + for (; slot.n_past < slot.n_prompt_tokens && batch.n_tokens < n_batch; ++slot.n_past) { + if (slot.ga_n != 1) { while (slot_npast >= ga_i + ga_w) { const int bd = (ga_w/ga_n)*(ga_n - 1); slot_npast -= bd; ga_i += ga_w/ga_n; } } - llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot_npast, { slot.id }, false); + + llama_batch_add(batch, prompt_tokens[slot.n_past], system_tokens.size() + slot_npast, { slot.id }, false); + + if (slot.params.cache_prompt) { + slot.cache_tokens.push_back(prompt_tokens[slot.n_past]); + } + + slot.n_prompt_tokens_processed++; slot_npast++; } - if (has_images && !ingest_images(slot, n_batch)) - { - LOG_ERROR("failed processing images", { - {"slot_id", slot.id}, - {"task_id", slot.task_id}, - }); - // FIXME @phymbert: to be properly tested - // early returning without changing the slot state will block the slot for ever - // no one at the moment is checking the return value - return false; - } + LOG_VERBOSE("prompt processing progress", { + {"id_slot", slot.id}, + {"n_past", slot.n_past}, + {"n_ctx", n_ctx}, + {"n_tokens", batch.n_tokens}, + {"progress", (float) slot.n_prompt_tokens_processed / slot.n_prompt_tokens}, + }); - // extract the logits only for the last token - if (batch.n_tokens > 0) - { + // entire prompt has been processed - start decoding new tokens + if (slot.n_past == slot.n_prompt_tokens) { + slot.state = SLOT_STATE_PROCESSING; + slot.command = 
SLOT_COMMAND_NONE; + + GGML_ASSERT(batch.n_tokens > 0); + + // extract the logits only for the last token batch.logits[batch.n_tokens - 1] = true; - } - slot.n_decoded = 0; - slot.i_batch = batch.n_tokens - 1; + slot.n_decoded = 0; + slot.i_batch = batch.n_tokens - 1; + + LOG_VERBOSE("prompt done", { + {"id_slot", slot.id}, + {"n_past", slot.n_past}, + {"n_ctx", n_ctx}, + {"n_tokens", batch.n_tokens}, + }); + } + } + + if (batch.n_tokens >= n_batch) { + break; } } } - if (batch.n_tokens == 0) - { - all_slots_are_idle = true; + if (batch.n_tokens == 0) { + LOG_VERBOSE("no tokens to decode", {}); + return true; } - for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) - { + LOG_VERBOSE("decoding batch", { + {"n_tokens", batch.n_tokens}, + }); + + // process the created batch of tokens + for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) { const int32_t n_tokens = std::min(n_batch, batch.n_tokens - i); - for (auto & slot : slots) - { - if (slot.ga_n != 1) - { + for (auto & slot : slots) { + if (slot.ga_n != 1) { // context extension via Self-Extend - while (slot.n_past_se >= slot.ga_i + slot.ga_w) - { + // TODO: simplify and/or abstract this + while (slot.n_past_se >= slot.ga_i + slot.ga_w) { const int ib = (slot.ga_n * slot.ga_i) / slot.ga_w; const int bd = (slot.ga_w / slot.ga_n) * (slot.ga_n - 1); const int dd = (slot.ga_w / slot.ga_n) - ib * bd - slot.ga_w; @@ -1918,8 +1927,8 @@ struct llama_server_context LOG_TEE("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd, slot.ga_i + ib * bd + slot.ga_w + dd, slot.n_past_se + ib * bd + dd); llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i, slot.n_past_se, ib * bd); - llama_kv_cache_seq_div(ctx, slot.id, slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w,slot.ga_n); - llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i + ib * bd + slot.ga_w,slot.n_past_se + ib * bd, dd); + llama_kv_cache_seq_div(ctx, slot.id, slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w, slot.ga_n); + llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd); slot.n_past_se -= bd; @@ -1927,12 +1936,12 @@ struct llama_server_context LOG_TEE("\nn_past_old = %d, n_past = %d, ga_i = %d\n\n", slot.n_past_se + bd, slot.n_past_se, slot.ga_i); } + slot.n_past_se += n_tokens; } } - llama_batch batch_view = - { + llama_batch batch_view = { n_tokens, batch.token + i, nullptr, @@ -1945,10 +1954,8 @@ struct llama_server_context const int ret = llama_decode(ctx, batch_view); - if (ret != 0) - { - if (n_batch == 1 || ret < 0) - { + if (ret != 0) { + if (n_batch == 1 || ret < 0) { // if you get here, it means the KV cache is full - try increasing it via the context size LOG_TEE("%s : failed to decode the batch, n_batch = %d, ret = %d\n", __func__, n_batch, ret); return false; @@ -1959,19 +1966,17 @@ struct llama_server_context // retry with half the batch size to try to find a free slot in the KV cache n_batch /= 2; i -= n_batch; + continue; } - for (auto & slot : slots) - { - if (slot.i_batch < (int) i || slot.i_batch >= (int) (i + n_tokens)) - { + for (auto & slot : slots) { + if (slot.state != SLOT_STATE_PROCESSING || slot.i_batch < (int) i || slot.i_batch >= (int) (i + n_tokens)) { continue; } // prompt evaluated for embedding - if (slot.embedding) - { + if (slot.embedding) { send_embedding(slot, batch_view); slot.release(); slot.i_batch = -1; @@ -1984,10 +1989,9 @@ struct llama_server_context llama_sampling_accept(slot.ctx_sampling, ctx, id, true); slot.n_decoded += 1; - 
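            // timing note (illustrative): ggml_time_us() is in microseconds, so the
            // division by 1e3 below yields milliseconds. the first sampled token marks
            // the end of prompt processing - e.g. a 500-token prompt finished 250 ms
            // after t_start_process_prompt would later show up as ~2000 tokens/s in the
            // prompt_tokens_seconds gauge (placeholder numbers).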
if (slot.n_decoded == 1) - { - slot.t_start_genereration = ggml_time_us(); - slot.t_prompt_processing = (slot.t_start_genereration - slot.t_start_process_prompt) / 1e3; + if (slot.n_decoded == 1) { + slot.t_start_generation = ggml_time_us(); + slot.t_prompt_processing = (slot.t_start_generation - slot.t_start_process_prompt) / 1e3; metrics.on_prompt_eval(slot); } @@ -1995,19 +1999,19 @@ struct llama_server_context result.tok = id; const int32_t n_probs = slot.sparams.n_probs; - if (slot.sparams.temp <= 0 && n_probs > 0) - { + if (slot.sparams.temp <= 0 && n_probs > 0) { // for llama_sample_token_greedy we need to sort candidates llama_sample_softmax(ctx, &cur_p); } - for (size_t i = 0; i < std::min(cur_p.size, (size_t)n_probs); ++i) - { - result.probs.push_back({cur_p.data[i].id, cur_p.data[i].p}); + for (size_t i = 0; i < std::min(cur_p.size, (size_t) n_probs); ++i) { + result.probs.push_back({ + cur_p.data[i].id, + cur_p.data[i].p + }); } - if (!process_token(result, slot)) - { + if (!process_token(result, slot)) { slot.release(); slot.print_timings(); send_final_response(slot); @@ -2019,24 +2023,23 @@ struct llama_server_context } LOG_VERBOSE("slots updated", {}); + return true; } - json model_meta() { - return json{ - {"vocab_type", llama_vocab_type(model)}, - {"n_vocab", llama_n_vocab(model)}, - {"n_ctx_train", llama_n_ctx_train(model)}, - {"n_embd", llama_n_embd(model)}, - {"n_params", llama_model_n_params(model)}, - {"size", llama_model_size(model)}, + json model_meta() const { + return json { + {"vocab_type", llama_vocab_type (model)}, + {"n_vocab", llama_n_vocab (model)}, + {"n_ctx_train", llama_n_ctx_train (model)}, + {"n_embd", llama_n_embd (model)}, + {"n_params", llama_model_n_params(model)}, + {"size", llama_model_size (model)}, }; } }; -static void server_print_usage(const char *argv0, const gpt_params ¶ms, - const server_params &sparams) -{ +static void server_print_usage(const char * argv0, const gpt_params & params, const server_params & sparams) { printf("usage: %s [options]\n", argv0); printf("\n"); printf("options:\n"); @@ -2054,17 +2057,14 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); - printf(" --pooling {none,mean,cls}\n"); - printf(" pooling type for embeddings, use model default if unspecified\n"); + printf(" --pooling {none,mean,cls} pooling type for embeddings, use model default if unspecified\n"); printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); - if (llama_supports_mlock()) - { + if (llama_supports_mlock()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } - if (llama_supports_mmap()) - { + if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } printf(" --numa TYPE attempt optimizations that help on some NUMA systems\n"); @@ -2096,7 +2096,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --api-key API_KEY optional api key to enhance 
server security. If set, requests must include this key for access.\n"); printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); - printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); + printf(" --embeddings enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" -spf FNAME, --system-prompt-file FNAME\n"); @@ -2105,7 +2105,6 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" KV cache data type for K (default: f16)\n"); printf(" -ctv TYPE, --cache-type-v TYPE\n"); printf(" KV cache data type for V (default: f16)\n"); - printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-format log output format: json or text (default: json)\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); @@ -2123,57 +2122,41 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf("\n"); } -static void server_params_parse(int argc, char **argv, server_params &sparams, - gpt_params ¶ms, llama_server_context& llama) -{ - gpt_params default_params; +static void server_params_parse(int argc, char ** argv, server_params & sparams, gpt_params & params) { + gpt_params default_params; server_params default_sparams; + std::string arg; bool invalid_param = false; - for (int i = 1; i < argc; i++) - { + for (int i = 1; i < argc; i++) { arg = argv[i]; - if (arg == "--port") - { - if (++i >= argc) - { + if (arg == "--port") { + if (++i >= argc) { invalid_param = true; break; } sparams.port = std::stoi(argv[i]); - } - else if (arg == "--host") - { - if (++i >= argc) - { + } else if (arg == "--host") { + if (++i >= argc) { invalid_param = true; break; } sparams.hostname = argv[i]; - } - else if (arg == "--path") - { - if (++i >= argc) - { + } else if (arg == "--path") { + if (++i >= argc) { invalid_param = true; break; } sparams.public_path = argv[i]; - } - else if (arg == "--api-key") - { - if (++i >= argc) - { + } else if (arg == "--api-key") { + if (++i >= argc) { invalid_param = true; break; } sparams.api_keys.emplace_back(argv[i]); - } - else if (arg == "--api-key-file") - { - if (++i >= argc) - { + } else if (arg == "--api-key-file") { + if (++i >= argc) { invalid_param = true; break; } @@ -2190,53 +2173,36 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } key_file.close(); - } - else if (arg == "--timeout" || arg == "-to") - { - if (++i >= argc) - { + } else if (arg == "--timeout" || arg == "-to") { + if (++i >= argc) { invalid_param = true; break; } sparams.read_timeout = std::stoi(argv[i]); sparams.write_timeout = std::stoi(argv[i]); - } - else if (arg == "-m" || arg == "--model") - { - if (++i >= argc) - { + } else if (arg == "-m" || arg == "--model") { + if (++i >= argc) { invalid_param = true; break; } params.model = argv[i]; - } - else if (arg == "-a" || arg == "--alias") - { - if (++i >= argc) - { + } else if (arg == "-a" || arg == "--alias") { + if (++i >= argc) { invalid_param = true; break; } 
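            // every value-taking flag follows the same pattern: ++i consumes the next
            // argv entry as the value, and a missing value flips invalid_param.
            // illustrative invocation (the model path is just a placeholder):
            //
            //   ./server -m models/7B/ggml-model.gguf -a my-model -c 4096 --port 8080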
params.model_alias = argv[i]; - } - else if (arg == "-h" || arg == "--help") - { + } else if (arg == "-h" || arg == "--help") { server_print_usage(argv[0], default_params, default_sparams); exit(0); - } - else if (arg == "-c" || arg == "--ctx-size" || arg == "--ctx_size") - { - if (++i >= argc) - { + } else if (arg == "-c" || arg == "--ctx-size" || arg == "--ctx_size") { + if (++i >= argc) { invalid_param = true; break; } params.n_ctx = std::stoi(argv[i]); - } - else if (arg == "--rope-scaling") - { - if (++i >= argc) - { + } else if (arg == "--rope-scaling") { + if (++i >= argc) { invalid_param = true; break; } @@ -2245,59 +2211,44 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR; } else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN; } else { invalid_param = true; break; } - } - else if (arg == "--rope-freq-base") - { - if (++i >= argc) - { + } else if (arg == "--rope-freq-base") { + if (++i >= argc) { invalid_param = true; break; } params.rope_freq_base = std::stof(argv[i]); - } - else if (arg == "--rope-freq-scale") - { - if (++i >= argc) - { + } else if (arg == "--rope-freq-scale") { + if (++i >= argc) { invalid_param = true; break; } params.rope_freq_scale = std::stof(argv[i]); - } - else if (arg == "--yarn-ext-factor") - { + } else if (arg == "--yarn-ext-factor") { if (++i >= argc) { invalid_param = true; break; } params.yarn_ext_factor = std::stof(argv[i]); } - else if (arg == "--yarn-attn-factor") - { + else if (arg == "--yarn-attn-factor") { if (++i >= argc) { invalid_param = true; break; } params.yarn_attn_factor = std::stof(argv[i]); - } - else if (arg == "--yarn-beta-fast") - { + } else if (arg == "--yarn-beta-fast") { if (++i >= argc) { invalid_param = true; break; } params.yarn_beta_fast = std::stof(argv[i]); - } - else if (arg == "--yarn-beta-slow") - { + } else if (arg == "--yarn-beta-slow") { if (++i >= argc) { invalid_param = true; break; } params.yarn_beta_slow = std::stof(argv[i]); - } - else if (arg == "--pooling") - { + } else if (arg == "--pooling") { if (++i >= argc) { invalid_param = true; break; @@ -2307,108 +2258,79 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, else if (value == "mean") { params.pooling_type = LLAMA_POOLING_TYPE_MEAN; } else if (value == "cls") { params.pooling_type = LLAMA_POOLING_TYPE_CLS; } else { invalid_param = true; break; } - } - else if (arg == "--threads" || arg == "-t") - { + } else if (arg == "--threads" || arg == "-t") { if (++i >= argc) { invalid_param = true; break; } params.n_threads = std::stoi(argv[i]); - } - else if (arg == "--grp-attn-n" || arg == "-gan") - { + } else if (arg == "--grp-attn-n" || arg == "-gan") { if (++i >= argc) { invalid_param = true; break; } params.grp_attn_n = std::stoi(argv[i]); - } - else if (arg == "--grp-attn-w" || arg == "-gaw") - { - if (++i >= argc) - { + } else if (arg == "--grp-attn-w" || arg == "-gaw") { + if (++i >= argc) { invalid_param = true; break; } params.grp_attn_w = std::stoi(argv[i]); - } - else if (arg == "--threads-batch" || arg == "-tb") - { - if (++i >= argc) - { + } else if (arg == "--threads-batch" || arg == "-tb") { + if (++i >= argc) { invalid_param = true; break; } params.n_threads_batch = std::stoi(argv[i]); - } - else if (arg == "--threads-http") - { - if (++i >= argc) - { + } else if (arg == "--threads-http") { + if (++i >= argc) { invalid_param = true; break; } sparams.n_threads_http = 
std::stoi(argv[i]); - } - else if (arg == "-b" || arg == "--batch-size") - { - if (++i >= argc) - { + } else if (arg == "-b" || arg == "--batch-size") { + if (++i >= argc) { invalid_param = true; break; } params.n_batch = std::stoi(argv[i]); - } - else if (arg == "--gpu-layers" || arg == "-ngl" || arg == "--n-gpu-layers") - { - if (++i >= argc) - { + } else if (arg == "--gpu-layers" || arg == "-ngl" || arg == "--n-gpu-layers") { + if (++i >= argc) { invalid_param = true; break; } if (llama_supports_gpu_offload()) { params.n_gpu_layers = std::stoi(argv[i]); } else { - LOG_WARNING("Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " - "See main README.md for information on enabling GPU BLAS support", - {{"n_gpu_layers", params.n_gpu_layers}}); + LOG_WARNING( + "Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " + "See main README.md for information on enabling GPU BLAS support", + {{"n_gpu_layers", params.n_gpu_layers}}); } - } - else if (arg == "--split-mode" || arg == "-sm") - { + } else if (arg == "--split-mode" || arg == "-sm") { if (++i >= argc) { invalid_param = true; break; } std::string arg_next = argv[i]; - if (arg_next == "none") - { + if (arg_next == "none") { params.split_mode = LLAMA_SPLIT_MODE_NONE; - } - else if (arg_next == "layer") - { + } else if (arg_next == "layer") { params.split_mode = LLAMA_SPLIT_MODE_LAYER; - } - else if (arg_next == "row") - { + } else if (arg_next == "row") { params.split_mode = LLAMA_SPLIT_MODE_ROW; - } - else { + } else { invalid_param = true; break; } #ifndef GGML_USE_CUBLAS fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the split mode has no effect.\n"); #endif // GGML_USE_CUBLAS - } - else if (arg == "--tensor-split" || arg == "-ts") - { - if (++i >= argc) - { + } else if (arg == "--tensor-split" || arg == "-ts") { + if (++i >= argc) { invalid_param = true; break; } @@ -2421,25 +2343,18 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, std::vector split_arg{it, {}}; GGML_ASSERT(split_arg.size() <= llama_max_devices()); - for (size_t i_device = 0; i_device < llama_max_devices(); ++i_device) - { - if (i_device < split_arg.size()) - { + for (size_t i_device = 0; i_device < llama_max_devices(); ++i_device) { + if (i_device < split_arg.size()) { params.tensor_split[i_device] = std::stof(split_arg[i_device]); - } - else - { + } else { params.tensor_split[i_device] = 0.0f; } } #else LOG_WARNING("llama.cpp was compiled without cuBLAS. It is not possible to set a tensor split.\n", {}); #endif // GGML_USE_CUBLAS - } - else if (arg == "--main-gpu" || arg == "-mg") - { - if (++i >= argc) - { + } else if (arg == "--main-gpu" || arg == "-mg") { + if (++i >= argc) { invalid_param = true; break; } @@ -2448,98 +2363,70 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, #else LOG_WARNING("llama.cpp was compiled without cuBLAS. 
It is not possible to set a main GPU.", {}); #endif - } - else if (arg == "--lora") - { - if (++i >= argc) - { + } else if (arg == "--lora") { + if (++i >= argc) { invalid_param = true; break; } params.lora_adapter.emplace_back(argv[i], 1.0f); params.use_mmap = false; - } - else if (arg == "--lora-scaled") - { - if (++i >= argc) - { + } else if (arg == "--lora-scaled") { + if (++i >= argc) { invalid_param = true; break; } const char * lora_adapter = argv[i]; - if (++i >= argc) - { + if (++i >= argc) { invalid_param = true; break; } params.lora_adapter.emplace_back(lora_adapter, std::stof(argv[i])); params.use_mmap = false; - } - else if (arg == "--lora-base") - { - if (++i >= argc) - { + } else if (arg == "--lora-base") { + if (++i >= argc) { invalid_param = true; break; } params.lora_base = argv[i]; - } - else if (arg == "-v" || arg == "--verbose") - { + } else if (arg == "-v" || arg == "--verbose") { #if SERVER_VERBOSE != 1 LOG_WARNING("server.cpp is not built with verbose logging.", {}); #else server_verbose = true; #endif - } - else if (arg == "--mlock") - { + } else if (arg == "--mlock") { params.use_mlock = true; - } - else if (arg == "--no-mmap") - { + } else if (arg == "--no-mmap") { params.use_mmap = false; - } - else if (arg == "--numa") { + } else if (arg == "--numa") { if (++i >= argc) { invalid_param = true; break; } else { std::string value(argv[i]); /**/ if (value == "distribute" || value == "" ) { params.numa = GGML_NUMA_STRATEGY_DISTRIBUTE; } - else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } - else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } + else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } + else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } else { invalid_param = true; break; } } - } - else if (arg == "--embedding") - { + } else if (arg == "--embedding" || arg == "--embeddings") { params.embedding = true; - } - else if (arg == "-cb" || arg == "--cont-batching") - { + } else if (arg == "-cb" || arg == "--cont-batching") { params.cont_batching = true; - } - else if (arg == "-np" || arg == "--parallel") - { - if (++i >= argc) - { + } else if (arg == "-np" || arg == "--parallel") { + if (++i >= argc) { invalid_param = true; break; } params.n_parallel = std::stoi(argv[i]); - } else if (arg == "-n" || arg == "--n-predict") - { - if (++i >= argc) - { + } else if (arg == "-n" || arg == "--n-predict") { + if (++i >= argc) { invalid_param = true; break; } params.n_predict = std::stoi(argv[i]); - } else if (arg == "-spf" || arg == "--system-prompt-file") - { - if (++i >= argc) - { + } else if (arg == "-spf" || arg == "--system-prompt-file") { + if (++i >= argc) { invalid_param = true; break; } @@ -2549,67 +2436,39 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - std::string systm_content; + std::string system_prompt; std::copy( std::istreambuf_iterator(file), std::istreambuf_iterator(), - std::back_inserter(systm_content) + std::back_inserter(system_prompt) ); - llama.system_prompt_process(json::parse(systm_content)); - } - else if (arg == "-ctk" || arg == "--cache-type-k") { + sparams.system_prompt = system_prompt; + } else if (arg == "-ctk" || arg == "--cache-type-k") { params.cache_type_k = argv[++i]; - } - else if (arg == "-ctv" || arg == "--cache-type-v") { + } else if (arg == "-ctv" || arg == "--cache-type-v") { params.cache_type_v = argv[++i]; - } - else if(arg == "--mmproj") - { - if (++i >= argc) - { + } 
else if (arg == "--log-format") { + if (++i >= argc) { invalid_param = true; break; } - params.mmproj = argv[i]; - } - else if (arg == "--log-format") - { - if (++i >= argc) - { - invalid_param = true; - break; - } - if (std::strcmp(argv[i], "json") == 0) - { + if (std::strcmp(argv[i], "json") == 0) { server_log_json = true; - } - else if (std::strcmp(argv[i], "text") == 0) - { + } else if (std::strcmp(argv[i], "text") == 0) { server_log_json = false; - } - else - { + } else { invalid_param = true; break; } - } - else if (arg == "--log-disable") - { + } else if (arg == "--log-disable") { log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); - } - else if (arg == "--slots-endpoint-disable") - { + } else if (arg == "--slots-endpoint-disable") { sparams.slots_endpoint = false; - } - else if (arg == "--metrics") - { + } else if (arg == "--metrics") { sparams.metrics_endpoint = true; - } - else if (arg == "--chat-template") - { - if (++i >= argc) - { + } else if (arg == "--chat-template") { + if (++i >= argc) { invalid_param = true; break; } @@ -2620,9 +2479,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, break; } sparams.chat_template = argv[i]; - } - else if (arg == "--override-kv") - { + } else if (arg == "--override-kv") { if (++i >= argc) { invalid_param = true; break; @@ -2633,6 +2490,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } + struct llama_model_kv_override kvo; std::strncpy(kvo.key, argv[i], sep - argv[i]); kvo.key[sep - argv[i]] = 0; @@ -2663,67 +2521,28 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, break; } params.kv_overrides.push_back(kvo); - } - else - { + } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); server_print_usage(argv[0], default_params, default_sparams); exit(1); } } + if (!params.kv_overrides.empty()) { params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; } - if (invalid_param) - { + if (invalid_param) { fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str()); server_print_usage(argv[0], default_params, default_sparams); exit(1); } } -/* llama.cpp completion api semantics */ -static json format_partial_response( - llama_server_context &llama, server_slot *slot, const std::string &content, const std::vector &probs -) { - json res = json - { - {"content", content }, - {"stop", false}, - {"slot_id", slot->id }, - {"multimodal", llama.multimodal } - }; - - if (slot->sparams.n_probs > 0) - { - res["completion_probabilities"] = probs_vector_to_json(llama.ctx, probs); - } - - return res; -} - -static json format_tokenizer_response(const std::vector &tokens) -{ - return json { - {"tokens", tokens} - }; -} - -static json format_detokenized_response(std::string content) -{ - return json { - {"content", content} - }; -} - - -static void log_server_request(const httplib::Request &req, const httplib::Response &res) -{ +static void log_server_request(const httplib::Request & req, const httplib::Response & res) { // skip GH copilot requests when using default port - if (req.path == "/v1/health" || req.path == "/v1/completions") - { + if (req.path == "/v1/health" || req.path == "/v1/completions") { return; } @@ -2742,24 +2561,9 @@ static void log_server_request(const httplib::Request &req, const httplib::Respo }); } -static void append_to_generated_text_from_generated_token_probs(llama_server_context &llama, server_slot *slot) -{ - auto & gtps = 
slot->generated_token_probs; - auto translator = token_translator{llama.ctx}; - auto add_strlen = [=](size_t sum, const completion_token_output & cto) { return sum + translator(cto).size(); }; - const size_t len = std::accumulate(gtps.begin(), gtps.end(), size_t(0), add_strlen); - if (slot->generated_text.capacity() < slot->generated_text.size() + len) - { - slot->generated_text.reserve(slot->generated_text.size() + len); - } - for (const completion_token_output & cto : gtps) - { - slot->generated_text += translator(cto); - } -} - std::function shutdown_handler; std::atomic_flag is_terminating = ATOMIC_FLAG_INIT; + inline void signal_handler(int signal) { if (is_terminating.test_and_set()) { // in case it hangs, we can force terminate the server by hitting Ctrl+C twice @@ -2767,40 +2571,45 @@ inline void signal_handler(int signal) { fprintf(stderr, "Received second interrupt, terminating immediately.\n"); exit(1); } + shutdown_handler(signal); } -int main(int argc, char **argv) -{ +int main(int argc, char ** argv) { #if SERVER_VERBOSE != 1 log_disable(); #endif // own arguments required by this example - gpt_params params; + gpt_params params; server_params sparams; // struct that contains llama context and inference - llama_server_context llama; + server_context ctx_server; - server_params_parse(argc, argv, sparams, params, llama); + server_params_parse(argc, argv, sparams, params); - if (params.model_alias == "unknown") - { + if (!sparams.system_prompt.empty()) { + ctx_server.system_prompt_set(json::parse(sparams.system_prompt)); + } + + if (params.model_alias == "unknown") { params.model_alias = params.model; } llama_backend_init(); llama_numa_init(params.numa); - LOG_INFO("build info", {{"build", LLAMA_BUILD_NUMBER}, - {"commit", LLAMA_COMMIT}}); + LOG_INFO("build info", { + {"build", LLAMA_BUILD_NUMBER}, + {"commit", LLAMA_COMMIT} + }); LOG_INFO("system info", { - {"n_threads", params.n_threads}, - {"n_threads_batch", params.n_threads_batch}, - {"total_threads", std::thread::hardware_concurrency()}, - {"system_info", llama_print_system_info()}, - }); + {"n_threads", params.n_threads}, + {"n_threads_batch", params.n_threads_batch}, + {"total_threads", std::thread::hardware_concurrency()}, + {"system_info", llama_print_system_info()}, + }); httplib::Server svr; @@ -2809,152 +2618,163 @@ int main(int argc, char **argv) svr.set_default_headers({{"Server", "llama.cpp"}}); // CORS preflight - svr.Options(R"(.*)", [](const httplib::Request &req, httplib::Response &res) { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + svr.Options(R"(.*)", [](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); res.set_header("Access-Control-Allow-Credentials", "true"); - res.set_header("Access-Control-Allow-Methods", "POST"); - res.set_header("Access-Control-Allow-Headers", "*"); + res.set_header("Access-Control-Allow-Methods", "POST"); + res.set_header("Access-Control-Allow-Headers", "*"); }); - svr.Get("/health", [&](const httplib::Request& req, httplib::Response& res) { + svr.Get("/health", [&](const httplib::Request & req, httplib::Response & res) { server_state current_state = state.load(); - switch(current_state) { - case SERVER_STATE_READY: { - // request slots data using task queue - task_server task; - task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_METRICS; - task.target_id = -1; + switch (current_state) { + case SERVER_STATE_READY: + { + // request slots data 
using task queue + server_task task; + task.id = ctx_server.queue_tasks.get_new_id(); + task.type = SERVER_TASK_TYPE_METRICS; + task.id_target = -1; - llama.queue_results.add_waiting_task_id(task.id); - llama.queue_tasks.post(task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); - // get the result - task_result result = llama.queue_results.recv(task.id); - llama.queue_results.remove_waiting_task_id(task.id); + // get the result + server_task_result result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - int n_idle_slots = result.result_json["idle"]; - int n_processing_slots = result.result_json["processing"]; + const int n_idle_slots = result.data["idle"]; + const int n_processing_slots = result.data["processing"]; - json health = { + json health = { {"status", "ok"}, {"slots_idle", n_idle_slots}, - {"slots_processing", n_processing_slots}}; - res.status = 200; // HTTP OK - if (sparams.slots_endpoint && req.has_param("include_slots")) { - health["slots"] = result.result_json["slots"]; - } + {"slots_processing", n_processing_slots} + }; - if (n_idle_slots == 0) { - health["status"] = "no slot available"; - if (req.has_param("fail_on_no_slot")) { - res.status = 503; // HTTP Service Unavailable + res.status = 200; // HTTP OK + if (sparams.slots_endpoint && req.has_param("include_slots")) { + health["slots"] = result.data["slots"]; } + + if (n_idle_slots == 0) { + health["status"] = "no slot available"; + if (req.has_param("fail_on_no_slot")) { + res.status = 503; // HTTP Service Unavailable + } + } + + res.set_content(health.dump(), "application/json"); + break; } - res.set_content(health.dump(), "application/json"); - break; - } case SERVER_STATE_LOADING_MODEL: - res.set_content(R"({"status": "loading model"})", "application/json"); - res.status = 503; // HTTP Service Unavailable - break; + { + res.set_content(R"({"status": "loading model"})", "application/json"); + res.status = 503; // HTTP Service Unavailable + } break; case SERVER_STATE_ERROR: - res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); - res.status = 500; // HTTP Internal Server Error - break; + { + res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); + res.status = 500; // HTTP Internal Server Error + } break; } }); if (sparams.slots_endpoint) { - svr.Get("/slots", [&](const httplib::Request&, httplib::Response& res) { + svr.Get("/slots", [&](const httplib::Request &, httplib::Response & res) { // request slots data using task queue - task_server task; - task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_METRICS; - task.target_id = -1; + server_task task; + task.id = ctx_server.queue_tasks.get_new_id(); + task.id_multi = -1; + task.id_target = -1; + task.type = SERVER_TASK_TYPE_METRICS; - llama.queue_results.add_waiting_task_id(task.id); - llama.queue_tasks.post(task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); // get the result - task_result result = llama.queue_results.recv(task.id); - llama.queue_results.remove_waiting_task_id(task.id); + server_task_result result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - res.set_content(result.result_json["slots"].dump(), "application/json"); + res.set_content(result.data["slots"].dump(), "application/json"); res.status = 200; // HTTP OK }); } if (sparams.metrics_endpoint) { - 
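        // illustrative scrape output of the handler below (Prometheus text exposition
        // format; the values are made-up placeholders):
        //
        //   # HELP llamacpp:prompt_tokens_total Number of prompt tokens processed.
        //   # TYPE llamacpp:prompt_tokens_total counter
        //   llamacpp:prompt_tokens_total 1024
        //   # HELP llamacpp:requests_processing Number of request processing.
        //   # TYPE llamacpp:requests_processing gauge
        //   llamacpp:requests_processing 1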
svr.Get("/metrics", [&](const httplib::Request&, httplib::Response& res) { + svr.Get("/metrics", [&](const httplib::Request &, httplib::Response & res) { // request slots data using task queue - task_server task; - task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_METRICS; - task.target_id = -1; + server_task task; + task.id = ctx_server.queue_tasks.get_new_id(); + task.id_multi = -1; + task.id_target = -1; + task.type = SERVER_TASK_TYPE_METRICS; - llama.queue_results.add_waiting_task_id(task.id); - llama.queue_tasks.post(task); + ctx_server.queue_results.add_waiting_task_id(task.id); + ctx_server.queue_tasks.post(task); // get the result - task_result result = llama.queue_results.recv(task.id); - llama.queue_results.remove_waiting_task_id(task.id); + server_task_result result = ctx_server.queue_results.recv(task.id); + ctx_server.queue_results.remove_waiting_task_id(task.id); - json data = result.result_json; + json data = result.data; - uint64_t n_prompt_tokens_processed = data["n_prompt_tokens_processed"]; - uint64_t t_prompt_processing = data["t_prompt_processing"]; + const uint64_t n_prompt_tokens_processed = data["n_prompt_tokens_processed"]; + const uint64_t t_prompt_processing = data["t_prompt_processing"]; - uint64_t n_tokens_predicted = data["n_tokens_predicted"]; - uint64_t t_tokens_generation = data["t_tokens_generation"]; + const uint64_t n_tokens_predicted = data["n_tokens_predicted"]; + const uint64_t t_tokens_generation = data["t_tokens_generation"]; - int32_t kv_cache_used_cells = data["kv_cache_used_cells"]; + const int32_t kv_cache_used_cells = data["kv_cache_used_cells"]; // metrics definition: https://prometheus.io/docs/practices/naming/#metric-names json all_metrics_def = json { - {"counter", {{ - {"name", "prompt_tokens_total"}, - {"help", "Number of prompt tokens processed."}, - {"value", data["n_prompt_tokens_processed_total"]} - }, { - {"name", "tokens_predicted_total"}, - {"help", "Number of generation tokens processed."}, - {"value", data["n_tokens_predicted_total"]} - }}}, - {"gauge", {{ - {"name", "prompt_tokens_seconds"}, - {"help", "Average prompt throughput in tokens/s."}, - {"value", n_prompt_tokens_processed ? 1e3 / t_prompt_processing * n_prompt_tokens_processed : 0} - },{ - {"name", "predicted_tokens_seconds"}, - {"help", "Average generation throughput in tokens/s."}, - {"value", n_tokens_predicted ? 1e3 / t_tokens_generation * n_tokens_predicted : 0} - },{ - {"name", "kv_cache_usage_ratio"}, - {"help", "KV-cache usage. 1 means 100 percent usage."}, - {"value", 1. * kv_cache_used_cells / params.n_ctx} - },{ - {"name", "kv_cache_tokens"}, - {"help", "KV-cache tokens."}, - {"value", data["kv_cache_tokens_count"]} - },{ - {"name", "requests_processing"}, - {"help", "Number of request processing."}, - {"value", data["processing"]} - },{ - {"name", "requests_deferred"}, - {"help", "Number of request deferred."}, - {"value", data["deferred"]} - }}} + {"counter", {{ + {"name", "prompt_tokens_total"}, + {"help", "Number of prompt tokens processed."}, + {"value", data["n_prompt_tokens_processed_total"]} + }, { + {"name", "tokens_predicted_total"}, + {"help", "Number of generation tokens processed."}, + {"value", data["n_tokens_predicted_total"]} + }}}, + {"gauge", {{ + {"name", "prompt_tokens_seconds"}, + {"help", "Average prompt throughput in tokens/s."}, + {"value", n_prompt_tokens_processed ? 
1e3 / t_prompt_processing * n_prompt_tokens_processed : 0} + },{ + {"name", "predicted_tokens_seconds"}, + {"help", "Average generation throughput in tokens/s."}, + {"value", n_tokens_predicted ? 1e3 / t_tokens_generation * n_tokens_predicted : 0} + },{ + {"name", "kv_cache_usage_ratio"}, + {"help", "KV-cache usage. 1 means 100 percent usage."}, + {"value", 1. * kv_cache_used_cells / params.n_ctx} + },{ + {"name", "kv_cache_tokens"}, + {"help", "KV-cache tokens."}, + {"value", data["kv_cache_tokens_count"]} + },{ + {"name", "requests_processing"}, + {"help", "Number of request processing."}, + {"value", data["processing"]} + },{ + {"name", "requests_deferred"}, + {"help", "Number of request deferred."}, + {"value", data["deferred"]} + }}} }; std::stringstream prometheus; - for (const auto& el : all_metrics_def.items()) { - const auto& type = el.key(); - const auto& metrics_def = el.value(); - for (const auto& metric_def : metrics_def) { - std::string name = metric_def["name"]; - std::string help = metric_def["help"]; + + for (const auto & el : all_metrics_def.items()) { + const auto & type = el.key(); + const auto & metrics_def = el.value(); + + for (const auto & metric_def : metrics_def) { + const std::string name = metric_def["name"]; + const std::string help = metric_def["help"]; + auto value = json_value(metric_def, "value", 0); prometheus << "# HELP llamacpp:" << name << " " << help << "\n" << "# TYPE llamacpp:" << name << " " << type << "\n" @@ -2969,49 +2789,39 @@ int main(int argc, char **argv) svr.set_logger(log_server_request); - svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) - { - const char fmt[] = "500 Internal Server Error\n%s"; - char buf[BUFSIZ]; - try - { - std::rethrow_exception(std::move(ep)); - } - catch (std::exception &e) - { - snprintf(buf, sizeof(buf), fmt, e.what()); - } - catch (...) - { - snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); - } - res.set_content(buf, "text/plain; charset=utf-8"); - res.status = 500; - }); + svr.set_exception_handler([](const httplib::Request &, httplib::Response & res, std::exception_ptr ep) { + const char fmt[] = "500 Internal Server Error\n%s"; - svr.set_error_handler([](const httplib::Request &, httplib::Response &res) - { - if (res.status == 401) - { - res.set_content("Unauthorized", "text/plain; charset=utf-8"); - } - if (res.status == 400) - { - res.set_content("Invalid request", "text/plain; charset=utf-8"); - } - else if (res.status == 404) - { - res.set_content("File Not Found", "text/plain; charset=utf-8"); - res.status = 404; - } - }); + char buf[BUFSIZ]; + try { + std::rethrow_exception(std::move(ep)); + } catch (std::exception &e) { + snprintf(buf, sizeof(buf), fmt, e.what()); + } catch (...) 
{ + snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); + } + + res.set_content(buf, "text/plain; charset=utf-8"); + res.status = 500; + }); + + svr.set_error_handler([](const httplib::Request &, httplib::Response & res) { + if (res.status == 401) { + res.set_content("Unauthorized", "text/plain; charset=utf-8"); + } + if (res.status == 400) { + res.set_content("Invalid request", "text/plain; charset=utf-8"); + } + if (res.status == 404) { + res.set_content("File Not Found", "text/plain; charset=utf-8"); + } + }); // set timeouts and change hostname and port svr.set_read_timeout (sparams.read_timeout); svr.set_write_timeout(sparams.write_timeout); - if (!svr.bind_to_port(sparams.hostname, sparams.port)) - { + if (!svr.bind_to_port(sparams.hostname, sparams.port)) { fprintf(stderr, "\ncouldn't bind to server socket: hostname=%s port=%d\n\n", sparams.hostname.c_str(), sparams.port); return 1; } @@ -3020,8 +2830,9 @@ int main(int argc, char **argv) svr.set_base_dir(sparams.public_path); std::unordered_map log_data; + log_data["hostname"] = sparams.hostname; - log_data["port"] = std::to_string(sparams.port); + log_data["port"] = std::to_string(sparams.port); if (sparams.api_keys.size() == 1) { log_data["api_key"] = "api_key: ****" + sparams.api_keys[0].substr(sparams.api_keys[0].length() - 4); @@ -3030,20 +2841,23 @@ int main(int argc, char **argv) } // load the model - if (!llama.load_model(params)) - { + if (!ctx_server.load_model(params)) { state.store(SERVER_STATE_ERROR); return 1; } else { - llama.initialize(); + ctx_server.initialize(); state.store(SERVER_STATE_READY); - LOG_INFO("model loaded", {}); } - const auto model_meta = llama.model_meta(); + + LOG_INFO("model loaded", {}); + + const auto model_meta = ctx_server.model_meta(); if (sparams.chat_template.empty()) { // custom chat template is not supplied - // check if the template comes with the model is supported by us - llama.validate_model_chat_template(sparams); + if (!ctx_server.validate_model_chat_template()) { + LOG_ERROR("The chat template that comes with this model is not yet supported, falling back to chatml. 
This may cause the model to output suboptimal responses", {}); + sparams.chat_template = "chatml"; + } } // Middleware for API key validation @@ -3055,6 +2869,7 @@ int main(int argc, char **argv) // Check for API key in the header auto auth_header = req.get_header_value("Authorization"); + std::string prefix = "Bearer "; if (auth_header.substr(0, prefix.size()) == prefix) { std::string received_api_key = auth_header.substr(prefix.size()); @@ -3073,179 +2888,173 @@ int main(int argc, char **argv) }; // this is only called if no index.html is found in the public --path - svr.Get("/", [](const httplib::Request &, httplib::Response &res) - { - res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html; charset=utf-8"); - return false; - }); + svr.Get("/", [](const httplib::Request &, httplib::Response & res) { + res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html; charset=utf-8"); + return false; + }); // this is only called if no index.js is found in the public --path - svr.Get("/index.js", [](const httplib::Request &, httplib::Response &res) - { - res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript; charset=utf-8"); - return false; - }); + svr.Get("/index.js", [](const httplib::Request &, httplib::Response & res) { + res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript; charset=utf-8"); + return false; + }); // this is only called if no index.html is found in the public --path - svr.Get("/completion.js", [](const httplib::Request &, httplib::Response &res) - { - res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript; charset=utf-8"); - return false; - }); + svr.Get("/completion.js", [](const httplib::Request &, httplib::Response & res) { + res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript; charset=utf-8"); + return false; + }); // this is only called if no index.html is found in the public --path - svr.Get("/json-schema-to-grammar.mjs", [](const httplib::Request &, httplib::Response &res) - { - res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript; charset=utf-8"); - return false; - }); + svr.Get("/json-schema-to-grammar.mjs", [](const httplib::Request &, httplib::Response & res) { + res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript; charset=utf-8"); + return false; + }); - svr.Get("/props", [&llama](const httplib::Request & req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - json data = { - { "user_name", llama.name_user.c_str() }, - { "assistant_name", llama.name_assistant.c_str() }, - { "default_generation_settings", llama.default_generation_settings_for_props }, - { "total_slots", llama.params.n_parallel } - }; - res.set_content(data.dump(), "application/json; charset=utf-8"); - }); + svr.Get("/props", [&ctx_server](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + json data = { + { "user_name", ctx_server.name_user.c_str() }, + { "assistant_name", ctx_server.name_assistant.c_str() }, + { "default_generation_settings", ctx_server.default_generation_settings_for_props }, + { "total_slots", ctx_server.params.n_parallel } + }; - svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) - { - 
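Since the middleware above only inspects the `Authorization` header for a `Bearer <key>` prefix, any HTTP client can authenticate that way. A rough sketch of calling the completion endpoint in both non-streaming and streaming mode, assuming a server on `localhost:8080`, a configured API key, and the usual `prompt`/`n_predict` request fields (the exact body schema is not spelled out in this hunk):

```python
# Minimal sketch: call /completion with an API key; host, key and request
# fields other than "stream" are assumptions.
import json
import requests

BASE = "http://localhost:8080"                        # assumed host/port
HEADERS = {"Authorization": "Bearer my-secret-key"}   # hypothetical API key

# Non-streaming: a single JSON document comes back.
r = requests.post(f"{BASE}/completion", headers=HEADERS,
                  json={"prompt": "Hello", "n_predict": 16, "stream": False},
                  timeout=600)
print(r.json())

# Streaming: the server pushes "data: {...}\n\n" (or "error: {...}") chunks.
with requests.post(f"{BASE}/completion", headers=HEADERS,
                   json={"prompt": "Hello", "n_predict": 16, "stream": True},
                   stream=True, timeout=600) as r:
    for line in r.iter_lines(decode_unicode=True):
        if line and line.startswith("data: "):
            chunk = json.loads(line[len("data: "):])
            print(chunk.get("content", ""), end="", flush=True)
```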
res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - if (!validate_api_key(req, res)) { - return; - } - json data = json::parse(req.body); - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, data, false, false, -1); - if (!json_value(data, "stream", false)) { - std::string completion_text; - task_result result = llama.queue_results.recv(task_id); - if (!result.error && result.stop) { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); - } - else - { - res.status = 404; - res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); - } - llama.queue_results.remove_waiting_task_id(task_id); - } else { - const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink & sink) - { - while (true) - { - task_result result = llama.queue_results.recv(task_id); - if (!result.error) { - const std::string str = - "data: " + - result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - if (!sink.write(str.c_str(), str.size())) - { - llama.queue_results.remove_waiting_task_id(task_id); - return false; - } - if (result.stop) { - break; - } - } else { - const std::string str = - "error: " + - result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - if (!sink.write(str.c_str(), str.size())) - { - llama.queue_results.remove_waiting_task_id(task_id); - return false; - } - break; - } - } + res.set_content(data.dump(), "application/json; charset=utf-8"); + }); - llama.queue_results.remove_waiting_task_id(task_id); - sink.done(); - return true; - }; - - auto on_complete = [task_id, &llama] (bool) - { - // cancel - llama.request_cancel(task_id); - llama.queue_results.remove_waiting_task_id(task_id); - }; - - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); - } - }); - - svr.Get("/v1/models", [¶ms, &model_meta](const httplib::Request& req, httplib::Response& res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - std::time_t t = std::time(0); - - json models = { - {"object", "list"}, - {"data", { - { - {"id", params.model_alias}, - {"object", "model"}, - {"created", t}, - {"owned_by", "llamacpp"}, - {"meta", model_meta} - }, - }} - }; - - res.set_content(models.dump(), "application/json; charset=utf-8"); - }); - - const auto chat_completions = [&llama, &validate_api_key, &sparams](const httplib::Request &req, httplib::Response &res) - { + svr.Post("/completion", [&ctx_server, &validate_api_key](const httplib::Request & req, httplib::Response & res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } - json data = oaicompat_completion_params_parse(llama.model, json::parse(req.body), sparams.chat_template); - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, data, false, false, -1); + json data = json::parse(req.body); + + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, data, false, false); if (!json_value(data, "stream", false)) { - std::string completion_text; - 
task_result result = llama.queue_results.recv(task_id); - + server_task_result result = ctx_server.queue_results.recv(id_task); if (!result.error && result.stop) { - json oaicompat_result = format_final_response_oaicompat(data, result); - - res.set_content(oaicompat_result.dump(-1, ' ', false, - json::error_handler_t::replace), - "application/json; charset=utf-8"); + res.set_content(result.data.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); } else { res.status = 500; - res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); + res.set_content(result.data["content"], "text/plain; charset=utf-8"); } - llama.queue_results.remove_waiting_task_id(task_id); - } else { - const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink &sink) { - while (true) { - task_result llama_result = llama.queue_results.recv(task_id); - if (!llama_result.error) { - std::vector result_array = format_partial_response_oaicompat( llama_result); - for (auto it = result_array.begin(); it != result_array.end(); ++it) - { + ctx_server.queue_results.remove_waiting_task_id(id_task); + } else { + const auto chunked_content_provider = [id_task, &ctx_server](size_t, httplib::DataSink & sink) { + while (true) { + server_task_result result = ctx_server.queue_results.recv(id_task); + if (!result.error) { + const std::string str = + "data: " + + result.data.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; + + LOG_VERBOSE("data stream", { + { "to_send", str } + }); + + if (!sink.write(str.c_str(), str.size())) { + ctx_server.queue_results.remove_waiting_task_id(id_task); + return false; + } + + if (result.stop) { + break; + } + } else { + const std::string str = + "error: " + + result.data.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; + + LOG_VERBOSE("data stream", { + { "to_send", str } + }); + + if (!sink.write(str.c_str(), str.size())) { + ctx_server.queue_results.remove_waiting_task_id(id_task); + return false; + } + + break; + } + } + + ctx_server.queue_results.remove_waiting_task_id(id_task); + sink.done(); + + return true; + }; + + auto on_complete = [id_task, &ctx_server] (bool) { + // cancel + ctx_server.request_cancel(id_task); + ctx_server.queue_results.remove_waiting_task_id(id_task); + }; + + res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + } + }); + + svr.Get("/v1/models", [¶ms, &model_meta](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + + json models = { + {"object", "list"}, + {"data", { + { + {"id", params.model_alias}, + {"object", "model"}, + {"created", std::time(0)}, + {"owned_by", "llamacpp"}, + {"meta", model_meta} + }, + }} + }; + + res.set_content(models.dump(), "application/json; charset=utf-8"); + }); + + const auto chat_completions = [&ctx_server, &validate_api_key, &sparams](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + if (!validate_api_key(req, res)) { + return; + } + + json data = oaicompat_completion_params_parse(ctx_server.model, json::parse(req.body), sparams.chat_template); + + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, data, false, false); + + if (!json_value(data, "stream", false)) { + server_task_result result = 
ctx_server.queue_results.recv(id_task); + + if (!result.error && result.stop) { + json result_oai = format_final_response_oaicompat(data, result.data); + + res.set_content(result_oai.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); + } else { + res.status = 500; + res.set_content(result.data["content"], "text/plain; charset=utf-8"); + } + ctx_server.queue_results.remove_waiting_task_id(id_task); + } else { + const auto chunked_content_provider = [id_task, &ctx_server](size_t, httplib::DataSink & sink) { + while (true) { + server_task_result result = ctx_server.queue_results.recv(id_task); + if (!result.error) { + std::vector result_array = format_partial_response_oaicompat(result.data); + + for (auto it = result_array.begin(); it != result_array.end(); ++it) { if (!it->empty()) { const std::string str = "data: " + @@ -3253,251 +3062,235 @@ int main(int argc, char **argv) "\n\n"; LOG_VERBOSE("data stream", {{"to_send", str}}); if (!sink.write(str.c_str(), str.size())) { - llama.queue_results.remove_waiting_task_id(task_id); + ctx_server.queue_results.remove_waiting_task_id(id_task); return false; } } } - if (llama_result.stop) { + if (result.stop) { break; } } else { const std::string str = "error: " + - llama_result.result_json.dump(-1, ' ', false, - json::error_handler_t::replace) + + result.data.dump(-1, ' ', false, json::error_handler_t::replace) + "\n\n"; LOG_VERBOSE("data stream", {{"to_send", str}}); if (!sink.write(str.c_str(), str.size())) { - llama.queue_results.remove_waiting_task_id(task_id); + ctx_server.queue_results.remove_waiting_task_id(id_task); return false; } break; } } sink.done(); - llama.queue_results.remove_waiting_task_id(task_id); + ctx_server.queue_results.remove_waiting_task_id(id_task); return true; }; - auto on_complete = [task_id, &llama](bool) { + auto on_complete = [id_task, &ctx_server](bool) { // cancel request - llama.request_cancel(task_id); - llama.queue_results.remove_waiting_task_id(task_id); + ctx_server.request_cancel(id_task); + ctx_server.queue_results.remove_waiting_task_id(id_task); }; res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); } }; - svr.Post("/chat/completions", chat_completions); + svr.Post("/chat/completions", chat_completions); svr.Post("/v1/chat/completions", chat_completions); - svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - if (!validate_api_key(req, res)) { - return; - } - json data = json::parse(req.body); - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, data, true, false, -1); - if (!json_value(data, "stream", false)) { - std::string completion_text; - task_result result = llama.queue_results.recv(task_id); - if (!result.error && result.stop) - { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); - } - else - { - res.status = 404; - res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); - } - llama.queue_results.remove_waiting_task_id(task_id); - } else { - const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink & sink) { - while (true) - { - task_result result = llama.queue_results.recv(task_id); - if (!result.error) { - const std::string str = - "data: " + - result.result_json.dump(-1, ' ', 
false, json::error_handler_t::replace) + - "\n\n"; - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - if (!sink.write(str.c_str(), str.size())) - { - llama.queue_results.remove_waiting_task_id(task_id); - return false; - } - if (result.stop) - { - break; - } - } - else - { - break; - } - } - - llama.queue_results.remove_waiting_task_id(task_id); - sink.done(); - return true; - }; - - auto on_complete = [task_id, &llama] (bool) - { - // cancel - llama.request_cancel(task_id); - }; - - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); - } - }); - - svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response &res) - { return res.set_content("", "application/json; charset=utf-8"); }); - - svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - const json body = json::parse(req.body); - std::vector tokens; - if (body.count("content") != 0) - { - tokens = llama.tokenize(body["content"], false); - } - const json data = format_tokenizer_response(tokens); - return res.set_content(data.dump(), "application/json; charset=utf-8"); - }); - - svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - const json body = json::parse(req.body); - std::string content; - if (body.count("tokens") != 0) - { - const std::vector tokens = body["tokens"]; - content = tokens_to_str(llama.ctx, tokens.cbegin(), tokens.cend()); - } - - const json data = format_detokenized_response(content); - return res.set_content(data.dump(), "application/json; charset=utf-8"); - }); - - svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - const json body = json::parse(req.body); - json prompt; - if (body.count("content") != 0) - { - prompt = body["content"]; - } - else - { - prompt = ""; - } - - json image_data; - if (body.count("image_data") != 0) { - image_data = body["image_data"]; - } - else - { - image_data = ""; - } - - // create and queue the task - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, { {"prompt", prompt}, { "n_predict", 0}, {"image_data", image_data} }, false, true, -1); - - // get the result - task_result result = llama.queue_results.recv(task_id); - llama.queue_results.remove_waiting_task_id(task_id); - - // send the result - return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); - }); - - svr.Post("/v1/embeddings", [&llama](const httplib::Request &req, httplib::Response &res) - { - res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); - const json body = json::parse(req.body); - - json prompt; - if (body.count("input") != 0) - { - prompt = body["input"]; - // batch - if(prompt.is_array()) { - json data = json::array(); - int i = 0; - for (const json &elem : prompt) { - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, { {"prompt", elem}, { "n_predict", 0} }, false, true, -1); - - // get the result - task_result result = llama.queue_results.recv(task_id); - llama.queue_results.remove_waiting_task_id(task_id); - - json embedding = json{ - {"embedding", 
json_value(result.result_json, "embedding", json::array())}, - {"index", i++}, - {"object", "embedding"} - }; - data.push_back(embedding); - } - json result = format_embeddings_response_oaicompat(body, data); - return res.set_content(result.dump(), "application/json; charset=utf-8"); - } - } - else - { - prompt = ""; - } - - // create and queue the task - const int task_id = llama.queue_tasks.get_new_id(); - llama.queue_results.add_waiting_task_id(task_id); - llama.request_completion(task_id, { {"prompt", prompt}, { "n_predict", 0}}, false, true, -1); - - // get the result - task_result result = llama.queue_results.recv(task_id); - llama.queue_results.remove_waiting_task_id(task_id); - - json data = json::array({json{ - {"embedding", json_value(result.result_json, "embedding", json::array())}, - {"index", 0}, - {"object", "embedding"} - }} - ); - - json root = format_embeddings_response_oaicompat(body, data); - - // send the result - return res.set_content(root.dump(), "application/json; charset=utf-8"); - }); - - // GG: if I put the main loop inside a thread, it crashes on the first request when build in Debug!? - // "Bus error: 10" - this is on macOS, it does not crash on Linux - //std::thread t2([&]() - /*{ - bool running = true; - while (running) - { - running = llama.update_slots(); + svr.Post("/infill", [&ctx_server, &validate_api_key](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + if (!validate_api_key(req, res)) { + return; } - }*/ - //); + + json data = json::parse(req.body); + + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, data, true, false); + + if (!json_value(data, "stream", false)) { + server_task_result result = ctx_server.queue_results.recv(id_task); + if (!result.error && result.stop) { + res.set_content(result.data.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); + } else { + res.status = 404; + res.set_content(result.data["content"], "text/plain; charset=utf-8"); + } + + ctx_server.queue_results.remove_waiting_task_id(id_task); + } else { + const auto chunked_content_provider = [id_task, &ctx_server](size_t, httplib::DataSink & sink) { + while (true) { + server_task_result result = ctx_server.queue_results.recv(id_task); + if (!result.error) { + const std::string str = + "data: " + + result.data.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; + + LOG_VERBOSE("data stream", { + { "to_send", str } + }); + + if (!sink.write(str.c_str(), str.size())) { + ctx_server.queue_results.remove_waiting_task_id(id_task); + return false; + } + + if (result.stop) { + break; + } + } else { + break; + } + } + + ctx_server.queue_results.remove_waiting_task_id(id_task); + sink.done(); + + return true; + }; + + auto on_complete = [id_task, &ctx_server] (bool) { + ctx_server.request_cancel(id_task); + }; + + res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + } + }); + + svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response & res) { + return res.set_content("", "application/json; charset=utf-8"); + }); + + svr.Post("/tokenize", [&ctx_server](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + const json body = json::parse(req.body); + + std::vector tokens; + if (body.count("content") != 
0) { + tokens = ctx_server.tokenize(body["content"], false); + } + const json data = format_tokenizer_response(tokens); + return res.set_content(data.dump(), "application/json; charset=utf-8"); + }); + + svr.Post("/detokenize", [&ctx_server](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + const json body = json::parse(req.body); + + std::string content; + if (body.count("tokens") != 0) { + const std::vector tokens = body["tokens"]; + content = tokens_to_str(ctx_server.ctx, tokens.cbegin(), tokens.cend()); + } + + const json data = format_detokenized_response(content); + return res.set_content(data.dump(), "application/json; charset=utf-8"); + }); + + svr.Post("/embedding", [¶ms, &ctx_server](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + if (!params.embedding) { + res.status = 501; + res.set_content("This server does not support embeddings. Start it with `--embeddings`", "text/plain; charset=utf-8"); + return; + } + + const json body = json::parse(req.body); + + json prompt; + if (body.count("content") != 0) { + prompt = body["content"]; + } else { + prompt = ""; + } + + // create and queue the task + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, { {"prompt", prompt}, { "n_predict", 0} }, false, true); + + // get the result + server_task_result result = ctx_server.queue_results.recv(id_task); + ctx_server.queue_results.remove_waiting_task_id(id_task); + + // send the result + return res.set_content(result.data.dump(), "application/json; charset=utf-8"); + }); + + svr.Post("/v1/embeddings", [¶ms, &ctx_server](const httplib::Request & req, httplib::Response & res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + if (!params.embedding) { + res.status = 501; + res.set_content("This server does not support embeddings. 
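The two handlers above make it easy to round-trip text through the tokenizer. A minimal sketch, assuming a server on `localhost:8080`; the request fields (`content`, `tokens`) come from the handlers themselves, while the response field names are assumptions about the `format_tokenizer_response` / `format_detokenized_response` helpers, which are defined elsewhere:

```python
# Minimal sketch: tokenize a string, then detokenize the result.
import requests

BASE = "http://localhost:8080"  # assumed host/port

tok = requests.post(f"{BASE}/tokenize", json={"content": "Hello world"}, timeout=60).json()
tokens = tok.get("tokens", [])          # response key assumed
print("token ids:", tokens)

detok = requests.post(f"{BASE}/detokenize", json={"tokens": tokens}, timeout=60).json()
print("round trip:", detok.get("content", ""))  # response key assumed
```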
Start it with `--embeddings`", "text/plain; charset=utf-8"); + return; + } + + const json body = json::parse(req.body); + + json prompt; + if (body.count("input") != 0) { + prompt = body["input"]; + if (prompt.is_array()) { + json data = json::array(); + + int i = 0; + for (const json & elem : prompt) { + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, { {"prompt", elem}, { "n_predict", 0} }, false, true); + + // get the result + server_task_result result = ctx_server.queue_results.recv(id_task); + ctx_server.queue_results.remove_waiting_task_id(id_task); + + json embedding = json{ + {"embedding", json_value(result.data, "embedding", json::array())}, + {"index", i++}, + {"object", "embedding"} + }; + + data.push_back(embedding); + } + + json result = format_embeddings_response_oaicompat(body, data); + + return res.set_content(result.dump(), "application/json; charset=utf-8"); + } + } else { + prompt = ""; + } + + // create and queue the task + const int id_task = ctx_server.queue_tasks.get_new_id(); + + ctx_server.queue_results.add_waiting_task_id(id_task); + ctx_server.request_completion(id_task, -1, { {"prompt", prompt}, { "n_predict", 0}}, false, true); + + // get the result + server_task_result result = ctx_server.queue_results.recv(id_task); + ctx_server.queue_results.remove_waiting_task_id(id_task); + + json data = json::array({json{ + {"embedding", json_value(result.data, "embedding", json::array())}, + {"index", 0}, + {"object", "embedding"} + }} + ); + + json root = format_embeddings_response_oaicompat(body, data); + + return res.set_content(root.dump(), "application/json; charset=utf-8"); + }); if (sparams.n_threads_http < 1) { // +2 threads for monitoring endpoints @@ -3507,34 +3300,33 @@ int main(int argc, char **argv) svr.new_task_queue = [&sparams] { return new httplib::ThreadPool(sparams.n_threads_http); }; LOG_INFO("HTTP server listening", log_data); + // run the HTTP server in a thread - see comment below - std::thread t([&]() - { - if (!svr.listen_after_bind()) - { - state.store(SERVER_STATE_ERROR); - return 1; - } + std::thread t([&]() { + if (!svr.listen_after_bind()) { + state.store(SERVER_STATE_ERROR); + return 1; + } - return 0; - }); + return 0; + }); - llama.queue_tasks.on_new_task(std::bind( - &llama_server_context::process_single_task, &llama, std::placeholders::_1)); - llama.queue_tasks.on_finish_multitask(std::bind( - &llama_server_context::on_finish_multitask, &llama, std::placeholders::_1)); - llama.queue_tasks.on_run_slots(std::bind( - &llama_server_context::update_slots, &llama)); - llama.queue_results.on_multitask_update(std::bind( - &llama_server_queue::update_multitask, - &llama.queue_tasks, + ctx_server.queue_tasks.on_new_task(std::bind( + &server_context::process_single_task, &ctx_server, std::placeholders::_1)); + ctx_server.queue_tasks.on_finish_multitask(std::bind( + &server_context::on_finish_multitask, &ctx_server, std::placeholders::_1)); + ctx_server.queue_tasks.on_run_slots(std::bind( + &server_context::update_slots, &ctx_server)); + ctx_server.queue_results.on_multitask_update(std::bind( + &server_queue::update_multitask, + &ctx_server.queue_tasks, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3 )); shutdown_handler = [&](int) { - llama.queue_tasks.terminate(); + ctx_server.queue_tasks.terminate(); }; #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) @@ -3549,10 +3341,13 @@ int main(int argc, 
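The handler above fans an array `input` out into one task per element and returns an OpenAI-style list response. A short sketch of a multi-input request, assuming the server was started with embeddings enabled and listens on `localhost:8080`:

```python
# Minimal sketch: multi-input request against /v1/embeddings.
import requests

BASE = "http://localhost:8080"  # assumed host/port; embeddings must be enabled server-side

resp = requests.post(f"{BASE}/v1/embeddings",
                     json={"input": ["What is the capital of Bulgaria ?",
                                     "Is Madrid the capital of Spain ?"],
                           "model": "bert-bge-small"},
                     timeout=600)
resp.raise_for_status()

# OpenAI-style list response: one {"embedding": [...], "index": i, "object": "embedding"} per input.
for item in resp.json()["data"]:
    print(item["index"], len(item["embedding"]))
```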
char **argv) }; SetConsoleCtrlHandler(reinterpret_cast(console_ctrl_handler), true); #endif - llama.queue_tasks.start_loop(); + + ctx_server.queue_tasks.start_loop(); + svr.stop(); t.join(); llama_backend_free(); + return 0; } diff --git a/examples/server/tests/features/embeddings.feature b/examples/server/tests/features/embeddings.feature new file mode 100644 index 000000000..b47661e94 --- /dev/null +++ b/examples/server/tests/features/embeddings.feature @@ -0,0 +1,94 @@ +@llama.cpp +@embeddings +Feature: llama.cpp server + + Background: Server startup + Given a server listening on localhost:8080 + And a model file bert-bge-small/ggml-model-f16.gguf from HF repo ggml-org/models + And a model alias bert-bge-small + And 42 as server seed + And 2 slots + And 1024 as batch size + And 2048 KV cache size + And embeddings extraction + Then the server is starting + Then the server is healthy + + Scenario: Embedding + When embeddings are computed for: + """ + What is the capital of Bulgaria ? + """ + Then embeddings are generated + + Scenario: OAI Embeddings compatibility + Given a model bert-bge-small + When an OAI compatible embeddings computation request for: + """ + What is the capital of Spain ? + """ + Then embeddings are generated + + Scenario: OAI Embeddings compatibility with multiple inputs + Given a model bert-bge-small + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? + """ + When an OAI compatible embeddings computation request for multiple inputs + Then embeddings are generated + + Scenario: Multi users embeddings + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + Given concurrent embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated + + Scenario: Multi users OAI compatibility embeddings + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? + """ + And a prompt: + """ + What is the biggest US city ? + """ + And a prompt: + """ + What is the capital of Bulgaria ? + """ + And a model bert-bge-small + Given concurrent OAI embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated + + Scenario: All embeddings should be the same + Given 10 fixed prompts + And a model bert-bge-small + Given concurrent OAI embedding requests + Then all embeddings are the same diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature index 86cdf7282..066698c8e 100644 --- a/examples/server/tests/features/parallel.feature +++ b/examples/server/tests/features/parallel.feature @@ -9,7 +9,6 @@ Feature: Parallel And 512 as batch size And 64 KV cache size And 2 slots - And embeddings extraction And continuous batching Then the server is starting Then the server is healthy @@ -99,48 +98,3 @@ Feature: Parallel Then the server is busy Then the server is idle Then all prompts are predicted - - Scenario: Multi users embeddings - Given a prompt: - """ - Write a very long story about AI. - """ - And a prompt: - """ - Write another very long music lyrics. - """ - And a prompt: - """ - Write a very long poem. - """ - And a prompt: - """ - Write a very long joke. 
- """ - Given concurrent embedding requests - Then the server is busy - Then the server is idle - Then all embeddings are generated - - Scenario: Multi users OAI compatibility embeddings - Given a prompt: - """ - In which country Paris is located ? - """ - And a prompt: - """ - Is Madrid the capital of Spain ? - """ - And a prompt: - """ - What is the biggest US city ? - """ - And a prompt: - """ - What is the capital of Bulgaria ? - """ - And a model tinyllama-2 - Given concurrent OAI embedding requests - Then the server is busy - Then the server is idle - Then all embeddings are generated diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index 7c977bcce..f3b758c79 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -49,34 +49,6 @@ Feature: llama.cpp server | llama-2 | Book | What is the best book | 8 | (Mom\|what)+ | 8 | disabled | | codellama70b | You are a coding assistant. | Write the fibonacci function in c++. | 64 | (thanks\|happy\|bird)+ | 32 | enabled | - Scenario: Embedding - When embeddings are computed for: - """ - What is the capital of Bulgaria ? - """ - Then embeddings are generated - - Scenario: OAI Embeddings compatibility - Given a model tinyllama-2 - When an OAI compatible embeddings computation request for: - """ - What is the capital of Spain ? - """ - Then embeddings are generated - - Scenario: OAI Embeddings compatibility with multiple inputs - Given a model tinyllama-2 - Given a prompt: - """ - In which country Paris is located ? - """ - And a prompt: - """ - Is Madrid the capital of Spain ? - """ - When an OAI compatible embeddings computation request for multiple inputs - Then embeddings are generated - Scenario: Tokenize / Detokenize When tokenizing: """ diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 319527802..a0b2ffdfe 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -10,6 +10,7 @@ from contextlib import closing from re import RegexFlag import aiohttp +import numpy as np import openai from behave import step from behave.api.async_step import async_run_until_complete @@ -24,6 +25,9 @@ def step_server_config(context, server_fqdn, server_port): if 'PORT' in os.environ: context.server_port = int(os.environ['PORT']) print(f"$PORT set, overriding server port with to {context.server_port}") + if 'FQDN' in os.environ: + context.server_fqdn = os.environ['FQDN'] + print(f"$FQDN set, overriding server fqdn with to {context.server_fqdn}") context.base_url = f'http://{context.server_fqdn}:{context.server_port}' @@ -34,6 +38,7 @@ def step_server_config(context, server_fqdn, server_port): context.n_ga_w = None context.n_gpu_layer = None context.n_predict = None + context.n_prompts = 0 context.n_server_predict = None context.n_slots = None context.prompt_prefix = None @@ -202,6 +207,7 @@ def step_n_tokens_predicted(context, predicted_n): @step(u'a user prompt {user_prompt}') def step_user_prompt(context, user_prompt): context.prompts.append(user_prompt) + context.n_prompts = len(context.prompts) @step(u'a system prompt {system_prompt}') @@ -290,6 +296,12 @@ def step_prompt_passkey(context): context.prompt_passkey = context.text +@step(u'{n_prompts:d} fixed prompts') +def step_fixed_prompts(context, n_prompts): + context.prompts.extend([str(0)*(context.n_batch if context.n_batch is not None else 512) for i in range(n_prompts)]) + 
context.n_prompts = n_prompts + + @step(u'a "{passkey}" passkey challenge prompt with the passkey inserted every {i_pos:d} junk') def step_prompt_passkey(context, passkey, i_pos): prompt = "" @@ -301,6 +313,7 @@ def step_prompt_passkey(context, passkey, i_pos): passkey_highlight = "\x1b[33m" + passkey + "\x1b[0m" print(f"Passkey challenge:\n```{prompt.replace(passkey, passkey_highlight)}```\n") context.prompts.append(context.prompt_prefix + prompt + context.prompt_suffix) + context.n_prompts = len(context.prompts) @step(u'an OAI compatible chat completions request with {api_error} api error') @@ -341,11 +354,13 @@ async def step_oai_chat_completions(context, api_error): @step(u'a prompt') def step_a_prompt(context): context.prompts.append(context.text) + context.n_prompts = len(context.prompts) @step(u'a prompt {prompt}') def step_a_prompt_prompt(context, prompt): context.prompts.append(prompt) + context.n_prompts = len(context.prompts) @step(u'concurrent completion requests') @@ -430,25 +445,47 @@ async def all_prompts_are_predicted(context, expected_predicted_n=None): @step(u'embeddings are computed for') @async_run_until_complete async def step_compute_embedding(context): + context.n_prompts = 1 context.embeddings = await request_embedding(context.text, base_url=context.base_url) +@step(u'all embeddings are the same') +@async_run_until_complete +async def step_all_embeddings_are_the_same(context): + n_embedding_requests = await gather_tasks_results(context) + assert n_embedding_requests > 0 + embeddings = [] + for i in range(n_embedding_requests): + embedding = context.tasks_result.pop().pop() + embeddings.append(embedding) + assert_embeddings(embedding) + n = len(embeddings) + for i in range(n-1): + for j in range(i+1, n): + embedding1 = np.array(embeddings[i]) + embedding2 = np.array(embeddings[j]) + if context.debug: + print(f"embedding1: {embedding1[-8:]}\n") + print(f"embedding2: {embedding2[-8:]}\n") + similarity = np.dot(embedding1, embedding2) / (np.linalg.norm(embedding1) * np.linalg.norm(embedding2)) + msg = f"Similarity between {i} and {j}: {similarity:.10f}" + if context.debug: + print(f"{msg}\n") + assert np.isclose(similarity, 1.0, rtol=1e-05, atol=1e-08, equal_nan=False), msg + @step(u'embeddings are generated') def step_assert_embeddings(context): - if len(context.prompts) == 0: - assert_embeddings(context.embeddings) - else: - assert len(context.embeddings) == len(context.prompts), (f"unexpected response:\n" - f"context.prompts={context.prompts}\n" - f"context.embeddings={context.embeddings}") - for embedding in context.embeddings: - context.prompts.pop() - assert_embeddings(embedding) + assert context.n_prompts == len(context.embeddings), (f"unexpected response:\n" + f"context.n_prompts={context.n_prompts}\n" + f"context.embeddings={context.embeddings}") + for embedding in context.embeddings: + assert_embeddings(embedding) @step(u'an OAI compatible embeddings computation request for') @async_run_until_complete async def step_oai_compute_embeddings(context): + context.n_prompts = 1 context.embeddings = await request_oai_embeddings(context.text, base_url=context.base_url, user_api_key=context.user_api_key, @@ -462,6 +499,7 @@ async def step_oai_compute_embeddings_multiple_inputs(context): base_url=context.base_url, user_api_key=context.user_api_key, model=context.model) + context.prompts.clear() @step(u'concurrent embedding requests') @@ -488,9 +526,9 @@ async def step_concurrent_oai_embedding_requests(context): @async_run_until_complete() async def 
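The identity check above boils down to cosine similarity between embedding vectors. The same computation as a standalone helper, for reference:

```python
# Cosine similarity, as used by the "all embeddings are the same" step above.
import numpy as np

def cosine_similarity(a, b) -> float:
    a = np.asarray(a, dtype=np.float64)
    b = np.asarray(b, dtype=np.float64)
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

# Parallel vectors have similarity 1.0, which is what the step asserts with np.isclose.
assert np.isclose(cosine_similarity([1.0, 2.0, 3.0], [2.0, 4.0, 6.0]), 1.0)
```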
all_embeddings_are_generated(context): n_embedding_requests = await gather_tasks_results(context) - assert n_embedding_requests > 0 + assert n_embedding_requests == context.n_prompts for i in range(n_embedding_requests): - assert_embeddings(context.tasks_result.pop()) + assert_embeddings(context.tasks_result.pop().pop()) @step(u'tokenizing') @@ -588,11 +626,11 @@ def step_supported_models(context, i_model, param, preposition, param_value): async def concurrent_requests(context, f_completion, *args, **kwargs): - n_prompts = len(context.prompts) + context.n_prompts = len(context.prompts) if context.debug: - print(f"starting {n_prompts} concurrent completion requests...") - assert n_prompts > 0 - for prompt_no in range(n_prompts): + print(f"starting {context.n_prompts} concurrent completion requests...") + assert context.n_prompts > 0 + for prompt_no in range(context.n_prompts): shifted_args = [context.prompts.pop(), *args] context.concurrent_tasks.append(asyncio.create_task(f_completion(*shifted_args, **kwargs))) await asyncio.sleep(0.1) @@ -765,7 +803,7 @@ async def request_embedding(content, base_url=None): }) as response: assert response.status == 200 response_json = await response.json() - return response_json['embedding'] + return [response_json['embedding']] async def request_oai_embeddings(input, @@ -775,6 +813,7 @@ async def request_oai_embeddings(input, user_api_key = user_api_key if user_api_key is not None else 'nope' if async_client: origin = 'llama.cpp' + headers=[] if user_api_key is not None: headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} async with aiohttp.ClientSession() as session: @@ -783,14 +822,21 @@ async def request_oai_embeddings(input, "input": input, "model": model, }, - headers=headers) as response: + headers=headers, + timeout=3600) as response: assert response.status == 200, f"received status code not expected: {response.status}" assert response.headers['Access-Control-Allow-Origin'] == origin assert response.headers['Content-Type'] == "application/json; charset=utf-8" response_json = await response.json() assert response_json['model'] == model, f"invalid model received: {response_json['model']}" assert response_json['object'] == 'list' - return response_json['data'] + if isinstance(input, collections.abc.Sequence): + embeddings = [] + for an_oai_embeddings in response_json['data']: + embeddings.append(an_oai_embeddings['embedding']) + else: + embeddings = [response_json['data']['embedding']] + return embeddings else: openai.api_key = user_api_key openai.api_base = f'{base_url}/v1' @@ -804,7 +850,7 @@ async def request_oai_embeddings(input, for an_oai_embeddings in oai_embeddings.data: embeddings.append(an_oai_embeddings.embedding) else: - embeddings = oai_embeddings.data.embedding + embeddings = [oai_embeddings.data.embedding] return embeddings @@ -899,6 +945,8 @@ def assert_embeddings(embeddings): assert len(embeddings) > 0 embeddings_computed = False for emb in embeddings: + if not isinstance(emb, float): + assert False, f"Bad embeddings: {embeddings}" if emb != 0: embeddings_computed = True assert embeddings_computed, f"Embeddings: {embeddings}" diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt index 5d4210164..2e4f42ad2 100644 --- a/examples/server/tests/requirements.txt +++ b/examples/server/tests/requirements.txt @@ -1,5 +1,6 @@ aiohttp~=3.9.3 behave~=1.2.6 huggingface_hub~=0.20.3 +numpy~=1.24.4 openai~=0.25.0 prometheus-client~=0.20.0 diff --git a/examples/server/utils.hpp 
b/examples/server/utils.hpp index b6e49d8b9..df0a27782 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -1,15 +1,16 @@ #pragma once -#include -#include -#include -#include -#include -#include +#include "llama.h" +#include "common.h" #include "json.hpp" -#include "../llava/clip.h" +#include +#include +#include +#include + +#define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613" using json = nlohmann::json; @@ -37,83 +38,35 @@ extern bool server_log_json; #define LOG_WARNING(MSG, ...) server_log("WARN", __func__, __LINE__, MSG, __VA_ARGS__) #define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) -enum server_state { - SERVER_STATE_LOADING_MODEL, // Server is starting up, model not fully loaded yet - SERVER_STATE_READY, // Server is ready and model is loaded - SERVER_STATE_ERROR // An error occurred, load_model failed -}; - -enum task_type { - TASK_TYPE_COMPLETION, - TASK_TYPE_CANCEL, - TASK_TYPE_NEXT_RESPONSE, - TASK_TYPE_METRICS -}; - -struct task_server { - int id = -1; // to be filled by llama_server_queue - int target_id; - task_type type; - json data; - bool infill_mode = false; - bool embedding_mode = false; - int multitask_id = -1; -}; - -struct task_result { - int id; - int multitask_id = -1; - bool stop; - bool error; - json result_json; -}; - -struct task_multi { - int id; - std::set subtasks_remaining{}; - std::vector results{}; -}; - -// completion token output with probabilities -struct completion_token_output { - struct token_prob - { - llama_token tok; - float prob; - }; - - std::vector probs; - llama_token tok; - std::string text_to_send; -}; - -struct token_translator { - llama_context * ctx; - std::string operator()(llama_token tok) const { return llama_token_to_piece(ctx, tok); } - std::string operator()(const completion_token_output &cto) const { return (*this)(cto.tok); } -}; +template +static T json_value(const json &body, const std::string &key, const T &default_value) { + // Fallback null to default value + return body.contains(key) && !body.at(key).is_null() + ? body.value(key, default_value) + : default_value; +} static inline void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) { std::stringstream ss_tid; ss_tid << std::this_thread::get_id(); json log = nlohmann::ordered_json{ - {"tid", ss_tid.str()}, + {"tid", ss_tid.str()}, {"timestamp", time(nullptr)}, }; if (server_log_json) { - log.merge_patch( - { - {"level", level}, - {"function", function}, - {"line", line}, - {"msg", message}, - }); + log.merge_patch( { + {"level", level}, + {"function", function}, + {"line", line}, + {"msg", message}, + }); + if (!extra.empty()) { log.merge_patch(extra); } - std::cout << log.dump(-1, ' ', false, json::error_handler_t::replace) << "\n" << std::flush; + printf("%s\n", log.dump(-1, ' ', false, json::error_handler_t::replace).c_str()); } else { char buf[1024]; snprintf(buf, 1024, "%4s [%24s] %s", level, function, message); @@ -136,22 +89,13 @@ static inline void server_log(const char *level, const char *function, int line, } // -// server utils +// chat template utils // -template -static T json_value(const json &body, const std::string &key, const T &default_value) { - // Fallback null to default value - return body.contains(key) && !body.at(key).is_null() - ? body.value(key, default_value) - : default_value; -} - // Check if the template supplied via "--chat-template" is supported or not. 
Returns true if it's valid inline bool verify_custom_template(const std::string & tmpl) { llama_chat_message chat[] = {{"user", "test"}}; - std::vector buf(1); - int res = llama_chat_apply_template(nullptr, tmpl.c_str(), chat, 1, true, buf.data(), buf.size()); + int res = llama_chat_apply_template(nullptr, tmpl.c_str(), chat, 1, true, nullptr, 0); return res >= 0; } @@ -163,7 +107,7 @@ inline std::string format_chat(const struct llama_model * model, const std::stri std::vector chat(messages.size()); for (size_t i = 0; i < messages.size(); ++i) { - auto &curr_msg = messages[i]; + const auto & curr_msg = messages[i]; str[i*2 + 0] = json_value(curr_msg, "role", std::string("")); str[i*2 + 1] = json_value(curr_msg, "content", std::string("")); alloc_size += str[i*2 + 1].length(); @@ -183,261 +127,13 @@ inline std::string format_chat(const struct llama_model * model, const std::stri res = llama_chat_apply_template(model, ptr_tmpl, chat.data(), chat.size(), true, buf.data(), buf.size()); } - std::string formatted_chat(buf.data(), res); + const std::string formatted_chat(buf.data(), res); + LOG_VERBOSE("formatted_chat", {{"text", formatted_chat.c_str()}}); return formatted_chat; } -// -// work queue utils -// - -struct llama_server_queue { - int id = 0; - std::mutex mutex_tasks; - bool running; - // queues - std::vector queue_tasks; - std::vector queue_tasks_deferred; - std::vector queue_multitasks; - std::condition_variable condition_tasks; - // callback functions - std::function callback_new_task; - std::function callback_finish_multitask; - std::function callback_run_slots; - - // Add a new task to the end of the queue - int post(task_server task) { - std::unique_lock lock(mutex_tasks); - if (task.id == -1) { - task.id = id++; - LOG_VERBOSE("new task id", {{"new_id", task.id}}); - } - queue_tasks.push_back(std::move(task)); - condition_tasks.notify_one(); - return task.id; - } - - // Add a new task, but defer until one slot is available - void defer(task_server task) { - std::unique_lock lock(mutex_tasks); - queue_tasks_deferred.push_back(std::move(task)); - } - - // Get the next id for creating anew task - int get_new_id() { - std::unique_lock lock(mutex_tasks); - int new_id = id++; - LOG_VERBOSE("new task id", {{"new_id", new_id}}); - return new_id; - } - - // Register function to process a new task - void on_new_task(std::function callback) { - callback_new_task = callback; - } - - // Register function to process a multitask when it is finished - void on_finish_multitask(std::function callback) { - callback_finish_multitask = callback; - } - - // Register the function to be called when all slots data is ready to be processed - void on_run_slots(std::function callback) { - callback_run_slots = callback; - } - - // Call when the state of one slot is changed - void notify_slot_changed() { - // move deferred tasks back to main loop - std::unique_lock lock(mutex_tasks); - for (auto & task : queue_tasks_deferred) { - queue_tasks.push_back(std::move(task)); - } - queue_tasks_deferred.clear(); - } - - // end the start_loop routine - void terminate() { - { - std::unique_lock lock(mutex_tasks); - running = false; - } - condition_tasks.notify_all(); - } - - /** - * Main loop consists of these steps: - * - Wait until a new task arrives - * - Process the task (i.e. 
maybe copy data into slot) - * - Check if multitask is finished - * - Run all slots - */ - void start_loop() { - running = true; - while (true) { - LOG_VERBOSE("new task may arrive", {}); - { - while (true) - { - std::unique_lock lock(mutex_tasks); - if (queue_tasks.empty()) { - lock.unlock(); - break; - } - task_server task = queue_tasks.front(); - queue_tasks.erase(queue_tasks.begin()); - lock.unlock(); - LOG_VERBOSE("callback_new_task", {{"task_id", task.id}}); - callback_new_task(task); - } - LOG_VERBOSE("update_multitasks", {}); - // check if we have any finished multitasks - auto queue_iterator = queue_multitasks.begin(); - while (queue_iterator != queue_multitasks.end()) - { - if (queue_iterator->subtasks_remaining.empty()) - { - // all subtasks done == multitask is done - task_multi current_multitask = *queue_iterator; - callback_finish_multitask(current_multitask); - // remove this multitask - queue_iterator = queue_multitasks.erase(queue_iterator); - } - else - { - ++queue_iterator; - } - } - // all tasks in the current loop is processed, slots data is now ready - LOG_VERBOSE("callback_run_slots", {}); - callback_run_slots(); - } - LOG_VERBOSE("wait for new task", {}); - // wait for new task - { - std::unique_lock lock(mutex_tasks); - if (queue_tasks.empty()) { - if (!running) { - LOG_VERBOSE("ending start_loop", {}); - return; - } - condition_tasks.wait(lock, [&]{ - return (!queue_tasks.empty() || !running); - }); - } - } - } - } - - // - // functions to manage multitasks - // - - // add a multitask by specifying the id of all subtask (subtask is a task_server) - void add_multitask(int multitask_id, std::vector& sub_ids) - { - std::lock_guard lock(mutex_tasks); - task_multi multi; - multi.id = multitask_id; - std::copy(sub_ids.begin(), sub_ids.end(), std::inserter(multi.subtasks_remaining, multi.subtasks_remaining.end())); - queue_multitasks.push_back(multi); - } - - // updatethe remaining subtasks, while appending results to multitask - void update_multitask(int multitask_id, int subtask_id, task_result& result) - { - std::lock_guard lock(mutex_tasks); - for (auto& multitask : queue_multitasks) - { - if (multitask.id == multitask_id) - { - multitask.subtasks_remaining.erase(subtask_id); - multitask.results.push_back(result); - } - } - } -}; - -struct llama_server_response { - typedef std::function callback_multitask_t; - callback_multitask_t callback_update_multitask; - // for keeping track of all tasks waiting for the result - std::set waiting_task_ids; - // the main result queue - std::vector queue_results; - std::mutex mutex_results; - std::condition_variable condition_results; - - // add the task_id to the list of tasks waiting for response - void add_waiting_task_id(int task_id) { - LOG_VERBOSE("waiting for task id", {{"task_id", task_id}}); - std::unique_lock lock(mutex_results); - waiting_task_ids.insert(task_id); - } - - // when the request is finished, we can remove task associated with it - void remove_waiting_task_id(int task_id) { - LOG_VERBOSE("remove waiting for task id", {{"task_id", task_id}}); - std::unique_lock lock(mutex_results); - waiting_task_ids.erase(task_id); - } - - // This function blocks the thread until there is a response for this task_id - task_result recv(int task_id) { - while (true) - { - std::unique_lock lock(mutex_results); - condition_results.wait(lock, [&]{ - return !queue_results.empty(); - }); - - for (int i = 0; i < (int) queue_results.size(); i++) - { - if (queue_results[i].id == task_id) - { - assert(queue_results[i].multitask_id == 
-1); - task_result res = queue_results[i]; - queue_results.erase(queue_results.begin() + i); - return res; - } - } - } - - // should never reach here - } - - // Register the function to update multitask - void on_multitask_update(callback_multitask_t callback) { - callback_update_multitask = callback; - } - - // Send a new result to a waiting task_id - void send(task_result result) { - std::unique_lock lock(mutex_results); - LOG_VERBOSE("send new result", {{"task_id", result.id}}); - for (auto& task_id : waiting_task_ids) { - // LOG_TEE("waiting task id %i \n", task_id); - // for now, tasks that have associated parent multitasks just get erased once multitask picks up the result - if (result.multitask_id == task_id) - { - LOG_VERBOSE("callback_update_multitask", {{"task_id", task_id}}); - callback_update_multitask(task_id, result.id, result); - continue; - } - - if (result.id == task_id) - { - LOG_VERBOSE("queue_results.push_back", {{"task_id", task_id}}); - queue_results.push_back(result); - condition_results.notify_all(); - return; - } - } - } -}; - // // base64 utils (TODO: move to common in the future) // @@ -447,13 +143,11 @@ static const std::string base64_chars = "abcdefghijklmnopqrstuvwxyz" "0123456789+/"; -static inline bool is_base64(uint8_t c) -{ +static inline bool is_base64(uint8_t c) { return (isalnum(c) || (c == '+') || (c == '/')); } -static inline std::vector base64_decode(const std::string & encoded_string) -{ +static inline std::vector base64_decode(const std::string & encoded_string) { int i = 0; int j = 0; int in_ = 0; @@ -465,13 +159,10 @@ static inline std::vector base64_decode(const std::string & encoded_str std::vector ret; - while (in_len-- && (encoded_string[in_] != '=') && is_base64(encoded_string[in_])) - { + while (in_len-- && (encoded_string[in_] != '=') && is_base64(encoded_string[in_])) { char_array_4[i++] = encoded_string[in_]; in_++; - if (i == 4) - { - for (i = 0; i <4; i++) - { + if (i == 4) { + for (i = 0; i < 4; i++) { char_array_4[i] = base64_chars.find(char_array_4[i]); } @@ -479,23 +170,20 @@ static inline std::vector base64_decode(const std::string & encoded_str char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2); char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3]; - for (i = 0; (i < 3); i++) - { + for (i = 0; (i < 3); i++) { ret.push_back(char_array_3[i]); } + i = 0; } } - if (i) - { - for (j = i; j <4; j++) - { + if (i) { + for (j = i; j < 4; j++) { char_array_4[j] = 0; } - for (j = 0; j <4; j++) - { + for (j = 0; j < 4; j++) { char_array_4[j] = base64_chars.find(char_array_4[j]); } @@ -503,8 +191,7 @@ static inline std::vector base64_decode(const std::string & encoded_str char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2); char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3]; - for (j = 0; (j < i - 1); j++) - { + for (j = 0; j < i - 1; j++) { ret.push_back(char_array_3[j]); } } @@ -516,8 +203,7 @@ static inline std::vector base64_decode(const std::string & encoded_str // random string / id // -static std::string random_string() -{ +static std::string random_string() { static const std::string str("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"); std::random_device rd; @@ -532,10 +218,10 @@ static std::string random_string() return result; } -static std::string gen_chatcmplid() -{ +static std::string gen_chatcmplid() { std::stringstream chatcmplid; chatcmplid << "chatcmpl-" << random_string(); + return chatcmplid.str(); } @@ -543,91 
+229,316 @@ static std::string gen_chatcmplid() // other common utils // -static size_t common_part(const std::vector &a, const std::vector &b) -{ +static size_t common_part(const std::vector & a, const std::vector & b) { size_t i; - for (i = 0; i < a.size() && i < b.size() && a[i] == b[i]; i++) - { - } + for (i = 0; i < a.size() && i < b.size() && a[i] == b[i]; i++) {} + return i; } -static bool ends_with(const std::string &str, const std::string &suffix) -{ - return str.size() >= suffix.size() && - 0 == str.compare(str.size() - suffix.size(), suffix.size(), suffix); +static bool ends_with(const std::string & str, const std::string & suffix) { + return str.size() >= suffix.size() && 0 == str.compare(str.size() - suffix.size(), suffix.size(), suffix); } -static size_t find_partial_stop_string(const std::string &stop, - const std::string &text) -{ - if (!text.empty() && !stop.empty()) - { +static size_t find_partial_stop_string(const std::string &stop, const std::string &text) { + if (!text.empty() && !stop.empty()) { const char text_last_char = text.back(); - for (int64_t char_index = stop.size() - 1; char_index >= 0; char_index--) - { - if (stop[char_index] == text_last_char) - { + for (int64_t char_index = stop.size() - 1; char_index >= 0; char_index--) { + if (stop[char_index] == text_last_char) { const std::string current_partial = stop.substr(0, char_index + 1); - if (ends_with(text, current_partial)) - { + if (ends_with(text, current_partial)) { return text.size() - char_index - 1; } } } } + return std::string::npos; } // TODO: reuse llama_detokenize template -static std::string tokens_to_str(llama_context *ctx, Iter begin, Iter end) -{ +static std::string tokens_to_str(llama_context * ctx, Iter begin, Iter end) { std::string ret; - for (; begin != end; ++begin) - { + for (; begin != end; ++begin) { ret += llama_token_to_piece(ctx, *begin); } + return ret; } // format incomplete utf-8 multibyte character for output -static std::string tokens_to_output_formatted_string(const llama_context *ctx, const llama_token token) -{ +static std::string tokens_to_output_formatted_string(const llama_context * ctx, const llama_token token) { std::string out = token == -1 ? 
"" : llama_token_to_piece(ctx, token); + // if the size is 1 and first bit is 1, meaning it's a partial character // (size > 1 meaning it's already a known token) - if (out.size() == 1 && (out[0] & 0x80) == 0x80) - { + if (out.size() == 1 && (out[0] & 0x80) == 0x80) { std::stringstream ss; ss << std::hex << (out[0] & 0xff); std::string res(ss.str()); out = "byte: \\x" + res; } + return out; } +struct completion_token_output { + llama_token tok; + std::string text_to_send; + + struct token_prob { + llama_token tok; + float prob; + }; + + std::vector probs; +}; + // convert a vector of completion_token_output to json -static json probs_vector_to_json(const llama_context *ctx, const std::vector &probs) -{ +static json probs_vector_to_json(const llama_context * ctx, const std::vector & probs) { json out = json::array(); - for (const auto &prob : probs) - { + + for (const auto & prob : probs) { json probs_for_token = json::array(); - for (const auto &p : prob.probs) - { - std::string tok_str = tokens_to_output_formatted_string(ctx, p.tok); - probs_for_token.push_back(json - { + + for (const auto & p : prob.probs) { + const std::string tok_str = tokens_to_output_formatted_string(ctx, p.tok); + probs_for_token.push_back(json { {"tok_str", tok_str}, {"prob", p.prob}, }); } - std::string tok_str = tokens_to_output_formatted_string(ctx, prob.tok); - out.push_back(json{ + + const std::string tok_str = tokens_to_output_formatted_string(ctx, prob.tok); + out.push_back(json { {"content", tok_str}, {"probs", probs_for_token}, }); } + return out; } + +// +// OAI utils +// + +static json oaicompat_completion_params_parse( + const struct llama_model * model, + const json & body, /* openai api json semantics */ + const std::string & chat_template) { + json llama_params; + + llama_params["__oaicompat"] = true; + + // Map OpenAI parameters to llama.cpp parameters + // + // For parameters that are defined by the OpenAI documentation (e.g. 
+ // temperature), we explicitly specify OpenAI's intended default; we + // need to do that because sometimes OpenAI disagrees with llama.cpp + // + // https://platform.openai.com/docs/api-reference/chat/create + llama_sampling_params default_sparams; + llama_params["model"] = json_value(body, "model", std::string("unknown")); + llama_params["prompt"] = format_chat(model, chat_template, body["messages"]); + llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); + llama_params["temperature"] = json_value(body, "temperature", 0.0); + llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); + llama_params["top_p"] = json_value(body, "top_p", 1.0); + llama_params["n_predict"] = json_value(body, "max_tokens", -1); + llama_params["logit_bias"] = json_value(body, "logit_bias", json::object()); + llama_params["frequency_penalty"] = json_value(body, "frequency_penalty", 0.0); + llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0); + llama_params["seed"] = json_value(body, "seed", LLAMA_DEFAULT_SEED); + llama_params["stream"] = json_value(body, "stream", false); + llama_params["mirostat"] = json_value(body, "mirostat", default_sparams.mirostat); + llama_params["mirostat_tau"] = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); + llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); + llama_params["penalize_nl"] = json_value(body, "penalize_nl", default_sparams.penalize_nl); + llama_params["typical_p"] = json_value(body, "typical_p", default_sparams.typical_p); + llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); + llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); + llama_params["tfs_z"] = json_value(body, "tfs_z", default_sparams.tfs_z); + + if (body.count("grammar") != 0) { + llama_params["grammar"] = json_value(body, "grammar", json::object()); + } + + // Handle 'stop' field + if (body.contains("stop") && body["stop"].is_string()) { + llama_params["stop"] = json::array({body["stop"].get()}); + } else { + llama_params["stop"] = json_value(body, "stop", json::array()); + } + + // Ensure there is ChatML-specific end sequence among stop words + llama_params["stop"].push_back("<|im_end|>"); + + return llama_params; +} + +static json format_final_response_oaicompat(const json & request, json result, bool streaming = false) { + bool stopped_word = result.count("stopped_word") != 0; + bool stopped_eos = json_value(result, "stopped_eos", false); + int num_tokens_predicted = json_value(result, "tokens_predicted", 0); + int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); + std::string content = json_value(result, "content", std::string("")); + + std::string finish_reason = "length"; + if (stopped_word || stopped_eos) { + finish_reason = "stop"; + } + + json choices = + streaming ? json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}) + : json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"message", json{{"content", content}, + {"role", "assistant"}}}}}); + + std::time_t t = std::time(0); + + json res = json { + {"choices", choices}, + {"created", t}, + {"model", + json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, + {"object", streaming ? 
"chat.completion.chunk" : "chat.completion"}, + {"usage", json { + {"completion_tokens", num_tokens_predicted}, + {"prompt_tokens", num_prompt_tokens}, + {"total_tokens", num_tokens_predicted + num_prompt_tokens} + }}, + {"id", gen_chatcmplid()} + }; + + if (server_verbose) { + res["__verbose"] = result; + } + + if (result.contains("completion_probabilities")) { + res["completion_probabilities"] = json_value(result, "completion_probabilities", json::array()); + } + + return res; +} + +// return value is vector as there is one case where we might need to generate two responses +static std::vector format_partial_response_oaicompat(json result) { + if (!result.contains("model") || !result.contains("oaicompat_token_ctr")) { + return std::vector({result}); + } + + bool first = json_value(result, "oaicompat_token_ctr", 0) == 0; + std::string modelname = json_value(result, "model", std::string(DEFAULT_OAICOMPAT_MODEL)); + + bool stopped_word = json_value(result, "stopped_word", false); + bool stopped_eos = json_value(result, "stopped_eos", false); + bool stopped_limit = json_value(result, "stopped_limit", false); + std::string content = json_value(result, "content", std::string("")); + + std::string finish_reason; + if (stopped_word || stopped_eos) { + finish_reason = "stop"; + } + if (stopped_limit) { + finish_reason = "length"; + } + + std::time_t t = std::time(0); + + json choices; + + if (!finish_reason.empty()) { + choices = json::array({json{{"finish_reason", finish_reason}, + {"index", 0}, + {"delta", json::object()}}}); + } else { + if (first) { + if (content.empty()) { + choices = json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{{"role", "assistant"}}}}}); + } else { + // We have to send this as two updates to conform to openai behavior + json initial_ret = json{{"choices", json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"role", "assistant"} + }}}})}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"}}; + + json second_ret = json{ + {"choices", json::array({json{{"finish_reason", nullptr}, + {"index", 0}, + {"delta", json{ + {"content", content}}} + }})}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"}}; + + return std::vector({initial_ret, second_ret}); + } + } else { + // Some idiosyncrasy in task processing logic makes several trailing calls + // with empty content, we ignore these at the calee site. 
+ if (content.empty()) { + return std::vector({json::object()}); + } + + choices = json::array({json{ + {"finish_reason", nullptr}, + {"index", 0}, + {"delta", + json{ + {"content", content}, + }}, + }}); + } + } + + json ret = json { + {"choices", choices}, + {"created", t}, + {"id", gen_chatcmplid()}, + {"model", modelname}, + {"object", "chat.completion.chunk"} + }; + + return std::vector({ret}); +} + +static json format_embeddings_response_oaicompat(const json & request, const json & embeddings) { + json res = json { + {"model", json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))}, + {"object", "list"}, + {"usage", json { + {"prompt_tokens", 0}, + {"total_tokens", 0} + }}, + {"data", embeddings} + }; + + return res; +} + +static json format_tokenizer_response(const std::vector & tokens) { + return json { + {"tokens", tokens} + }; +} + +static json format_detokenized_response(const std::string & content) { + return json { + {"content", content} + }; +} diff --git a/llama.cpp b/llama.cpp index b27aa2728..478099648 100644 --- a/llama.cpp +++ b/llama.cpp @@ -13541,18 +13541,22 @@ LLAMA_API int32_t llama_chat_apply_template( curr_tmpl = std::string(model_template.data(), model_template.size()); } } + // format the chat to string std::vector chat_vec; chat_vec.resize(n_msg); for (size_t i = 0; i < n_msg; i++) { chat_vec[i] = &chat[i]; } + std::string formatted_chat; int32_t res = llama_chat_apply_template_internal(curr_tmpl, chat_vec, formatted_chat, add_ass); if (res < 0) { return res; } - strncpy(buf, formatted_chat.c_str(), length); + if (buf && length > 0) { + strncpy(buf, formatted_chat.c_str(), length); + } return res; } From 55a2a900ff4a02fc33708ac7858d595d289a3f2a Mon Sep 17 00:00:00 2001 From: Minsoo Cheong <54794500+mscheong01@users.noreply.github.com> Date: Thu, 7 Mar 2024 19:42:39 +0900 Subject: [PATCH 803/811] server : add `/v1/completions` endpoint (#5914) * add-`/v1/completions`-endpoint * add legacy comment to `/completion` endpoint --- examples/server/server.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 3bdbde954..f255ad764 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2923,7 +2923,7 @@ int main(int argc, char ** argv) { res.set_content(data.dump(), "application/json; charset=utf-8"); }); - svr.Post("/completion", [&ctx_server, &validate_api_key](const httplib::Request & req, httplib::Response & res) { + const auto completions = [&ctx_server, &validate_api_key](const httplib::Request & req, httplib::Response & res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; @@ -3001,7 +3001,11 @@ int main(int argc, char ** argv) { res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); } - }); + }; + + svr.Post("/completion", completions); // legacy + svr.Post("/completions", completions); + svr.Post("/v1/completions", completions); svr.Get("/v1/models", [¶ms, &model_meta](const httplib::Request & req, httplib::Response & res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); From 89fb735fcfd21781a8194b211cf32824beb3f71f Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Thu, 7 Mar 2024 19:14:49 +0800 Subject: [PATCH 804/811] Revert "[SYCL] fix error when set main gpu to non-zero (#5901)" (#5918) This reverts commit ceca1aef0738b57951cd12c603c3477e75312dec. 
--- ggml-sycl.cpp | 172 ++++++++++++++++++++------------------------------ ggml-sycl.h | 1 - llama.cpp | 16 ++--- 3 files changed, 73 insertions(+), 116 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 221d67b8d..ddd951dd6 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -3559,31 +3559,12 @@ class sycl_gpu_mgr { int work_group_size = 0; std::string gpus_list = ""; - /* - Use all GPU with same top max compute units - */ sycl_gpu_mgr() { detect_sycl_gpu_list_with_max_cu(); get_allow_gpus(); create_context_with_gpus(); } - /* - Use the assigned GPU as only one - */ - sycl_gpu_mgr(int main_gpu_id) { - sycl::device device = dpct::dev_mgr::instance().get_device(main_gpu_id); - dpct::device_info prop; - dpct::get_device_info(prop, device); - gpus.push_back(main_gpu_id); - devices.push_back(device); - work_group_size = prop.get_max_work_group_size(); - max_compute_units = prop.get_max_compute_units(); - - get_allow_gpus(); - create_context_with_gpus(); - } - void create_context_with_gpus() { sycl::context ctx = sycl::context(devices); assert(gpus.size() > 0); @@ -3599,7 +3580,7 @@ class sycl_gpu_mgr { gpus_list += std::to_string(gpus[i]); gpus_list += ","; } - if (gpus_list.length() > 1) { + if (gpus_list.length() > 2) { gpus_list.pop_back(); } } @@ -3648,8 +3629,8 @@ class sycl_gpu_mgr { if (gpus[i] == id) return i; } - printf("miss to get device index by id=%d\n", id); - GGML_ASSERT(false); + assert(false); + return -1; } int get_next_index(int id) { @@ -3658,7 +3639,8 @@ class sycl_gpu_mgr { if (gpus[i] == id) return i; } - GGML_ASSERT(false); + assert(false); + return -1; } }; @@ -3667,7 +3649,6 @@ static int g_device_count = -1; static int g_all_sycl_device_count = -1; static int g_main_device = -1; static int g_main_device_id = -1; -static bool g_ggml_backend_sycl_buffer_type_initialized = false; static std::array g_default_tensor_split = {}; @@ -13244,7 +13225,7 @@ void ggml_backend_sycl_print_sycl_devices() { } void print_gpu_device_list() { - fprintf(stderr, "detect %d SYCL GPUs: [%s] with top Max compute units:%d\n", + fprintf(stderr, "detect %d SYCL GPUs: [%s] with Max compute units:%d\n", g_sycl_gpu_mgr->get_gpu_count(), g_sycl_gpu_mgr->gpus_list.c_str(), g_sycl_gpu_mgr->max_compute_units); @@ -13283,15 +13264,6 @@ void ggml_init_sycl() try { #else fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); #endif - -/* NOT REMOVE, keep it for next optimize for XMX. 
-#if defined(SYCL_USE_XMX) - fprintf(stderr, "%s: SYCL_USE_XMX: yes\n", __func__); -#else - fprintf(stderr, "%s: SYCL_USE_XMX: no\n", __func__); -#endif -*/ - if (CHECK_TRY_ERROR(g_all_sycl_device_count = dpct::dev_mgr::instance().device_count()) != 0) { initialized = true; @@ -13300,61 +13272,68 @@ void ggml_init_sycl() try { } GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); ggml_backend_sycl_print_sycl_devices(); + if (!g_sycl_gpu_mgr) g_sycl_gpu_mgr = new sycl_gpu_mgr(); + + g_device_count = g_sycl_gpu_mgr->get_gpu_count(); + g_work_group_size = g_sycl_gpu_mgr->work_group_size; + print_gpu_device_list(); - initialized = true; - g_sycl_loaded = true; - } + int64_t total_vram = 0; - - g_device_count = g_sycl_gpu_mgr->get_gpu_count(); - g_work_group_size = g_sycl_gpu_mgr->work_group_size; - - int64_t total_vram = 0; - - - for (int id = 0; id < GGML_SYCL_MAX_DEVICES; ++id) { - g_device_caps[id].vmm = 0; - g_device_caps[id].device_id = -1; - g_device_caps[id].cc = 0; - g_tensor_split[id] = 0; - g_default_tensor_split[id] = 0; - } - - for (int i = 0; i < g_device_count; ++i) { - int device_id = g_sycl_gpu_mgr->gpus[i]; - g_device_caps[i].vmm = 0; - - dpct::device_info prop; - SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( - prop, dpct::dev_mgr::instance().get_device(device_id)))); - - g_default_tensor_split[i] = total_vram; - total_vram += prop.get_global_mem_size(); - - g_device_caps[i].cc = - 100 * prop.get_major_version() + 10 * prop.get_minor_version(); - } - - for (int i = 0; i < g_device_count; ++i) { - g_default_tensor_split[i] /= total_vram; - } - - for (int i = 0; i < g_device_count; ++i) { - SYCL_CHECK(ggml_sycl_set_device(i)); - - // create sycl streams - for (int is = 0; is < MAX_STREAMS; ++is) { - SYCL_CHECK(CHECK_TRY_ERROR( - g_syclStreams[i][is] = - dpct::get_current_device().create_queue( - g_sycl_gpu_mgr->get_co_ctx(), dpct::get_current_device()))); +/* NOT REMOVE, keep it for next optimize for XMX. 
+#if defined(SYCL_USE_XMX) + fprintf(stderr, "%s: SYCL_USE_XMX: yes\n", __func__); +#else + fprintf(stderr, "%s: SYCL_USE_XMX: no\n", __func__); +#endif +*/ + for (int id = 0; id < GGML_SYCL_MAX_DEVICES; ++id) { + g_device_caps[id].vmm = 0; + g_device_caps[id].device_id = -1; + g_device_caps[id].cc = 0; + g_tensor_split[id] = 0; + g_default_tensor_split[id] = 0; } - const dpct::queue_ptr stream = g_syclStreams[i][0]; - // create sycl handle - SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[i] = stream)); + for (int i = 0; i < g_device_count; ++i) { + int device_id = g_sycl_gpu_mgr->gpus[i]; + g_device_caps[i].vmm = 0; + + dpct::device_info prop; + SYCL_CHECK(CHECK_TRY_ERROR(dpct::get_device_info( + prop, dpct::dev_mgr::instance().get_device(device_id)))); + + g_default_tensor_split[i] = total_vram; + total_vram += prop.get_global_mem_size(); + + g_device_caps[i].cc = + 100 * prop.get_major_version() + 10 * prop.get_minor_version(); + } + + for (int i = 0; i < g_device_count; ++i) { + g_default_tensor_split[i] /= total_vram; + } + + for (int i = 0; i < g_device_count; ++i) { + SYCL_CHECK(ggml_sycl_set_device(i)); + + // create sycl streams + for (int is = 0; is < MAX_STREAMS; ++is) { + SYCL_CHECK(CHECK_TRY_ERROR( + g_syclStreams[i][is] = + dpct::get_current_device().create_queue( + g_sycl_gpu_mgr->get_co_ctx(), dpct::get_current_device()))); + } + + const dpct::queue_ptr stream = g_syclStreams[i][0]; + // create sycl handle + SYCL_CHECK(CHECK_TRY_ERROR(g_sycl_handles[i] = stream)); + } + + initialized = true; + g_sycl_loaded = true; } } catch (sycl::exception const &exc) { @@ -16753,24 +16732,22 @@ static ggml_backend_buffer_type_i ggml_backend_sycl_buffer_type_interface = { /* .is_host = */ nullptr, }; -ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device_index) { - if (device_index>=g_device_count or device_index<0) { - printf("ggml_backend_sycl_buffer_type error: device_index:%d is out of range [0, %d], miss to call ggml_backend_sycl_set_single_device()\n", - device_index, g_device_count-1); - GGML_ASSERT(device_indexgpus[i])}, }; } - g_ggml_backend_sycl_buffer_type_initialized = true; + ggml_backend_sycl_buffer_type_initialized = true; } - return &ggml_backend_sycl_buffer_types[device_index]; + + return &ggml_backend_sycl_buffer_types[device]; } // sycl split buffer type @@ -17519,17 +17496,6 @@ GGML_API GGML_CALL int ggml_backend_sycl_get_device_index(int device_id) { return g_sycl_gpu_mgr->get_index(device_id); } -GGML_API GGML_CALL void ggml_backend_sycl_set_single_device(int main_gpu_id) { - GGML_ASSERT(main_gpu_idbackends.push_back(backend); } else { // LLAMA_SPLIT_LAYER requires a backend for each GPU - + int id_list[GGML_SYCL_MAX_DEVICES]; + ggml_sycl_get_gpu_list(id_list, GGML_SYCL_MAX_DEVICES); for (int i = 0; i < ggml_backend_sycl_get_device_count(); ++i) { + int device_id = id_list[i]; ggml_backend_t backend = ggml_backend_sycl_init(i); if (backend == nullptr) { - int id_list[GGML_SYCL_MAX_DEVICES]; - ggml_sycl_get_gpu_list(id_list, GGML_SYCL_MAX_DEVICES); - LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, id_list[i], i); + LLAMA_LOG_ERROR("%s: failed to initialize SYCL%d (index %d)backend\n", __func__, device_id, i); llama_free(ctx); return nullptr; } From 6cdabe652695167263c8b447520987b11856f7ca Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 7 Mar 2024 16:32:38 +0200 Subject: [PATCH 805/811] llama-bench : add embeddings option (#5924) * llama-bench : add embeddings option * llama-bench : do not hard code embd default value 
--------- Co-authored-by: slaren --- examples/llama-bench/llama-bench.cpp | 30 +++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index aa79d002a..2ff86ef6f 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -173,6 +173,7 @@ struct cmd_params { std::vector no_kv_offload; std::vector> tensor_split; std::vector use_mmap; + std::vector embeddings; int reps; bool verbose; output_formats output_format; @@ -192,6 +193,7 @@ static const cmd_params cmd_params_defaults = { /* no_kv_offload */ {false}, /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, /* use_mmap */ {true}, + /* embeddings */ {false}, /* reps */ 5, /* verbose */ false, /* output_format */ MARKDOWN @@ -214,6 +216,7 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); printf(" -mmp, --mmap <0|1> (default: %s)\n", join(cmd_params_defaults.use_mmap, ",").c_str()); + printf(" -embd, --embeddings <0|1> (default: %s)\n", join(cmd_params_defaults.embeddings, ",").c_str()); printf(" -ts, --tensor_split (default: 0)\n"); printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); printf(" -o, --output (default: %s)\n", output_format_str(cmd_params_defaults.output_format)); @@ -382,6 +385,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.use_mmap.insert(params.use_mmap.end(), p.begin(), p.end()); + } else if (arg == "-embd" || arg == "--embeddings") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.embeddings.insert(params.embeddings.end(), p.begin(), p.end()); } else if (arg == "-ts" || arg == "--tensor-split") { if (++i >= argc) { invalid_param = true; @@ -453,6 +463,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } if (params.use_mmap.empty()) { params.use_mmap = cmd_params_defaults.use_mmap; } + if (params.embeddings.empty()) { params.embeddings = cmd_params_defaults.embeddings; } if (params.n_threads.empty()) { params.n_threads = cmd_params_defaults.n_threads; } return params; @@ -472,6 +483,7 @@ struct cmd_params_instance { bool no_kv_offload; std::vector tensor_split; bool use_mmap; + bool embeddings; llama_model_params to_llama_mparams() const { llama_model_params mparams = llama_model_default_params(); @@ -502,6 +514,7 @@ struct cmd_params_instance { cparams.type_k = type_k; cparams.type_v = type_v; cparams.offload_kqv = !no_kv_offload; + cparams.embeddings = embeddings; return cparams; } @@ -517,6 +530,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) for (const auto & mmp : params.use_mmap) + for (const auto & embd : params.embeddings) for (const auto & nb : params.n_batch) for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) @@ -540,6 +554,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, + /* .embeddings = */ 
embd, }; instances.push_back(instance); } @@ -562,6 +577,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .tensor_split = */ ts, /* .use_mmap = */ mmp, + /* .embeddings = */ embd, }; instances.push_back(instance); } @@ -597,6 +613,7 @@ struct test { bool no_kv_offload; std::vector tensor_split; bool use_mmap; + bool embeddings; int n_prompt; int n_gen; std::string test_time; @@ -619,6 +636,7 @@ struct test { no_kv_offload = inst.no_kv_offload; tensor_split = inst.tensor_split; use_mmap = inst.use_mmap; + embeddings = inst.embeddings; n_prompt = inst.n_prompt; n_gen = inst.n_gen; // RFC 3339 date-time format @@ -690,7 +708,7 @@ struct test { "n_batch", "n_threads", "type_k", "type_v", "n_gpu_layers", "split_mode", "main_gpu", "no_kv_offload", - "tensor_split", "use_mmap", + "tensor_split", "use_mmap", "embeddings", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -710,7 +728,7 @@ struct test { } if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || field == "gpu_blas" || field == "blas" || field == "sycl" ||field == "f16_kv" || field == "no_kv_offload" || - field == "use_mmap") { + field == "use_mmap" || field == "embeddings") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -744,7 +762,7 @@ struct test { std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), split_mode_str(split_mode), std::to_string(main_gpu), std::to_string(no_kv_offload), - tensor_split_str, std::to_string(use_mmap), + tensor_split_str, std::to_string(use_mmap), std::to_string(embeddings), std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -914,6 +932,9 @@ struct markdown_printer : public printer { if (field == "use_mmap") { return "mmap"; } + if (field == "embeddings") { + return "embd"; + } if (field == "tensor_split") { return "ts"; } @@ -957,6 +978,9 @@ struct markdown_printer : public printer { if (params.use_mmap.size() > 1 || params.use_mmap != cmd_params_defaults.use_mmap) { fields.emplace_back("use_mmap"); } + if (params.embeddings.size() > 1 || params.embeddings != cmd_params_defaults.embeddings) { + fields.emplace_back("embeddings"); + } fields.emplace_back("test"); fields.emplace_back("t/s"); From 581ed5c4fe3a8909aaa8313633ac443f471ba755 Mon Sep 17 00:00:00 2001 From: "UEXTM.com" <84163508+uextm@users.noreply.github.com> Date: Fri, 8 Mar 2024 04:35:04 -0500 Subject: [PATCH 806/811] log : fix MSVC compile errors (#5643) MSVC gives the following error with the existing macros: `Error C2059 : syntax error: ','` This patch adds `##` as a prefix to `__VA_ARGS__` to address this error. --- common/log.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/common/log.h b/common/log.h index e4e1b9f4f..eb111e784 100644 --- a/common/log.h +++ b/common/log.h @@ -297,7 +297,7 @@ inline std::string log_filename_generator_impl(LogTriState multilog, const std:: #ifndef _MSC_VER #define LOG(...) LOG_IMPL(__VA_ARGS__, "") #else - #define LOG(str, ...) LOG_IMPL("%s" str, "", __VA_ARGS__, "") + #define LOG(str, ...) LOG_IMPL("%s" str, "", ##__VA_ARGS__, "") #endif // Main TEE macro. @@ -311,7 +311,7 @@ inline std::string log_filename_generator_impl(LogTriState multilog, const std:: #ifndef _MSC_VER #define LOG_TEE(...) 
LOG_TEE_IMPL(__VA_ARGS__, "") #else - #define LOG_TEE(str, ...) LOG_TEE_IMPL("%s" str, "", __VA_ARGS__, "") + #define LOG_TEE(str, ...) LOG_TEE_IMPL("%s" str, "", ##__VA_ARGS__, "") #endif // LOG macro variants with auto endline. @@ -319,8 +319,8 @@ inline std::string log_filename_generator_impl(LogTriState multilog, const std:: #define LOGLN(...) LOG_IMPL(__VA_ARGS__, "\n") #define LOG_TEELN(...) LOG_TEE_IMPL(__VA_ARGS__, "\n") #else - #define LOGLN(str, ...) LOG_IMPL("%s" str, "", __VA_ARGS__, "\n") - #define LOG_TEELN(str, ...) LOG_TEE_IMPL("%s" str, "", __VA_ARGS__, "\n") + #define LOGLN(str, ...) LOG_IMPL("%s" str, "", ##__VA_ARGS__, "\n") + #define LOG_TEELN(str, ...) LOG_TEE_IMPL("%s" str, "", ##__VA_ARGS__, "\n") #endif // INTERNAL, DO NOT USE From af37fd8b30e37ccbffdd82e6f48559e2fb7ce7dd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 8 Mar 2024 12:40:02 +0200 Subject: [PATCH 807/811] server : fix EOS token detection with disabled cache (#5938) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index f255ad764..1434095fc 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1123,7 +1123,7 @@ struct server_context { }); } - if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(model)) { + if (result.tok == llama_token_eos(model)) { slot.stopped_eos = true; slot.has_next_token = false; From e457fb3540e0aaec47cfde0abf784c213f9216ee Mon Sep 17 00:00:00 2001 From: Don Mahurin Date: Fri, 8 Mar 2024 02:41:50 -0800 Subject: [PATCH 808/811] llama : assume tied weights if lm_head/output weights is missing (#5824) This is to support model configurations with "tie_word_embeddings" set to true. Co-authored-by: Don Mahurin <2797413+dmahurin@users.noreply.github.com> --- llama.cpp | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 4225f9555..458382b21 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3888,7 +3888,13 @@ static bool llm_load_tensors( { model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); if (model.arch != LLM_ARCH_MINICPM){ - model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, false); + // if output is NULL, init from the input tok embed + if (model.output == NULL) { + model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); + } } } From 76e868821a94072fbc87cb1fcca291694319eae8 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Fri, 8 Mar 2024 12:25:04 +0100 Subject: [PATCH 809/811] server: metrics: add llamacpp:prompt_seconds_total and llamacpp:tokens_predicted_seconds_total, reset bucket only on /metrics. Fix values cast to int. Add Process-Start-Time-Unix header. 
(#5937) Closes #5850 --- examples/server/server.cpp | 47 ++++++++++++++----- examples/server/tests/features/server.feature | 1 + examples/server/tests/features/steps/steps.py | 11 ++++- 3 files changed, 46 insertions(+), 13 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1434095fc..109ff7175 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -335,8 +335,12 @@ struct server_slot { }; struct server_metrics { + const int64_t t_start = ggml_time_us(); + uint64_t n_prompt_tokens_processed_total = 0; + uint64_t t_prompt_processing_total = 0; uint64_t n_tokens_predicted_total = 0; + uint64_t t_tokens_generation_total = 0; uint64_t n_prompt_tokens_processed = 0; uint64_t t_prompt_processing = 0; @@ -348,12 +352,14 @@ struct server_metrics { n_prompt_tokens_processed_total += slot.n_prompt_tokens_processed; n_prompt_tokens_processed += slot.n_prompt_tokens_processed; t_prompt_processing += slot.t_prompt_processing; + t_prompt_processing_total += slot.t_prompt_processing; } void on_prediction(const server_slot &slot) { - n_tokens_predicted_total += slot.n_decoded; - n_tokens_predicted += slot.n_decoded; - t_tokens_generation += slot.t_token_generation; + n_tokens_predicted_total += slot.n_decoded; + n_tokens_predicted += slot.n_decoded; + t_tokens_generation += slot.t_token_generation; + t_tokens_generation_total += slot.t_token_generation; } void reset_bucket() { @@ -1502,9 +1508,12 @@ struct server_context { { "idle", n_idle_slots }, { "processing", n_processing_slots }, { "deferred", queue_tasks.queue_tasks_deferred.size() }, + { "t_start", metrics.t_start}, { "n_prompt_tokens_processed_total", metrics.n_prompt_tokens_processed_total}, + { "t_tokens_generation_total", metrics.t_tokens_generation_total}, { "n_tokens_predicted_total", metrics.n_tokens_predicted_total}, + { "t_prompt_processing_total", metrics.t_prompt_processing_total}, { "n_prompt_tokens_processed", metrics.n_prompt_tokens_processed}, { "t_prompt_processing", metrics.t_prompt_processing}, @@ -1517,7 +1526,9 @@ struct server_context { { "slots", slots_data }, }; - metrics.reset_bucket(); + if (json_value(task.data, "reset_bucket", false)) { + metrics.reset_bucket(); + } queue_results.send(res); } break; } @@ -2709,6 +2720,7 @@ int main(int argc, char ** argv) { task.id_multi = -1; task.id_target = -1; task.type = SERVER_TASK_TYPE_METRICS; + task.data.push_back({{"reset_bucket", true}}); ctx_server.queue_results.add_waiting_task_id(task.id); ctx_server.queue_tasks.post(task); @@ -2732,20 +2744,28 @@ int main(int argc, char ** argv) { {"counter", {{ {"name", "prompt_tokens_total"}, {"help", "Number of prompt tokens processed."}, - {"value", data["n_prompt_tokens_processed_total"]} + {"value", (uint64_t) data["n_prompt_tokens_processed_total"]} + }, { + {"name", "prompt_seconds_total"}, + {"help", "Prompt process time"}, + {"value", (uint64_t) data["t_prompt_processing_total"] / 1.e3} }, { {"name", "tokens_predicted_total"}, {"help", "Number of generation tokens processed."}, - {"value", data["n_tokens_predicted_total"]} + {"value", (uint64_t) data["n_tokens_predicted_total"]} + }, { + {"name", "tokens_predicted_seconds_total"}, + {"help", "Predict process time"}, + {"value", (uint64_t) data["t_tokens_generation_total"] / 1.e3} }}}, {"gauge", {{ {"name", "prompt_tokens_seconds"}, {"help", "Average prompt throughput in tokens/s."}, - {"value", n_prompt_tokens_processed ? 1e3 / t_prompt_processing * n_prompt_tokens_processed : 0} + {"value", n_prompt_tokens_processed ? 
1.e3 / t_prompt_processing * n_prompt_tokens_processed : 0.} },{ {"name", "predicted_tokens_seconds"}, {"help", "Average generation throughput in tokens/s."}, - {"value", n_tokens_predicted ? 1e3 / t_tokens_generation * n_tokens_predicted : 0} + {"value", n_tokens_predicted ? 1.e3 / t_tokens_generation * n_tokens_predicted : 0.} },{ {"name", "kv_cache_usage_ratio"}, {"help", "KV-cache usage. 1 means 100 percent usage."}, @@ -2753,15 +2773,15 @@ int main(int argc, char ** argv) { },{ {"name", "kv_cache_tokens"}, {"help", "KV-cache tokens."}, - {"value", data["kv_cache_tokens_count"]} + {"value", (uint64_t) data["kv_cache_tokens_count"]} },{ {"name", "requests_processing"}, {"help", "Number of request processing."}, - {"value", data["processing"]} + {"value", (uint64_t) data["processing"]} },{ {"name", "requests_deferred"}, {"help", "Number of request deferred."}, - {"value", data["deferred"]} + {"value", (uint64_t) data["deferred"]} }}} }; @@ -2775,13 +2795,16 @@ int main(int argc, char ** argv) { const std::string name = metric_def["name"]; const std::string help = metric_def["help"]; - auto value = json_value(metric_def, "value", 0); + auto value = json_value(metric_def, "value", 0.); prometheus << "# HELP llamacpp:" << name << " " << help << "\n" << "# TYPE llamacpp:" << name << " " << type << "\n" << "llamacpp:" << name << " " << value << "\n"; } } + const int64_t t_start = data["t_start"]; + res.set_header("Process-Start-Time-Unix", std::to_string(t_start)); + res.set_content(prometheus.str(), "text/plain; version=0.0.4"); res.status = 200; // HTTP OK }); diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index f3b758c79..878ac1363 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -29,6 +29,7 @@ Feature: llama.cpp server And a completion request with no api error Then tokens are predicted matching And prometheus metrics are exposed + And metric llamacpp:tokens_predicted is Examples: Prompts | prompt | n_predict | re_content | n_predicted | diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index a0b2ffdfe..d7f005836 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -586,14 +586,24 @@ async def step_prometheus_metrics_exported(context): metric_exported = False if context.debug: print(f"/metrics answer:\n{metrics_raw}\n") + context.metrics = {} for metric in parser.text_string_to_metric_families(metrics_raw): match metric.name: case "llamacpp:kv_cache_usage_ratio": assert len(metric.samples) > 0 metric_exported = True + context.metrics[metric.name] = metric + assert int(metrics_response.headers["Process-Start-Time-Unix"]) > 0, "no header process start time" assert metric_exported, "No metrics exported" +@step(u'metric {metric_name} is {metric_value:d}') +def step_assert_metric_value(context, metric_name, metric_value): + if metric_name not in context.metrics: + assert False, f"no metric {metric_name} in {context.metrics.keys()}" + assert context.metrics[metric_name].samples[0].value == metric_value, f"metric: {context.metrics[metric_name]}" + + @step(u'available models') def step_available_models(context): # openai client always expects an api_key @@ -879,7 +889,6 @@ def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re f' {n_predicted} <> {expected_predicted_n}') - async def gather_tasks_results(context): n_tasks = 
len(context.concurrent_tasks) if context.debug: From 515f7d0d4fce41c752fc253acf30707c3be2531e Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Fri, 8 Mar 2024 10:53:37 -0500 Subject: [PATCH 810/811] llama : fix quantization of shared token_embd (#5944) --- llama.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 458382b21..4a20b7928 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10973,6 +10973,9 @@ struct quantize_state_internal { bool has_imatrix = false; + // used to figure out if a model shares tok_embd with the output weight + bool has_output = false; + quantize_state_internal(const llama_model & model, const llama_model_quantize_params * params) : model(model) , params(params) @@ -11070,8 +11073,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty // for arches that share the same tensor between the token embeddings and the output, we quantize the token embeddings // with the quantization of the output tensor - if (name == tn(LLM_TENSOR_OUTPUT, "weight") || - (LLM_TENSOR_NAMES.at(arch).find(LLM_TENSOR_OUTPUT) == LLM_TENSOR_NAMES.at(arch).end() && name == "token_embd.weight")) { + if (name == tn(LLM_TENSOR_OUTPUT, "weight") || (!qs.has_output && name == tn(LLM_TENSOR_TOKEN_EMBD, "weight"))) { int nx = tensor->ne[0]; if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; @@ -11460,6 +11462,9 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s else if (name.find("ffn_up") != std::string::npos) { ++qs.n_ffn_up; } + else if (name == LLM_TN(model.arch)(LLM_TENSOR_OUTPUT, "weight")) { + qs.has_output = true; + } } if (qs.n_attention_wv != qs.n_ffn_down || (uint32_t)qs.n_attention_wv != model.hparams.n_layer) { LLAMA_LOG_WARN("%s ============ Strange model: n_attention_wv = %d, n_ffn_down = %d, hparams.n_layer = %d\n", From c2101a2e909ac7c08976d414e64e96c90ee5fa9e Mon Sep 17 00:00:00 2001 From: compilade <113953597+compilade@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:31:00 -0500 Subject: [PATCH 811/811] llama : support Mamba Selective State Space Models (#5328) * mamba : begin working on support for Mamba SSM * mamba : begin figuring out how to (ab)use the kv cache for Mamba * mamba : recurrent inference almost works, but incoherent * mamba : recurrent inference WORKS!!! * convert : optionally use d_conv and d_state from config.json for Mamba * mamba : refactor recurrent conv, resulting in 20% perf increase It's still slower than I'd like, but I did not really optimize `ggml_exp` yet. I also refactored `ggml_exp` to work with tensors with more than 2 dimensions. * ggml : parallelize ggml_exp This results in 8% faster token generation for Mamba-130M. * mamba : simplify the conv step with a self-overlapping view Turns out the conv_state can be made smaller by one column. Note that this breaks existing GGUFs of Mamba, because the key_value_length field is tied to the conv_state size. Convolution with a self-overlapping view is cool! And it's much simpler than what I initially thought would be necessary to make the convolution step work with more than 1 token at a time. Next step is to make the SSM step work on batches of tokens too, and thus I need to figure out a way to make a parallel selective scan which will keep the ssm_state small and won't make it bigger by a factor of (n_layer * batch_size). 
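For readers following along, the recurrence being parallelized here is sketched below in plain C++ under simplifying assumptions (single sequence, row-major layouts, softplus discretization, no D*x skip connection); it is a didactic reference for one token, not the ggml kernel that this patch eventually adds, and the names d_inner/d_state follow the Mamba paper's conventions.

```cpp
#include <cmath>
#include <vector>

// Reference update for a single token of a single sequence.
// s is the recurrent state [d_inner x d_state], A is [d_inner x d_state],
// x and dt have d_inner values, B and C have d_state values, y gets d_inner values.
// The D*x skip connection of Mamba is omitted to keep the sketch short.
void ssm_step_ref(std::vector<float> & s, std::vector<float> & y,
                  const std::vector<float> & x,  const std::vector<float> & dt,
                  const std::vector<float> & A,  const std::vector<float> & B,
                  const std::vector<float> & C,  int d_inner, int d_state) {
    for (int i = 0; i < d_inner; ++i) {
        const float dt_i = log1pf(expf(dt[i])); // softplus keeps the step size positive
        float y_i = 0.0f;
        for (int j = 0; j < d_state; ++j) {
            const float dA = expf(dt_i * A[i*d_state + j]); // decay applied to the previous state
            const float dB = dt_i * B[j];                   // input weighting for this state column
            s[i*d_state + j] = s[i*d_state + j]*dA + dB*x[i];
            y_i += s[i*d_state + j] * C[j];                 // project the state back to d_inner
        }
        y[i] = y_i;
    }
}
```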
* llama : fix Mamba KV self size wrongly displaying as f16 instead of f32 Relatedly, I also tried to see if other types than f32 worked for the states, but they don't, because of the operators used. It's probably better anyway to keep lots of precision there, since the states are small anyway. * mamba : fix self-overlapping view depth stride * mamba : handle batches of more than 1 token This means running Mamba no longer crashes when using the default settings! And probably also slightly faster prompt processing. Both batched and non-batched processing yield the same output. Previously, the state was not cleared when starting a sequence. Next step is to make the KV cache API work as expected for Mamba models. * ggml: add ggml_ssm_scan to help with parallel selective scan If the selective scan was implemented without a custom operator, there would be waaay too many nodes in the graph. For example, for Mamba-130M, with a batch size of 512 (the default), a naive selective scan could add at least 24*512=12288 nodes, which is more than LLAMA_MAX_NODES (8192), and that's only for the smallest Mamba model. So it's much cleaner with a custom operator. Not sure about the name, though. * ggml : in ggml_ssm_scan, merge multiple rows in the same vec operation This will help with performance on CPU if ggml_vec_mul_f32 and ggml_vec_add_f32 are ever optimized with SIMD. * mamba : very basic quantization support Mostly works, but there is currently no difference between the variants of a k-quant (e.g. Q4_K_S and Q4_K_M are the same). Most of the SSM-specific weights can be kept in f32 without affecting the size that much, since they are relatively small. (the linear projection weights are responsible for most of Mamba's size) Too much quantization seems to make the state degrade quite fast, and the model begins to output gibberish. It seems to affect bigger models to a lesser extent than small models, but I'm not sure by how much. Experimentation will be needed to figure out which weights are more important for the _M (and _L?) variants of k-quants for Mamba. * convert : fix wrong name for layer norm weight of official Mamba models I was using Q-bert/Mamba-* models before, which have a slightly different naming scheme for the weights. (they start with "model.layers" instead of "backbone.layers") * mamba : fuse more steps of the SSM scan in the ggml_ssm_scan operator This increases performance on CPU by around 30% for prompt processing, and by around 20% for text generation. However, it also makes the ggml_exp and ggml_soft_plus operators unused. Whether or not they should be kept will be decided later. * convert : for Mamba, also consider the "MambaLMHeadModel" arch name It's the name of the class of the official implementation, though they don't use it (yet) in the "architectures" field of config.json * mamba : fix vocab size problems with official models The perplexity was waaaay too high for models with a non-round vocab size. Not sure why, but it needed to be fixed in the metadata. Note that this breaks existing GGUF-converted Mamba models, but **only if** the vocab size was not already rounded. * ggml : remove ggml_exp and ggml_soft_plus They did not exist anyway outside of this branch, and since ggml_ssm_scan fused operations together, they are unused. It's always possible to bring them back if needed. * mamba : remove some useless comments No code change. 
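To see why a single fused operator keeps the graph size independent of the batch size, here is a hedged sketch of the whole-batch loop built on the single-token reference shown earlier; the names, layouts, and argument order are assumptions for illustration only and do not match the real ggml_ssm_scan signature.

```cpp
#include <algorithm>
#include <vector>

// Single-token reference step from the sketch shown earlier in this message.
void ssm_step_ref(std::vector<float> & s, std::vector<float> & y,
                  const std::vector<float> & x,  const std::vector<float> & dt,
                  const std::vector<float> & A,  const std::vector<float> & B,
                  const std::vector<float> & C,  int d_inner, int d_state);

// Conceptual fused scan: all n_tokens are processed inside one call, so the
// compute graph needs a single node per layer instead of a subgraph per token.
static void ssm_scan_batch_ref(std::vector<float> & s, std::vector<float> & y,
                               const std::vector<float> & x,  const std::vector<float> & dt,
                               const std::vector<float> & A,  const std::vector<float> & B,
                               const std::vector<float> & C,
                               int n_tokens, int d_inner, int d_state) {
    std::vector<float> y_t(d_inner);
    for (int t = 0; t < n_tokens; ++t) {
        // slice out the per-token inputs: x and dt carry d_inner values per token,
        // B and C carry d_state values per token; A and the state s are shared
        const std::vector<float> x_t (x.begin()  + t*d_inner, x.begin()  + (t + 1)*d_inner);
        const std::vector<float> dt_t(dt.begin() + t*d_inner, dt.begin() + (t + 1)*d_inner);
        const std::vector<float> B_t (B.begin()  + t*d_state, B.begin()  + (t + 1)*d_state);
        const std::vector<float> C_t (C.begin()  + t*d_state, C.begin()  + (t + 1)*d_state);

        ssm_step_ref(s, y_t, x_t, dt_t, A, B_t, C_t, d_inner, d_state); // updates s in place
        std::copy(y_t.begin(), y_t.end(), y.begin() + t*d_inner);
    }
}
```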
* convert : fix flake8 linter errors * mamba : apply suggestions from code review * mamba : remove unnecessary branch for row-wise ssm_state and C multiplication It was previously done to avoid permuting when only one token is processed at a time (like when generating text), but permuting is cheap, and dynamically changing the compute graph is not future-proof. * ggml : in ggml_ssm_scan, use more appropriate asserts * ggml : rename the destination pointer in ggml_compute_forward_ssm_scan_f32 * mamba : multiple sequences, but one at a time This is a step towards making this Mamba implementation usable with the server example (the way the system prompt is kept when clearing the client slots will need to be changed before this can work, though). The KV cache size for this kind of model is tied to the maximum number of sequences kept at any single time. For now, this number is obtained from n_parallel (plus one, to have an extra sequence to dedicate to the system prompt), but there might be a better way to do this which won't also make the main example use 2 cells even if only 1 is really used. (for this specific case, --parallel 0 helps) Simultaneous sequence processing will probably require changes to ggml_ssm_scan, and possibly a new operator for the conv step. * mamba : support llama_kv_cache_seq_cp This (mis)uses the logic around K shifts, because tokens in a state can't be shifted anyway, and because inp_K_shift has the right shape and type. Using ggml_get_rows is a nice way to do copies, but copy chains can't work. Fortunately, copy chains don't really seem to be used in the examples. Each KV cell is dedicated to the sequence ID corresponding to its own index. * mamba : use a state mask It's cleaner than the previous heuristic of checking for the pos of the first token in the batch. inp_KQ_mask could not be re-used for this, because it has the wrong shape and because it seems more suited to the next step of simultaneous sequence processing (helping with the problem of remembering which token belongs to which sequence(s)/state(s)). * llama : replace the usage of n_ctx with kv_self.size in many places * mamba : use n_tokens directly instead of n_tok * mamba : in comments, properly refer to KV cells instead of slots * mamba : reduce memory usage of ggml_ssm_scan From 290.37 MiB to 140.68 MiB of CPU compute buffer size with Mamba 3B with a batch size of 512. The result tensor of ggml_ssm_scan was previously a big part of the CPU compute buffer size. To make it smaller, it does not contain the intermediate ssm states anymore. Both y and the last ssm state are combined in the result tensor, because it seems only a single tensor can be returned by an operator with the way the graph is built. * mamba : simultaneous sequence processing A batch can now contain tokens from multiple sequences. This is necessary for at least the parallel example, the server example, and the HellaSwag test in the perplexity example. However, for this to be useful, uses of llama_kv_cache_seq_rm/cp will need to be changed to work on whole sequences. * ggml : add ggml_ssm_conv as a new operator for the conv step of Mamba This operator makes it possible to use and update the correct states for each token of the batch in the same way as ggml_ssm_scan. Other solutions which use existing operators would need loops which would add too many nodes to the graph (at least the ones I thought of). 
Using this operator further reduces the size of the CPU compute buffer from 140.68 MiB to 103.20 MiB with Mamba 3B with a batch size of 512. And (at least on CPU), it's a bit faster than before. Note that "ggml_ssm_conv" is probably not the most appropriate name, and it could be changed if a better one is found. * llama : add inp_s_seq as a new input tensor The most convenient implementation to select the correct state (for Mamba) for each token is to directly get the correct index from a tensor. This is why inp_s_seq is storing int32_t and not floats. The other, less convenient way to select the correct state would be to have inp_KQ_mask contain 1.0f for each state used by a token and 0.0f otherwise. This complicates quickly fetching the first used state of a token, and is also less efficient because a whole row of the mask would always need to be read for each token. Using indexes makes it easy to stop searching when there are no more sequences for a token, and the first sequence assigned is always very quickly available (it's the first element of each row). * mamba : support llama_kv_cache_seq_cp copy chains * mamba : support shifting and dividing the kv cache pos * mamba : make the server and parallel examples work with whole sequences A seq_id is dedicated to the system prompt in both cases. * llama : make llama_kv_cache_seq_rm return whether it succeeded or not * mamba : dedicate an input tensor for state copy indices This is cleaner and makes it easier to adapt when/if token positions (and by extension, inp_K_shift) are no longer integers. * mamba : adapt perplexity, batched, and batched-bench examples * perplexity : limit the max number of sequences This adapts to what the loaded model can provide. * llama : add llama_n_max_seq to get the upper limit for seq_ids Used by the perplexity example. * batched : pass n_parallel to the model's context params This should have been there already, but it wasn't. * batched-bench : reserve sequences to support Mamba * batched-bench : fix tokens being put in wrong sequences Generation quality isn't what's measured in there anyway, but at least using the correct sequences avoids using non-consecutive token positions. * mamba : stop abusing attention metadata This breaks existing converted-to-GGUF Mamba models, but will allow supporting mixed architectures like MambaFormer without needing to break Mamba models. This will also allow changing the size of Mamba's states without having to reconvert models in the future. (e.g. using something else than d_conv - 1 columns for the conv_states will not require breaking existing converted Mamba models again) * gguf-py : add new KV metadata key-value pairs for Mamba * llama : add new metadata key-value pairs for Mamba * llama : guard against divisions by zero when n_head is 0 * mamba : rename "unlimited" KV cache property to "recurrent" * mamba : more correctly update the "used" field of the KV cache * ggml : in ggml_ssm_scan, use a threshold for soft_plus This is how the official Mamba implementation does it, and it's also what torch.nn.Softplus does. * convert : for Mamba, fallback to internal NeoX tokenizer The resulting models are exactly the same as if the tokenizer.json and tokenizer_config.json of GPT-NeoX were there. 
* mamba : support state saving and restoring * ggml : implicitly pass src tensors through dst for Mamba-related ops * mamba : clarify some comments * server : fix cache_tokens not getting correctly resized Otherwise, when the "we have to evaluate at least 1 token" special case was triggered, an extra token was kept in cache_tokens even if it was removed from the KV cache. For Mamba, this caused useless prompt reprocessing when the previous request triggered the above case. * convert-hf : support new metadata keys for Mamba For the models available at https://huggingface.co/collections/state-spaces/transformers-compatible-mamba-65e7b40ab87e5297e45ae406 * mamba : rename metadata to be more similar to transformers library This breaks existing converted-to-GGUF models, but the metadata names are more "standard". * mamba : support mamba-*-hf models These models share their token_embd.weight with their output.weight * mamba : add missing spaces This is purely a formatting change. * convert-hf : omit output.weight when identical with token_embd.weight Only for Mamba for now, but it might be relevant for other models eventually. Most Mamba models actually share these two tensors, albeit implicitly. * readme : add Mamba to supported models, and add recent API changes * mamba : move state_seq and state_mask views outside layer loop A few tensors were also missing `struct` in front of `ggml_tensor`. --- README.md | 2 + common/common.cpp | 1 + convert-hf-to-gguf.py | 118 ++++ examples/batched-bench/batched-bench.cpp | 13 +- examples/batched/batched.cpp | 3 +- examples/parallel/parallel.cpp | 20 +- examples/perplexity/perplexity.cpp | 9 +- examples/server/server.cpp | 51 +- ggml.c | 379 +++++++++++- ggml.h | 19 + gguf-py/gguf/constants.py | 41 ++ gguf-py/gguf/gguf_writer.py | 12 + gguf-py/gguf/tensor_mapping.py | 46 +- llama.cpp | 698 +++++++++++++++++++++-- llama.h | 4 +- 15 files changed, 1342 insertions(+), 74 deletions(-) diff --git a/README.md b/README.md index f754022de..d7dba73e6 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Recent API changes +- [2024 Mar 8] `llama_kv_cache_seq_rm()` returns a `bool` instead of `void`, and new `llama_n_max_seq()` returns the upper limit of acceptable `seq_id` in batches (relevant when dealing with multiple sequences) https://github.com/ggerganov/llama.cpp/pull/5328 - [2024 Mar 4] Embeddings API updated https://github.com/ggerganov/llama.cpp/pull/5796 - [2024 Mar 3] `struct llama_context_params` https://github.com/ggerganov/llama.cpp/pull/5849 @@ -110,6 +111,7 @@ Typically finetunes of the base models below are supported as well. - [x] [InternLM2](https://huggingface.co/models?search=internlm2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) - [x] [Gemma](https://ai.google.dev/gemma) +- [x] [Mamba](https://github.com/state-spaces/mamba) **Multimodal models:** diff --git a/common/common.cpp b/common/common.cpp index c244db644..d7f650ef4 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1288,6 +1288,7 @@ struct llama_context_params llama_context_params_from_gpt_params(const gpt_param cparams.n_ctx = params.n_ctx; cparams.n_batch = params.n_batch; + cparams.n_parallel = params.n_parallel; cparams.n_threads = params.n_threads; cparams.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; cparams.seed = params.seed; diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index f6369af38..5eee32016 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1847,6 +1847,124 @@ class StarCoder2Model(Model): model_arch = gguf.MODEL_ARCH.STARCODER2 +@Model.register("MambaForCausalLM", "MambaLMHeadModel") +class MambaModel(Model): + model_arch = gguf.MODEL_ARCH.MAMBA + + def set_vocab(self): + vocab_size = self.hparams["vocab_size"] + # Round vocab size to next multiple of 8 + pad_vocab = self.hparams.get("pad_vocab_size_multiple", 8) + # pad using ceiling division + # ref: https://stackoverflow.com/a/17511341/22827863 + vocab_size = -(vocab_size // -pad_vocab) * pad_vocab + self.hparams["vocab_size"] = vocab_size + + if (self.dir_model / "tokenizer.json").is_file(): + self._set_vocab_gpt2() + else: + # Use the GPT-NeoX tokenizer when no tokenizer files are present + tokenizer_path = Path(sys.path[0]) / "models" / "ggml-vocab-gpt-neox.gguf" + print(f"Using tokenizer from '{os.path.relpath(tokenizer_path, os.getcwd())}'") + neox_reader = gguf.GGUFReader(tokenizer_path, "r") + + field = neox_reader.get_field(gguf.Keys.Tokenizer.MODEL) + self.gguf_writer.add_tokenizer_model(bytes(field.parts[-1])) + field = neox_reader.get_field(gguf.Keys.Tokenizer.LIST) + self.gguf_writer.add_token_list([bytes(field.parts[i]) for i in field.data][:vocab_size]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.TOKEN_TYPE) + self.gguf_writer.add_token_types([field.parts[i].tolist()[0] for i in field.data][:vocab_size]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.MERGES) + self.gguf_writer.add_token_merges([bytes(field.parts[i]) for i in field.data]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.BOS_ID) + self.gguf_writer.add_bos_token_id(field.parts[-1].tolist()[0]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.EOS_ID) + self.gguf_writer.add_eos_token_id(field.parts[-1].tolist()[0]) + field = neox_reader.get_field(gguf.Keys.Tokenizer.UNK_ID) + self.gguf_writer.add_unk_token_id(field.parts[-1].tolist()[0]) + + def set_gguf_parameters(self): + d_model = self.find_hparam(["hidden_size", "d_model"]) + d_conv = self.find_hparam(["conv_kernel", "d_conv"], optional=True) or 4 + d_inner = self.find_hparam(["intermediate_size", "d_inner"], optional=True) or 2 * d_model + d_state = self.find_hparam(["state_size", "d_state"], optional=True) or 16 + # ceiling division + # ref: https://stackoverflow.com/a/17511341/22827863 + # ref: https://github.com/state-spaces/mamba/blob/ce59daea3a090d011d6476c6e5b97f6d58ddad8b/mamba_ssm/modules/mamba_simple.py#L58 + dt_rank = self.find_hparam(["time_step_rank", "dt_rank"], optional=True) or -(d_model // -16) + rms_norm_eps = self.find_hparam(["layer_norm_epsilon", "rms_norm_eps"], optional=True) or 1e-5 + + # Fail early for models which don't have a block expansion factor of 2 + assert d_inner == 2 * d_model + + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(2**20) # arbitrary value; for those who use the default + self.gguf_writer.add_embedding_length(d_model) + self.gguf_writer.add_feed_forward_length(0) # unused, but seemingly required when loading + self.gguf_writer.add_head_count(0) # unused, but seemingly required when loading + self.gguf_writer.add_block_count(self.hparams["n_layer"]) + self.gguf_writer.add_ssm_conv_kernel(d_conv) + self.gguf_writer.add_ssm_inner_size(d_inner) + self.gguf_writer.add_ssm_state_size(d_state) + 
self.gguf_writer.add_ssm_time_step_rank(dt_rank) + self.gguf_writer.add_layer_norm_rms_eps(rms_norm_eps) + self.gguf_writer.add_file_type(self.ftype) + + def write_tensors(self): + block_count = self.hparams["n_layer"] + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + tok_embd = None + tok_embd_name = gguf.TENSOR_NAMES[gguf.MODEL_TENSOR.TOKEN_EMBD] + ".weight" + output_name = gguf.TENSOR_NAMES[gguf.MODEL_TENSOR.OUTPUT] + ".weight" + + for name, data_torch in self.get_tensors(): + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + if name.endswith(".A_log"): + print("A_log --> A ==> " + new_name) + data_torch = -torch.exp(data_torch) + + # assuming token_embd.weight is seen before output.weight + if tok_embd is not None and new_name == output_name: + if torch.equal(tok_embd, data_torch): + print(f"{output_name} is equivalent to {tok_embd_name}, omitting") + continue + if new_name == tok_embd_name: + tok_embd = data_torch + + data = data_torch.squeeze().numpy() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert big float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and new_name.removesuffix(".weight").endswith((".ssm_in", ".ssm_out", "token_embd", "output")) and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 19aff18ae..dff6c68ec 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -105,6 +105,9 @@ int main(int argc, char ** argv) { ctx_params.n_threads = params.n_threads; ctx_params.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; + // ensure enough sequences are available + ctx_params.n_parallel = *std::max_element(n_pl.begin(), n_pl.end()); + llama_context * ctx = llama_new_context_with_model(model, ctx_params); if (ctx == NULL) { @@ -174,10 +177,10 @@ int main(int argc, char ** argv) { llama_batch_clear(batch); - const int n_tokens = is_pp_shared ? pp : pl*pp; - - for (int i = 0; i < n_tokens; ++i) { - llama_batch_add(batch, 0, i, { 0 }, false); + for (int i = 0; i < pp; ++i) { + for (int j = 0; j < (is_pp_shared ? 
1 : pl); ++j) { + llama_batch_add(batch, 0, i, { j }, false); + } } batch.logits[batch.n_tokens - 1] = true; @@ -192,7 +195,7 @@ int main(int argc, char ** argv) { if (is_pp_shared) { for (int32_t i = 1; i < pl; ++i) { - llama_kv_cache_seq_cp(ctx, 0, i, 0, pp); + llama_kv_cache_seq_cp(ctx, 0, i, -1, -1); } } diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index 9be7eb56b..dde4d5a06 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -80,6 +80,7 @@ int main(int argc, char ** argv) { ctx_params.seed = 1234; ctx_params.n_ctx = n_kv_req; ctx_params.n_batch = std::max(n_len, n_parallel); + ctx_params.n_parallel = n_parallel; ctx_params.n_threads = params.n_threads; ctx_params.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; @@ -132,7 +133,7 @@ int main(int argc, char ** argv) { // assign the system KV cache to all parallel sequences // this way, the parallel sequences will "reuse" the prompt tokens without having to copy them for (int32_t i = 1; i < n_parallel; ++i) { - llama_kv_cache_seq_cp(ctx, 0, i, 0, batch.n_tokens); + llama_kv_cache_seq_cp(ctx, 0, i, -1, -1); } if (n_parallel > 1) { diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index 7d11fcd59..a2ef0fb03 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -107,6 +107,9 @@ int main(int argc, char ** argv) { // number of simultaneous "clients" to simulate const int32_t n_clients = params.n_parallel; + // dedicate one sequence to the system prompt + params.n_parallel += 1; + // requests to simulate const int32_t n_seq = params.n_sequences; @@ -196,8 +199,8 @@ int main(int argc, char ** argv) { } // assign the system KV cache to all parallel sequences - for (int32_t i = 1; i < n_clients; ++i) { - llama_kv_cache_seq_cp(ctx, 0, i, 0, n_tokens_system); + for (int32_t i = 1; i <= n_clients; ++i) { + llama_kv_cache_seq_cp(ctx, 0, i, -1, -1); } LOG_TEE("\n"); @@ -221,15 +224,17 @@ int main(int argc, char ** argv) { client.i_batch = batch.n_tokens; - llama_batch_add(batch, client.sampled, n_tokens_system + client.n_prompt + client.n_decoded, { client.id }, true); + llama_batch_add(batch, client.sampled, n_tokens_system + client.n_prompt + client.n_decoded, { client.id + 1 }, true); client.n_decoded += 1; } if (batch.n_tokens == 0) { // all sequences have ended - clear the entire KV cache - for (int i = 0; i < n_clients; ++i) { - llama_kv_cache_seq_rm(ctx, i, n_tokens_system, -1); + for (int i = 1; i <= n_clients; ++i) { + llama_kv_cache_seq_rm(ctx, i, -1, -1); + // but keep the system prompt + llama_kv_cache_seq_cp(ctx, 0, i, -1, -1); } LOG_TEE("%s: clearing the KV cache\n", __func__); @@ -255,7 +260,7 @@ int main(int argc, char ** argv) { tokens_prompt = ::llama_tokenize(ctx, client.prompt, false); for (size_t i = 0; i < tokens_prompt.size(); ++i) { - llama_batch_add(batch, tokens_prompt[i], i + n_tokens_system, { client.id }, false); + llama_batch_add(batch, tokens_prompt[i], i + n_tokens_system, { client.id + 1 }, false); } // extract the logits only for the last token @@ -366,7 +371,8 @@ int main(int argc, char ** argv) { } // delete only the generated part of the sequence, i.e. 
keep the system prompt in the cache - llama_kv_cache_seq_rm(ctx, client.id, n_tokens_system, -1); + llama_kv_cache_seq_rm(ctx, client.id + 1, -1, -1); + llama_kv_cache_seq_cp(ctx, 0, client.id + 1, -1, -1); const auto t_main_end = ggml_time_us(); diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 9ec989389..52789ee63 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -809,7 +809,7 @@ static void hellaswag_score(llama_context * ctx, const gpt_params & params) { const int n_batch = params.n_batch; const int max_tasks_per_batch = 32; - const int max_seq = 4*max_tasks_per_batch; + const int max_seq = std::min(4*max_tasks_per_batch, (int) llama_n_max_seq(ctx)); llama_batch batch = llama_batch_init(n_ctx, 0, max_seq); @@ -1086,7 +1086,7 @@ static void winogrande_score(llama_context * ctx, const gpt_params & params) { const int n_batch = params.n_batch; const int max_tasks_per_batch = 128; - const int max_seq = 2*max_tasks_per_batch; + const int max_seq = std::min(2*max_tasks_per_batch, (int) llama_n_max_seq(ctx)); llama_batch batch = llama_batch_init(n_ctx, 0, max_seq); @@ -1438,7 +1438,7 @@ static void multiple_choice_score(llama_context * ctx, const gpt_params & params const int n_batch = params.n_batch; const int max_tasks_per_batch = 32; - const int max_seq = 4*max_tasks_per_batch; + const int max_seq = std::min(4*max_tasks_per_batch, (int) llama_n_max_seq(ctx)); llama_batch batch = llama_batch_init(n_ctx, 0, max_seq); @@ -1815,6 +1815,9 @@ int main(int argc, char ** argv) { llama_model * model; llama_context * ctx; + // ensure there's at least enough seq_ids for HellaSwag + params.n_parallel = std::max(4, params.n_parallel); + // load the model and apply lora adapter, if any std::tie(model, ctx) = llama_init_from_gpt_params(params); if (model == NULL) { diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 109ff7175..59a59d56b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -659,7 +659,11 @@ struct server_context { bool load_model(const gpt_params & params_) { params = params_; + // dedicate one sequence to the system prompt + params.n_parallel += 1; + std::tie(model, ctx) = llama_init_from_gpt_params(params); + params.n_parallel -= 1; // but be sneaky about it if (model == nullptr) { LOG_ERROR("unable to load model", {{"model", params.model}}); return false; @@ -1018,8 +1022,8 @@ struct server_context { } // assign the system KV cache to all parallel sequences - for (int32_t i = 1; i < params.n_parallel; ++i) { - llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); + for (int32_t i = 1; i <= params.n_parallel; ++i) { + llama_kv_cache_seq_cp(ctx, 0, i, -1, -1); } } @@ -1306,7 +1310,7 @@ struct server_context { const int n_embd = llama_n_embd(model); for (int i = 0; i < batch.n_tokens; ++i) { - if (!batch.logits[i] || batch.seq_id[i][0] != slot.id) { + if (!batch.logits[i] || batch.seq_id[i][0] != slot.id + 1) { continue; } @@ -1633,8 +1637,8 @@ struct server_context { {"n_cache_tokens", slot.cache_tokens.size()} }); - llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); - llama_kv_cache_seq_add(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, slot.id + 1, n_keep , n_keep + n_discard); + llama_kv_cache_seq_add(ctx, slot.id + 1, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); if (slot.params.cache_prompt) { for (size_t i = n_keep + n_discard; i < 
slot.cache_tokens.size(); i++) { @@ -1666,7 +1670,7 @@ struct server_context { // TODO: we always have to take into account the "system_tokens" // this is not great and needs to be improved somehow - llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); + llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id + 1 }, true); slot.n_past += 1; @@ -1804,9 +1808,6 @@ struct server_context { // reuse any previously computed tokens that are common with the new prompt slot.n_past = common_part(slot.cache_tokens, prompt_tokens); - // remove the non-common part from the cache - slot.cache_tokens.resize(slot.n_past); - // push the prompt into the sampling context (do not apply grammar) for (int i = 0; i < slot.n_past; ++i) { llama_sampling_accept(slot.ctx_sampling, ctx, slot.cache_tokens[i], false); @@ -1837,8 +1838,28 @@ struct server_context { } } - const int p0 = (int) system_tokens.size() + slot.n_past; - llama_kv_cache_seq_rm(ctx, slot.id, p0, -1); + // keep only the common part + int p0 = (int) system_tokens.size() + slot.n_past; + if (!llama_kv_cache_seq_rm(ctx, slot.id + 1, p0, -1)) { + // could not partially delete (likely using a non-Transformer model) + llama_kv_cache_seq_rm(ctx, slot.id + 1, -1, -1); + + p0 = (int) system_tokens.size(); + if (p0 != 0) { + // copy over the system prompt when there is one + llama_kv_cache_seq_cp(ctx, 0, slot.id + 1, -1, -1); + } + + // there is no common part left (except for the system prompt) + slot.n_past = 0; + slot.n_past_se = 0; + slot.ga_i = 0; + // TODO: is the system prompt ever in the sampling context? + llama_sampling_reset(slot.ctx_sampling); + } + + // remove the non-common part from the cache + slot.cache_tokens.resize(slot.n_past); LOG_INFO("kv cache rm [p0, end)", { { "id_slot", slot.id }, @@ -1863,7 +1884,7 @@ struct server_context { } } - llama_batch_add(batch, prompt_tokens[slot.n_past], system_tokens.size() + slot_npast, { slot.id }, false); + llama_batch_add(batch, prompt_tokens[slot.n_past], system_tokens.size() + slot_npast, { slot.id + 1 }, false); if (slot.params.cache_prompt) { slot.cache_tokens.push_back(prompt_tokens[slot.n_past]); @@ -1937,9 +1958,9 @@ struct server_context { LOG_TEE("div: [%6d, %6d] / %6d -> [%6d, %6d]\n", slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w, slot.ga_n, (slot.ga_i + ib * bd) / slot.ga_n, (slot.ga_i + ib * bd + slot.ga_w) / slot.ga_n); LOG_TEE("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd, slot.ga_i + ib * bd + slot.ga_w + dd, slot.n_past_se + ib * bd + dd); - llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i, slot.n_past_se, ib * bd); - llama_kv_cache_seq_div(ctx, slot.id, slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w, slot.ga_n); - llama_kv_cache_seq_add(ctx, slot.id, slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd); + llama_kv_cache_seq_add(ctx, slot.id + 1, slot.ga_i, slot.n_past_se, ib * bd); + llama_kv_cache_seq_div(ctx, slot.id + 1, slot.ga_i + ib * bd, slot.ga_i + ib * bd + slot.ga_w, slot.ga_n); + llama_kv_cache_seq_add(ctx, slot.id + 1, slot.ga_i + ib * bd + slot.ga_w, slot.n_past_se + ib * bd, dd); slot.n_past_se -= bd; diff --git a/ggml.c b/ggml.c index 92b17ee6e..6eff98ab6 100644 --- a/ggml.c +++ b/ggml.c @@ -1841,6 +1841,8 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "FLASH_ATTN", "FLASH_FF", "FLASH_ATTN_BACK", + "SSM_CONV", + "SSM_SCAN", "WIN_PART", "WIN_UNPART", "GET_REL_POS", @@ -1863,7 +1865,7 @@ static const char * 
GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 74, "GGML_OP_COUNT != 74"); +static_assert(GGML_OP_COUNT == 76, "GGML_OP_COUNT != 76"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1929,6 +1931,8 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "flash_attn(x)", "flash_ff(x)", "flash_attn_back(x)", + "ssm_conv(x)", + "ssm_scan(x)", "win_part(x)", "win_unpart(x)", "get_rel_pos(x)", @@ -1951,7 +1955,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 74, "GGML_OP_COUNT != 74"); +static_assert(GGML_OP_COUNT == 76, "GGML_OP_COUNT != 76"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -6154,6 +6158,108 @@ struct ggml_tensor * ggml_flash_attn_back( return result; } +// ggml_ssm_conv + +struct ggml_tensor * ggml_ssm_conv( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * c, + struct ggml_tensor * sq) { + GGML_ASSERT(ggml_is_3d(s)); + GGML_ASSERT(ggml_is_matrix(x)); + GGML_ASSERT(ggml_is_matrix(c)); + GGML_ASSERT(ggml_is_matrix(sq)); + GGML_ASSERT(sq->type == GGML_TYPE_I32); + + const int64_t d_conv = c->ne[0]; + const int64_t d_inner = c->ne[1]; + const int64_t n_tokens = x->ne[1]; + const int64_t n_kv = s->ne[2]; + + GGML_ASSERT( s->ne[0] == d_conv - 1); + GGML_ASSERT( s->ne[1] == d_inner); + GGML_ASSERT( x->ne[0] == d_inner); + GGML_ASSERT(sq->ne[0] == n_kv); + GGML_ASSERT(sq->ne[1] == n_tokens); + + bool is_node = false; + + if (s->grad || x->grad || c->grad || sq->grad) { + GGML_ASSERT(false); // TODO: implement + is_node = true; + } + + // 2-in-1 concatenated x and conv_states, {d_inner, n_tokens} with {d_conv, d_inner, n_kv} + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, (d_inner*n_tokens) + (d_conv*d_inner*n_kv)); + + result->op = GGML_OP_SSM_CONV; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = s; + result->src[1] = x; + result->src[2] = c; + result->src[3] = sq; + + return result; +} + +// ggml_ssm_scan + +struct ggml_tensor * ggml_ssm_scan( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * dt, + struct ggml_tensor * A, + struct ggml_tensor * B, + struct ggml_tensor * C, + struct ggml_tensor * sq) { + GGML_ASSERT(ggml_is_contiguous(s)); + GGML_ASSERT(ggml_is_contiguous(x)); + GGML_ASSERT(ggml_is_contiguous(dt)); + GGML_ASSERT(ggml_is_contiguous(A)); + GGML_ASSERT(sq->type == GGML_TYPE_I32); + GGML_ASSERT(B->nb[0] == ggml_type_size(B->type)); + GGML_ASSERT(C->nb[0] == ggml_type_size(C->type)); + GGML_ASSERT(ggml_are_same_shape(x, dt)); + + { + const int64_t d_state = s->ne[0]; + const int64_t d_inner = s->ne[1]; + const int64_t n_tokens = x->ne[1]; + + GGML_ASSERT(x->ne[0] == d_inner); + GGML_ASSERT(A->ne[0] == d_state); + GGML_ASSERT(A->ne[1] == d_inner); + GGML_ASSERT(B->ne[0] == d_state); + GGML_ASSERT(B->ne[1] == n_tokens); + GGML_ASSERT(C->ne[0] == d_state); + GGML_ASSERT(C->ne[1] == n_tokens); + } + + bool is_node = false; + + if (s->grad || x->grad || dt->grad || A->grad || B->grad || C->grad || sq->grad) { + GGML_ASSERT(false); // TODO: implement + is_node = true; + } + + // 2-in-1 concatenated y and ssm_states, {d_inner, n_tokens} with {d_state, d_inner, n_kv} + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ggml_nelements(x) + ggml_nelements(s)); + + result->op = GGML_OP_SSM_SCAN; + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = s; + result->src[1] = x; + result->src[2] = dt; + result->src[3] = A; + result->src[4] = B; + result->src[5] = C; + result->src[6] = sq; + + return result; +} + // ggml_win_part struct ggml_tensor * ggml_win_part( @@ -14771,6 +14877,257 @@ static void ggml_compute_forward_flash_attn_back( } } +// ggml_compute_forward_ssm_conv + +static void ggml_compute_forward_ssm_conv_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { + return; + } + + const struct ggml_tensor * src0 = dst->src[0]; // conv_state + const struct ggml_tensor * src1 = dst->src[1]; // x + const struct ggml_tensor * src2 = dst->src[2]; // conv1d.weight + const struct ggml_tensor * src3 = dst->src[3]; // state_seq + + const int ith = params->ith; + const int nth = params->nth; + + const int nc = src2->ne[0]; // d_conv + const int nr = src0->ne[1]; // d_inner + const int n_t = src1->ne[1]; // n_tokens + const int n_kv = src0->ne[2]; // max number of sequences in the batch + + GGML_ASSERT((nr*n_t) + (nc*nr*n_kv) == ggml_nelements(dst)); + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT(src1->nb[0] == sizeof(float)); + GGML_ASSERT(src2->nb[0] == sizeof(float)); + GGML_ASSERT(src3->nb[0] == sizeof(int32_t)); + GGML_ASSERT(src0->nb[1] == src0->ne[0]*sizeof(float)); + // for use with the destination state offset between sequences + GGML_ASSERT(src2->nb[2] == src2->ne[1]*src2->ne[0]*sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + const int ir = ir1 - ir0; + + if (n_kv > 1) { + // multiple sequences means it's hard to know when it's the first time a state is read, + // so copy them all over to the destination, just to be sure. 
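For a single sequence, the convolution step this kernel implements is just a per-channel rolling window followed by a row-wise dot product. A minimal C sketch for illustration only (not part of the patch; the real kernel keeps only d_conv - 1 persistent columns and handles multiple sequences through state_seq, and the conv1d bias and SiLU are applied later in build_mamba):

```c
// illustrative reference, not part of the patch
static void ssm_conv_ref_single_seq(
        float       * state,  // [d_inner][d_conv] rolling window per inner channel
        const float * x,      // [d_inner]         new input column for this token
        const float * weight, // [d_inner][d_conv] conv1d weights per channel
        float       * out,    // [d_inner]         convolved output for this token
        int d_inner, int d_conv) {
    for (int i1 = 0; i1 < d_inner; ++i1) {
        // shift the window left by one and append the new input on the right
        for (int i0 = 0; i0 < d_conv - 1; ++i0) {
            state[i1*d_conv + i0] = state[i1*d_conv + i0 + 1];
        }
        state[i1*d_conv + (d_conv - 1)] = x[i1];

        // depthwise conv == row-wise dot product of the window with the weights
        float sum = 0.0f;
        for (int i0 = 0; i0 < d_conv; ++i0) {
            sum += state[i1*d_conv + i0] * weight[i1*d_conv + i0];
        }
        out[i1] = sum; // conv1d bias and SiLU are applied by the caller
    }
}
```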
+ for (int i3 = 0; i3 < n_kv; ++i3) { + float * s0 = (float *) ((char *) src0->data + ir0*(src0->nb[1]) + i3*(src0->nb[2])); + float * s = (float *) ((char *) dst->data + ir0*(src2->nb[1]) + i3*(src2->nb[2]) + nr*n_t*sizeof(float)); + // can't use memcpy because of d_conv vs d_conv - 1 + for (int i1 = 0; i1 < ir; ++i1) { + for (int i0 = 0; i0 < nc - 1; ++i0) { + // copy s0 to last (d_conv - 1) columns of s + s[1 + i0 + i1*nc] = s0[i0 + i1*(nc - 1)]; + } + } + } + } + + for (int i2 = 0; i2 < n_t; ++i2) { + int32_t * sq = (int32_t *) ((char *) src3->data + i2*(src3->nb[1])); // {n_kv, n_tokens} + float * x = (float *) ((char *) dst->data + ir0*sizeof(float) + i2*(nr*sizeof(float))); // {d_inner, n_tokens} + float * s = (float *) ((char *) dst->data + ir0*(src2->nb[1]) + sq[0]*(src2->nb[2]) + nr*n_t*sizeof(float)); // {d_conv, d_inner, n_kv} + float * s0; // {d_conv - 1, d_inner, n_kv} + float * x0 = (float *) ((char *) src1->data + ir0*(src1->nb[0]) + i2*(src1->nb[1])); // {d_inner, n_tokens} + float * c = (float *) ((char *) src2->data + ir0*(src2->nb[1])); // {d_conv, d_inner} + int ne0s0; + + GGML_ASSERT(0 <= sq[0] && sq[0] < n_kv); + + // avoid needing to copy the state for the first token + if (i2 == 0) { + s0 = (float *) ((char *) src0->data + ir0*(src0->nb[1]) + sq[0]*(src0->nb[2])); // {d_conv - 1, d_inner, n_kv} + ne0s0 = src0->ne[0]; + } else { + // the source is the last (d_conv - 1) columns of the destination + s0 = s + 1; + ne0s0 = nc; + } + + // d_inner + for (int i1 = 0; i1 < ir; ++i1) { + // shift state left + for (int i0 = 0; i0 < nc - 1; ++i0) { + s[i0 + i1*nc] = s0[i0 + i1*ne0s0]; + } + // insert x on the last column + s[(nc - 1) + i1*nc] = x0[i1]; + } + + // handle copies when there are multiple output states + for (int i3 = 1; i3 < n_kv; ++i3) { + int32_t seq = sq[i3]; + if (0 <= seq && seq < n_kv) { + float * s1 = s + (seq - sq[0])*nc*nr; + memcpy(s1, s, nc*ir*sizeof(float)); + } else { + // stop at negative or too big seq_ids + break; + } + } + + // it seems a little faster when this is separate from the state shift + for (int i1 = 0; i1 < ir; ++i1) { + // rowwise dot product + float sumf = 0.0f; + for (int i0 = 0; i0 < nc; ++i0) { + int i = i0 + i1*nc; + sumf += s[i] * c[i]; + } + x[i1] = sumf; + } + } +} + +static void ggml_compute_forward_ssm_conv( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->src[0]->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_ssm_conv_f32(params, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + +// ggml_compute_forward_ssm_scan + +static void ggml_compute_forward_ssm_scan_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { + return; + } + + const struct ggml_tensor * src0 = dst->src[0]; // s + const struct ggml_tensor * src1 = dst->src[1]; // x + const struct ggml_tensor * src2 = dst->src[2]; // dt + const struct ggml_tensor * src3 = dst->src[3]; // A + const struct ggml_tensor * src4 = dst->src[4]; // B + const struct ggml_tensor * src5 = dst->src[5]; // C + const struct ggml_tensor * src6 = dst->src[6]; // sq + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nc = src0->ne[0]; // d_state + const int64_t nr = src0->ne[1]; // d_inner + const int64_t n_t = src1->ne[1]; // number of tokens in the batch + const int64_t n_kv = src0->ne[2]; // max number of sequences in the batch + + GGML_ASSERT(ggml_nelements(src1) + 
ggml_nelements(src0) == ggml_nelements(dst)); + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT(src1->nb[0] == sizeof(float)); + GGML_ASSERT(src2->nb[0] == sizeof(float)); + GGML_ASSERT(src3->nb[0] == sizeof(float)); + GGML_ASSERT(src4->nb[0] == sizeof(float)); + GGML_ASSERT(src5->nb[0] == sizeof(float)); + // required for the dot product between s and C, and when copying the states + GGML_ASSERT(src0->nb[1] == src0->ne[0]*sizeof(float)); + // required for per-sequence offsets for states + GGML_ASSERT(src0->nb[2] == src0->ne[0]*src0->ne[1]*sizeof(float)); + // required to get correct offset for state destination (i.e. src1->nb[2]) + GGML_ASSERT(src1->nb[2] == src1->ne[0]*src1->ne[1]*sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + const int ir = ir1 - ir0; + + if (n_kv > 1) { + // it's hard to know if the source states have already been copied + // when there are multiple, so copy them already. + for (int i3 = 0; i3 < n_kv; ++i3) { + float * s0 = (float *) ((char *) src0->data + ir0*(src0->nb[1]) + i3*(src0->nb[2])); + float * s = (float *) ((char *) dst->data + ir0*(src0->nb[1]) + i3*(src0->nb[2]) + src1->nb[2]); + memcpy(s, s0, nc*ir*sizeof(float)); + } + } + + for (int i2 = 0; i2 < n_t; ++i2) { + int32_t * sq = (int32_t *) ((char *) src6->data + i2*(src6->nb[1])); // {n_kv, n_tokens} + float * y = (float *) ((char *) dst->data + ir0*(src1->nb[0]) + i2*(src1->nb[1])); // {d_inner, n_tokens} + float * s = (float *) ((char *) dst->data + ir0*(src0->nb[1]) + sq[0]*(src0->nb[2]) + src1->nb[2]); // {d_state, d_inner, n_kv} + float * s0; + float * x = (float *) ((char *) src1->data + ir0*(src1->nb[0]) + i2*(src1->nb[1])); // {d_inner, n_tokens} + float * dt = (float *) ((char *) src2->data + ir0*(src2->nb[0]) + i2*(src2->nb[1])); // {d_inner, n_tokens} + float * A = (float *) ((char *) src3->data + ir0*(src3->nb[1])); // {d_state, d_inner} + float * B = (float *) ((char *) src4->data + i2*(src4->nb[1])); // {d_state, n_tokens} + float * C = (float *) ((char *) src5->data + i2*(src5->nb[1])); // {d_state, n_tokens} + + GGML_ASSERT(0 <= sq[0] && sq[0] < n_kv); + + // avoid needing to copy the state for the first token + if (i2 == 0) { + s0 = (float *) ((char *) src0->data + ir0*(src0->nb[1]) + sq[0]*(src0->nb[2])); // {d_state, d_inner, n_kv} + } else { + // otherwise the source is the same as the destination + s0 = s; + } + + // d_inner + for (int i1 = 0; i1 < ir; ++i1) { + // ref: https://github.com/state-spaces/mamba/blob/34076d664838588a3c97727b263478ab9f621a07/mamba_ssm/ops/triton/selective_state_update.py#L78 + float dt_soft_plus = dt[i1] <= 20.0f ? 
log1pf(expf(dt[i1])) : dt[i1]; + float x_dt = x[i1] * dt_soft_plus; + float sumf = 0.0f; + // d_state + for (int i0 = 0; i0 < nc; ++i0) { + int i = i0 + i1*nc; + // state = prev_state * dA + dB * x + float state = (s0[i] * expf(dt_soft_plus * A[i])) + (B[i0] * x_dt); + // y = rowwise_dotprod(state, C) + sumf += state * C[i0]; + s[i] = state; + } + y[i1] = sumf; + } + + // handle copies when there are multiple output states + for (int i3 = 1; i3 < n_kv; ++i3) { + int32_t seq = sq[i3]; + if (0 <= seq && seq < n_kv) { + float * s1 = s + (seq - sq[0])*nc*nr; + memcpy(s1, s, nc*ir*sizeof(float)); + } else { + // stop at negative or too big seq_ids + break; + } + } + } +} + +static void ggml_compute_forward_ssm_scan( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->src[0]->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_ssm_scan_f32(params, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_win_part static void ggml_compute_forward_win_part_f32( @@ -15830,6 +16187,14 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm bool masked = t != 0; ggml_compute_forward_flash_attn_back(params, masked, tensor); } break; + case GGML_OP_SSM_CONV: + { + ggml_compute_forward_ssm_conv(params, tensor); + } break; + case GGML_OP_SSM_SCAN: + { + ggml_compute_forward_ssm_scan(params, tensor); + } break; case GGML_OP_WIN_PART: { ggml_compute_forward_win_part(params, tensor); @@ -16884,6 +17249,11 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // not supported } break; + case GGML_OP_SSM_CONV: + case GGML_OP_SSM_SCAN: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_WIN_PART: case GGML_OP_WIN_UNPART: case GGML_OP_UNARY: @@ -17590,6 +17960,11 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = n_threads; } break; + case GGML_OP_SSM_CONV: + case GGML_OP_SSM_SCAN: + { + n_tasks = n_threads; + } break; case GGML_OP_WIN_PART: case GGML_OP_WIN_UNPART: case GGML_OP_GET_REL_POS: diff --git a/ggml.h b/ggml.h index 0ea4f8847..a13b0cec4 100644 --- a/ggml.h +++ b/ggml.h @@ -472,6 +472,8 @@ extern "C" { GGML_OP_FLASH_ATTN, GGML_OP_FLASH_FF, GGML_OP_FLASH_ATTN_BACK, + GGML_OP_SSM_CONV, + GGML_OP_SSM_SCAN, GGML_OP_WIN_PART, GGML_OP_WIN_UNPART, GGML_OP_GET_REL_POS, @@ -1728,6 +1730,23 @@ extern "C" { struct ggml_tensor * c0, struct ggml_tensor * c1); + GGML_API struct ggml_tensor * ggml_ssm_conv( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * c, + struct ggml_tensor * sq); + + GGML_API struct ggml_tensor * ggml_ssm_scan( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * dt, + struct ggml_tensor * A, + struct ggml_tensor * B, + struct ggml_tensor * C, + struct ggml_tensor * sq); + // partition into non-overlapping windows with padding if needed // example: // a: 768 64 64 1 diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index a62139811..b23badb10 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -61,6 +61,12 @@ class Keys: SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length" SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" + class SSM: + CONV_KERNEL = "{arch}.ssm.conv_kernel" + INNER_SIZE = "{arch}.ssm.inner_size" + STATE_SIZE = "{arch}.ssm.state_size" + TIME_STEP_RANK = "{arch}.ssm.time_step_rank" + class Tokenizer: 
MODEL = "tokenizer.ggml.model" LIST = "tokenizer.ggml.tokens" @@ -113,6 +119,7 @@ class MODEL_ARCH(IntEnum): MINICPM = auto() GEMMA = auto() STARCODER2 = auto() + MAMBA = auto() class MODEL_TENSOR(IntEnum): @@ -144,6 +151,13 @@ class MODEL_TENSOR(IntEnum): ATTN_Q_NORM = auto() ATTN_K_NORM = auto() LAYER_OUT_NORM = auto() + SSM_IN = auto() + SSM_CONV1D = auto() + SSM_X = auto() + SSM_DT = auto() + SSM_A = auto() + SSM_D = auto() + SSM_OUT = auto() MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { @@ -171,6 +185,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.MINICPM: "minicpm", MODEL_ARCH.GEMMA: "gemma", MODEL_ARCH.STARCODER2: "starcoder2", + MODEL_ARCH.MAMBA: "mamba", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -202,6 +217,13 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", MODEL_TENSOR.LAYER_OUT_NORM: "blk.{bid}.layer_output_norm", + MODEL_TENSOR.SSM_IN: "blk.{bid}.ssm_in", + MODEL_TENSOR.SSM_CONV1D: "blk.{bid}.ssm_conv1d", + MODEL_TENSOR.SSM_X: "blk.{bid}.ssm_x", + MODEL_TENSOR.SSM_DT: "blk.{bid}.ssm_dt", + MODEL_TENSOR.SSM_A: "blk.{bid}.ssm_a", + MODEL_TENSOR.SSM_D: "blk.{bid}.ssm_d", + MODEL_TENSOR.SSM_OUT: "blk.{bid}.ssm_out", } MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { @@ -543,6 +565,19 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.MAMBA: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.SSM_IN, + MODEL_TENSOR.SSM_CONV1D, + MODEL_TENSOR.SSM_X, + MODEL_TENSOR.SSM_DT, + MODEL_TENSOR.SSM_A, + MODEL_TENSOR.SSM_D, + MODEL_TENSOR.SSM_OUT, + ], # TODO } @@ -734,6 +769,12 @@ KEY_ROPE_SCALING_FACTOR = Keys.Rope.SCALING_FACTOR KEY_ROPE_SCALING_ORIG_CTX_LEN = Keys.Rope.SCALING_ORIG_CTX_LEN KEY_ROPE_SCALING_FINETUNED = Keys.Rope.SCALING_FINETUNED +# SSM +KEY_SSM_CONV_KERNEL = Keys.SSM.CONV_KERNEL +KEY_SSM_INNER_SIZE = Keys.SSM.INNER_SIZE +KEY_SSM_STATE_SIZE = Keys.SSM.STATE_SIZE +KEY_SSM_TIME_STEP_RANK = Keys.SSM.TIME_STEP_RANK + # tokenization KEY_TOKENIZER_MODEL = Keys.Tokenizer.MODEL KEY_TOKENIZER_LIST = Keys.Tokenizer.LIST diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 801160832..e49c5db68 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -382,6 +382,18 @@ class GGUFWriter: def add_rope_scaling_finetuned(self, value: bool) -> None: self.add_bool(Keys.Rope.SCALING_FINETUNED.format(arch=self.arch), value) + def add_ssm_conv_kernel(self, value: int) -> None: + self.add_uint32(Keys.SSM.CONV_KERNEL.format(arch=self.arch), value) + + def add_ssm_inner_size(self, value: int) -> None: + self.add_uint32(Keys.SSM.INNER_SIZE.format(arch=self.arch), value) + + def add_ssm_state_size(self, value: int) -> None: + self.add_uint32(Keys.SSM.STATE_SIZE.format(arch=self.arch), value) + + def add_ssm_time_step_rank(self, value: int) -> None: + self.add_uint32(Keys.SSM.TIME_STEP_RANK.format(arch=self.arch), value) + def add_tokenizer_model(self, model: str) -> None: self.add_string(Keys.Tokenizer.MODEL, model) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index db2ec9704..ed89955d8 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -20,6 +20,9 @@ class TensorNameMap: "wte", # gpt2 "transformer.embd.wte", # phi2 "model.tok_embeddings", # internlm2 + "model.embedding", # mamba-qbert + "backbone.embedding", # mamba + 
"backbone.embeddings", # mamba-hf ), # Token type embeddings @@ -44,7 +47,7 @@ class TensorNameMap: # Output MODEL_TENSOR.OUTPUT: ( "embed_out", # gptneox - "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen + "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen mamba "output", # llama-pth bloom internlm2 "word_embeddings_for_head", # persimmon "lm_head.linear", # phi2 @@ -61,6 +64,8 @@ class TensorNameMap: "language_model.encoder.final_layernorm", # persimmon "model.final_layernorm", # persimmon "lm_head.ln", # phi2 + "model.norm_f", # mamba-qbert + "backbone.norm_f", # mamba ), # Rope frequencies @@ -86,6 +91,8 @@ class TensorNameMap: "transformer.h.{bid}.ln", # phi2 "model.layers.layers.{bid}.norm", # plamo "model.layers.{bid}.attention_norm", # internlm2 + "model.layers.{bid}.norm", # mamba-qbert + "backbone.layers.{bid}.norm", # mamba ), # Attention norm 2 @@ -282,7 +289,42 @@ class TensorNameMap: MODEL_TENSOR.LAYER_OUT_NORM: ( "encoder.layer.{bid}.output.LayerNorm", # bert "encoder.layers.{bid}.norm2", # nomic-bert - ) + ), + + MODEL_TENSOR.SSM_IN: ( + "model.layers.{bid}.in_proj", + "backbone.layers.{bid}.mixer.in_proj", + ), + + MODEL_TENSOR.SSM_CONV1D: ( + "model.layers.{bid}.conv1d", + "backbone.layers.{bid}.mixer.conv1d", + ), + + MODEL_TENSOR.SSM_X: ( + "model.layers.{bid}.x_proj", + "backbone.layers.{bid}.mixer.x_proj", + ), + + MODEL_TENSOR.SSM_DT: ( + "model.layers.{bid}.dt_proj", + "backbone.layers.{bid}.mixer.dt_proj", + ), + + MODEL_TENSOR.SSM_A: ( + "model.layers.{bid}.A_log", + "backbone.layers.{bid}.mixer.A_log", + ), + + MODEL_TENSOR.SSM_D: ( + "model.layers.{bid}.D", + "backbone.layers.{bid}.mixer.D", + ), + + MODEL_TENSOR.SSM_OUT: ( + "model.layers.{bid}.out_proj", + "backbone.layers.{bid}.mixer.out_proj", + ), } mapping: dict[str, tuple[MODEL_TENSOR, str]] diff --git a/llama.cpp b/llama.cpp index 4a20b7928..8c147a42b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -213,6 +213,7 @@ enum llm_arch { LLM_ARCH_MINICPM, LLM_ARCH_GEMMA, LLM_ARCH_STARCODER2, + LLM_ARCH_MAMBA, LLM_ARCH_UNKNOWN, }; @@ -241,6 +242,7 @@ static const std::map LLM_ARCH_NAMES = { { LLM_ARCH_MINICPM, "minicpm" }, { LLM_ARCH_GEMMA, "gemma" }, { LLM_ARCH_STARCODER2, "starcoder2" }, + { LLM_ARCH_MAMBA, "mamba" }, { LLM_ARCH_UNKNOWN, "(unknown)" }, }; @@ -284,6 +286,11 @@ enum llm_kv { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, LLM_KV_ROPE_SCALING_FINETUNED, + LLM_KV_SSM_INNER_SIZE, + LLM_KV_SSM_CONV_KERNEL, + LLM_KV_SSM_STATE_SIZE, + LLM_KV_SSM_TIME_STEP_RANK, + LLM_KV_TOKENIZER_MODEL, LLM_KV_TOKENIZER_LIST, LLM_KV_TOKENIZER_TOKEN_TYPE, @@ -342,6 +349,11 @@ static const std::map LLM_KV_NAMES = { { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" }, { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" }, + { LLM_KV_SSM_CONV_KERNEL, "%s.ssm.conv_kernel" }, + { LLM_KV_SSM_INNER_SIZE, "%s.ssm.inner_size" }, + { LLM_KV_SSM_STATE_SIZE, "%s.ssm.state_size" }, + { LLM_KV_SSM_TIME_STEP_RANK, "%s.ssm.time_step_rank" }, + { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" }, { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" }, { LLM_KV_TOKENIZER_TOKEN_TYPE, "tokenizer.ggml.token_type" }, @@ -399,6 +411,13 @@ enum llm_tensor { LLM_TENSOR_ATTN_Q_NORM, LLM_TENSOR_ATTN_K_NORM, LLM_TENSOR_LAYER_OUT_NORM, + LLM_TENSOR_SSM_IN, + LLM_TENSOR_SSM_CONV1D, + LLM_TENSOR_SSM_X, + LLM_TENSOR_SSM_DT, + LLM_TENSOR_SSM_A, + LLM_TENSOR_SSM_D, + LLM_TENSOR_SSM_OUT, }; static const std::map> LLM_TENSOR_NAMES = { @@ -801,6 +820,22 @@ static const std::map> LLM_TENSOR_NA { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, 
+ { + LLM_ARCH_MAMBA, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" }, + { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" }, + { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" }, + { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" }, + { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" }, + { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" }, + { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -1613,6 +1648,12 @@ struct llama_hparams { float rope_freq_scale_train; uint32_t n_yarn_orig_ctx; + // for State Space Models + uint32_t ssm_d_conv = 0; + uint32_t ssm_d_inner = 0; + uint32_t ssm_d_state = 0; + uint32_t ssm_dt_rank = 0; + float f_clamp_kqv = 0.0f; float f_max_alibi_bias = 0.0f; @@ -1641,6 +1682,11 @@ struct llama_hparams { if (this->rope_finetuned != other.rope_finetuned) return true; if (this->n_yarn_orig_ctx != other.n_yarn_orig_ctx) return true; + if (this->ssm_d_conv != other.ssm_d_conv) return true; + if (this->ssm_d_inner != other.ssm_d_inner) return true; + if (this->ssm_d_state != other.ssm_d_state) return true; + if (this->ssm_dt_rank != other.ssm_dt_rank) return true; + const float EPSILON = 1e-9f; if (!is_float_close(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true; @@ -1652,6 +1698,9 @@ struct llama_hparams { } uint32_t n_gqa() const { + if (n_head_kv == 0) { + return 0; + } return n_head/n_head_kv; } @@ -1662,6 +1711,18 @@ struct llama_hparams { uint32_t n_embd_v_gqa() const { // dimension of value embeddings across all k-v heads return n_embd_head_v * n_head_kv; } + + uint32_t n_embd_k_s() const { // dimension of the rolling state embeddings + // corresponds to Mamba's conv_states size + // TODO: maybe support other convolution strides than 1 + // NOTE: since the first column of the conv_state is shifted out each time, it's not actually needed + return (ssm_d_conv > 0 ? ssm_d_conv - 1 : 0) * ssm_d_inner; + } + + uint32_t n_embd_v_s() const { // dimension of the recurrent state embeddings + // corresponds to Mamba's ssm_states size + return ssm_d_state * ssm_d_inner; + } }; struct llama_cparams { @@ -1739,11 +1800,27 @@ struct llama_layer { struct ggml_tensor * ffn_down_b; // b2 struct ggml_tensor * ffn_up_b; // b3 struct ggml_tensor * ffn_act; + + // mamba proj + struct ggml_tensor * ssm_in; + struct ggml_tensor * ssm_x; + struct ggml_tensor * ssm_dt; + struct ggml_tensor * ssm_out; + + // mamba + struct ggml_tensor * ssm_conv1d; + struct ggml_tensor * ssm_a; + struct ggml_tensor * ssm_d; + + // mamba bias + struct ggml_tensor * ssm_conv1d_b; + struct ggml_tensor * ssm_dt_b; }; struct llama_kv_cell { llama_pos pos = -1; llama_pos delta = 0; + int32_t src = 0; // used by recurrent state models to copy states std::set seq_id; @@ -1764,6 +1841,9 @@ struct llama_kv_cell { struct llama_kv_cache { bool has_shift = false; bool do_defrag = false; + bool do_copy = false; + // with recurrent state models, a cell can hold the state for more than one past token + bool recurrent = false; // Note: The value of head isn't only used to optimize searching // for a free KV slot. 
llama_decode_internal also uses it, so it @@ -2003,11 +2083,14 @@ struct llama_context { struct ggml_tensor * inp_tokens; // I32 [n_batch] struct ggml_tensor * inp_embd; // F32 [n_embd, n_batch] struct ggml_tensor * inp_pos; // I32 [n_batch] - struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] - struct ggml_tensor * inp_KQ_pos; // F32 [n_ctx] - struct ggml_tensor * inp_K_shift; // I32 [n_ctx] + struct ggml_tensor * inp_KQ_mask; // F32 [kv_size, n_batch] + struct ggml_tensor * inp_KQ_pos; // F32 [kv_size] + struct ggml_tensor * inp_K_shift; // I32 [kv_size] struct ggml_tensor * inp_mean; // F32 [n_batch, n_batch] struct ggml_tensor * inp_cls; // I32 [n_batch] + struct ggml_tensor * inp_s_copy; // I32 [kv_size] + struct ggml_tensor * inp_s_mask; // F32 [kv_size] + struct ggml_tensor * inp_s_seq; // I32 [kv_size, n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -2023,25 +2106,42 @@ static bool llama_kv_cache_init( const llama_model & model, ggml_type type_k, ggml_type type_v, - uint32_t n_ctx, + uint32_t kv_size, bool offload) { const struct llama_hparams & hparams = model.hparams; - const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); - const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa() + hparams.n_embd_k_s(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa() + hparams.n_embd_v_s(); const int64_t n_layer = hparams.n_layer; cache.has_shift = false; + // TODO: find a nicer way to add other recurrent model architectures + cache.recurrent = model.arch == LLM_ARCH_MAMBA; + + // TODO: support mixed reccurent Transformer architectues + // NOTE: (!a || b) is a logical implication (a -> b) + GGML_ASSERT(!cache.recurrent || n_embd_k_gqa == hparams.n_embd_k_s()); + GGML_ASSERT(!cache.recurrent || n_embd_v_gqa == hparams.n_embd_v_s()); + GGML_ASSERT( cache.recurrent || n_embd_k_gqa == hparams.n_embd_k_gqa()); + GGML_ASSERT( cache.recurrent || n_embd_v_gqa == hparams.n_embd_v_gqa()); + cache.head = 0; - cache.size = n_ctx; + cache.size = kv_size; cache.used = 0; cache.type_k = type_k; cache.type_v = type_v; cache.cells.clear(); - cache.cells.resize(n_ctx); + cache.cells.resize(kv_size); + + if (cache.recurrent) { + // init state copy sources + for (uint32_t i = 0; i < cache.size; ++i) { + cache.cells[i].src = i; + } + } #ifdef GGML_USE_CLBLAST offload = false; @@ -2080,8 +2180,8 @@ static bool llama_kv_cache_init( for (int i = 0; i < (int) n_layer; i++) { struct ggml_context * ctx = offload ? ctx_map.at(model.buft_layer[i].buft) : cache.ctxs.front(); - ggml_tensor * k = ggml_new_tensor_1d(ctx, type_k, n_embd_k_gqa*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(ctx, type_v, n_embd_v_gqa*n_ctx); + ggml_tensor * k = ggml_new_tensor_1d(ctx, type_k, n_embd_k_gqa*kv_size); + ggml_tensor * v = ggml_new_tensor_1d(ctx, type_v, n_embd_v_gqa*kv_size); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); @@ -2115,6 +2215,54 @@ static bool llama_kv_cache_find_slot( const uint32_t n_ctx = cache.size; const uint32_t n_tokens = batch.n_tokens; + if (cache.recurrent) { + // For recurrent state architectures (like Mamba), + // each KV cache cell can store the state for a whole sequence. 
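Because a recurrent cell holds the state of a whole sequence, callers can no longer erase only part of it; this is why llama_kv_cache_seq_rm() now reports failure and the examples above rebuild the sequence instead. A condensed sketch of that fallback pattern, assuming ctx, seq_id and p0 come from the surrounding caller code:

```c
// minimal sketch of the fallback used by the server example in this patch
if (!llama_kv_cache_seq_rm(ctx, seq_id, p0, -1)) {
    // recurrent state (e.g. Mamba): partial deletion is not possible,
    // so drop the whole sequence ...
    llama_kv_cache_seq_rm(ctx, seq_id, -1, -1);
    // ... restore the shared prompt state from sequence 0, if there is one ...
    llama_kv_cache_seq_cp(ctx, 0, seq_id, -1, -1);
    // ... and re-evaluate the per-sequence prompt from position 0
}
```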
+ + llama_seq_id min = cache.size - 1; + llama_seq_id max = 0; + + for (uint32_t i = 0; i < n_tokens; ++i) { + for (int32_t j = 0; j < batch.n_seq_id[i]; ++j) { + llama_seq_id seq_id = batch.seq_id[i][j]; + // make sure it's a valid seq_id + if ((uint32_t) seq_id < cache.size) { + if (seq_id > max) { + max = seq_id; + } + if (seq_id < min) { + min = seq_id; + } + // Assuming the tokens are in-order + if (batch.pos[i] != cache.cells[seq_id].pos + 1) { + // What should happen when the pos backtracks or skips a value? + // Clearing the state mid-batch would require special-casing which isn't done. + LLAMA_LOG_WARN("%s: non-consecutive token position %d after %d for sequence %d\n", + __func__, batch.pos[i], cache.cells[seq_id].pos, seq_id); + } + if (cache.cells[seq_id].pos < 0 && 0 <= batch.pos[i]) { + cache.used += 1; + } + cache.cells[seq_id].pos = batch.pos[i]; + // NOTE: seq_ids are not inserted here; they are handled when the input tensors are set + } else { + // too big seq_id + // TODO: would it be possible to resize the KV cache size instead? + LLAMA_LOG_ERROR("%s: seq_id=%d >= kv_size=%d Try using a bigger --parallel value\n", __func__, seq_id, cache.size); + return false; + } + } + } + + // allow getting the range of used cells, from head to head + n + cache.head = min; + cache.n = max - min + 1; + + // sanity check + return max >= min; + } + // otherwise, one cell per token. + if (n_tokens > n_ctx) { LLAMA_LOG_ERROR("%s: n_tokens=%d > n_ctx=%d\n", __func__, n_tokens, n_ctx); return false; @@ -2184,7 +2332,7 @@ static void llama_kv_cache_clear(struct llama_kv_cache & cache) { cache.used = 0; } -static void llama_kv_cache_seq_rm( +static bool llama_kv_cache_seq_rm( struct llama_kv_cache & cache, llama_seq_id seq_id, llama_pos p0, @@ -2194,6 +2342,25 @@ static void llama_kv_cache_seq_rm( if (p0 < 0) p0 = 0; if (p1 < 0) p1 = std::numeric_limits::max(); + // models like Mamba can't have a state partially erased + if (cache.recurrent) { + if (seq_id >= (int64_t) cache.size) { + // could be fatal + return false; + } + if (0 <= seq_id) { + // partial intersection is invalid + if ((0 < p0 && p0 <= cache.cells[seq_id].pos) || (0 < p1 && p1 <= cache.cells[seq_id].pos)) { + return false; + } + } else { + // seq_id is negative, then the range should include everything or nothing + if (p0 != p1 && (p0 != 0 || p1 != std::numeric_limits::max())) { + return false; + } + } + } + for (uint32_t i = 0; i < cache.size; ++i) { if (cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { if (seq_id < 0) { @@ -2215,6 +2382,8 @@ static void llama_kv_cache_seq_rm( // If we freed up a slot, set head to it so searching can start there. 
if (new_head != cache.size && new_head < cache.head) cache.head = new_head; + + return true; } static void llama_kv_cache_seq_cp( @@ -2226,6 +2395,29 @@ static void llama_kv_cache_seq_cp( if (p0 < 0) p0 = 0; if (p1 < 0) p1 = std::numeric_limits::max(); + if (cache.recurrent) { + if ((uint32_t) seq_id_dst < cache.size && (uint32_t) seq_id_src < cache.size) { + seq_id_src = cache.cells[seq_id_src].src; + GGML_ASSERT((uint32_t) seq_id_src < cache.size); + // intent to "copy from" + // supports copy chains thanks to taking the source of the source + cache.cells[seq_id_dst].src = seq_id_src; + + // preserve the "keep or clear" status of the copied sequence + if (cache.cells[seq_id_src].has_seq_id(seq_id_src)) { + cache.cells[seq_id_dst].seq_id.insert(seq_id_dst); + } else { + cache.cells[seq_id_dst].seq_id.erase(seq_id_dst); + } + + cache.do_copy = true; + + cache.cells[seq_id_dst].pos = cache.cells[seq_id_src].pos; + } + return; + } + // otherwise, this is the KV cache of a Transformer-like model + cache.head = 0; for (uint32_t i = 0; i < cache.size; ++i) { @@ -2265,6 +2457,17 @@ static void llama_kv_cache_seq_add( if (p0 < 0) p0 = 0; if (p1 < 0) p1 = std::numeric_limits::max(); + if (cache.recurrent) { + // for Mamba-like models, only the pos needs to be shifted + if (0 <= seq_id && seq_id < (int64_t) cache.size) { + llama_kv_cell & cell = cache.cells[seq_id]; + if (cell.has_seq_id(seq_id) && p0 <= cell.pos && cell.pos < p1) { + cell.pos += delta; + } + } + return; + } + for (uint32_t i = 0; i < cache.size; ++i) { if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { cache.has_shift = true; @@ -2298,6 +2501,17 @@ static void llama_kv_cache_seq_div( if (p0 < 0) p0 = 0; if (p1 < 0) p1 = std::numeric_limits::max(); + if (cache.recurrent) { + // for Mamba-like models, only the pos needs to be changed + if (0 <= seq_id && seq_id < (int64_t) cache.size) { + llama_kv_cell & cell = cache.cells[seq_id]; + if (cell.has_seq_id(seq_id) && p0 <= cell.pos && cell.pos < p1) { + cell.pos /= d; + } + } + return; + } + for (uint32_t i = 0; i < cache.size; ++i) { if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { cache.has_shift = true; @@ -3117,7 +3331,7 @@ static void llm_load_hparams( // sanity check for n_rot (optional) { - hparams.n_rot = hparams.n_embd / hparams.n_head; + hparams.n_rot = (hparams.n_head == 0) ? 0 : hparams.n_embd / hparams.n_head; ml.get_key(LLM_KV_ROPE_DIMENSION_COUNT, hparams.n_rot, false); @@ -3130,10 +3344,10 @@ static void llm_load_hparams( // gpt-j n_rot = rotary_dim } - hparams.n_embd_head_k = hparams.n_embd / hparams.n_head; + hparams.n_embd_head_k = (hparams.n_head == 0) ? 0 : hparams.n_embd / hparams.n_head; ml.get_key(LLM_KV_ATTENTION_KEY_LENGTH, hparams.n_embd_head_k, false); - hparams.n_embd_head_v = hparams.n_embd / hparams.n_head; + hparams.n_embd_head_v = (hparams.n_head == 0) ? 
0 : hparams.n_embd / hparams.n_head; ml.get_key(LLM_KV_ATTENTION_VALUE_LENGTH, hparams.n_embd_head_v, false); // arch-specific KVs @@ -3383,6 +3597,36 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_MAMBA: + { + ml.get_key(LLM_KV_SSM_CONV_KERNEL, hparams.ssm_d_conv); + ml.get_key(LLM_KV_SSM_INNER_SIZE, hparams.ssm_d_inner); + ml.get_key(LLM_KV_SSM_STATE_SIZE, hparams.ssm_d_state); + ml.get_key(LLM_KV_SSM_TIME_STEP_RANK, hparams.ssm_dt_rank); + + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + + switch (hparams.n_layer) { + case 24: + switch (hparams.n_embd) { + case 768: model.type = e_model::MODEL_SMALL; break; + default: model.type = e_model::MODEL_UNKNOWN; + } break; + case 48: + switch (hparams.n_embd) { + case 1024: model.type = e_model::MODEL_MEDIUM; break; + case 1536: model.type = e_model::MODEL_LARGE; break; + case 2048: model.type = e_model::MODEL_XL; break; + default: model.type = e_model::MODEL_UNKNOWN; + } break; + case 64: + switch (hparams.n_embd) { + case 2560: model.type = e_model::MODEL_3B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3702,6 +3946,10 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); LLAMA_LOG_INFO("%s: n_yarn_orig_ctx = %u\n", __func__, hparams.n_yarn_orig_ctx); LLAMA_LOG_INFO("%s: rope_finetuned = %s\n", __func__, hparams.rope_finetuned ? "yes" : "unknown"); + LLAMA_LOG_INFO("%s: ssm_d_conv = %u\n", __func__, hparams.ssm_d_conv); + LLAMA_LOG_INFO("%s: ssm_d_inner = %u\n", __func__, hparams.ssm_d_inner); + LLAMA_LOG_INFO("%s: ssm_d_state = %u\n", __func__, hparams.ssm_d_state); + LLAMA_LOG_INFO("%s: ssm_dt_rank = %u\n", __func__, hparams.ssm_dt_rank); LLAMA_LOG_INFO("%s: model type = %s\n", __func__, llama_model_type_name(model.type)); LLAMA_LOG_INFO("%s: model ftype = %s\n", __func__, llama_model_ftype_name(model.ftype).c_str()); if (ml.n_elements >= 1e12) { @@ -4609,6 +4857,57 @@ static bool llm_load_tensors( layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP , "bias", i), { n_ff}); } } break; + case LLM_ARCH_MAMBA: + { + const int64_t d_conv = hparams.ssm_d_conv; + const int64_t d_inner = hparams.ssm_d_inner; + const int64_t d_state = hparams.ssm_d_state; + const int64_t dt_rank = hparams.ssm_dt_rank; + // only an expansion factor of 2 is supported for now + GGML_ASSERT(2 * n_embd == d_inner); + + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output + { + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, false); + // if output is NULL, init from the input tok embed, duplicated to allow offloading + if (model.output == NULL) { + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); + } + } + + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + // norm + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + + 
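The shapes created below follow directly from the converter's defaults (d_conv = 4, d_state = 16, dt_rank = ceil(d_model/16), d_inner = 2*d_model). A small worked example, using a hypothetical d_model = 768 model (the 24-layer case in the table above), just to make the sizes concrete:

```c
// worked example only; d_model = 768 is the 24-layer "small" configuration
#include <stdio.h>

int main(void) {
    const int d_model = 768;
    const int d_conv  = 4;                      // converter default
    const int d_state = 16;                     // converter default
    const int d_inner = 2*d_model;              // expansion factor of 2 (asserted above)
    const int dt_rank = (d_model + 15)/16;      // ceiling division, as in convert-hf-to-gguf.py

    printf("ssm_in     {%4d, %4d}\n", d_model, 2*d_inner);            // 768 x 3072
    printf("ssm_conv1d {%4d, %4d}\n", d_conv,  d_inner);              // 4 x 1536
    printf("ssm_x      {%4d, %4d}\n", d_inner, dt_rank + 2*d_state);  // 1536 x 80 (split into dt | B | C)
    printf("ssm_dt     {%4d, %4d}\n", dt_rank, d_inner);              // 48 x 1536
    printf("ssm_a      {%4d, %4d}\n", d_state, d_inner);              // 16 x 1536
    printf("ssm_out    {%4d, %4d}\n", d_inner, d_model);              // 1536 x 768
    return 0;
}
```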
layer.ssm_in = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_IN, "weight", i), {n_embd, 2*d_inner}); + + layer.ssm_conv1d = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_CONV1D, "weight", i), {d_conv, d_inner}); + layer.ssm_conv1d_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_SSM_CONV1D, "bias", i), {d_inner}); + + layer.ssm_x = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_X, "weight", i), {d_inner, dt_rank + 2*d_state}); + + layer.ssm_dt = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_DT, "weight", i), {dt_rank, d_inner}); + layer.ssm_dt_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_SSM_DT, "bias", i), {d_inner}); + + // no "weight" suffix for these + layer.ssm_a = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_A, i), {d_state, d_inner}); + layer.ssm_d = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_SSM_D, i), {d_inner}); + + // out_proj + layer.ssm_out = ml.create_tensor(ctx_split, tn(LLM_TENSOR_SSM_OUT, "weight", i), {d_inner, n_embd}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -4834,6 +5133,8 @@ static void llm_build_kv_store( const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(kv.size == n_ctx); + // compute the transposed [n_tokens, n_embd] V matrix struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_v_gqa, n_tokens)); //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not needed @@ -5043,6 +5344,8 @@ static struct ggml_tensor * llm_build_kqv( cb(kq, "kq_soft_max_ext", il); } + GGML_ASSERT(kv.size == n_ctx); + // split cached v into n_head heads struct ggml_tensor * v = ggml_view_3d(ctx, kv.v_l[il], @@ -5190,8 +5493,8 @@ struct llm_build_context { norm_eps (hparams.f_norm_eps), norm_rms_eps (hparams.f_norm_rms_eps), n_tokens (batch.n_tokens), - n_kv (worst_case ? n_ctx : kv_self.n), - kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), + n_kv (worst_case ? kv_self.size : kv_self.n), + kv_head (worst_case ? (kv_self.recurrent ? 
0 : kv_self.size - n_tokens) : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), pooling_type (cparams.pooling_type), rope_type (hparams.rope_type), @@ -5220,6 +5523,8 @@ struct llm_build_context { struct ggml_cgraph * build_k_shift() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + GGML_ASSERT(kv_self.size == n_ctx); + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * tmp = // we rotate only the first n_rot dimensions @@ -5238,6 +5543,27 @@ struct llm_build_context { return gf; } + struct ggml_cgraph * build_s_copy() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + GGML_ASSERT(kv_self.recurrent); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * conv_states = ggml_reshape_2d(ctx0, kv_self.k_l[il], hparams.n_embd_k_s(), kv_self.size); + struct ggml_tensor * ssm_states = ggml_reshape_2d(ctx0, kv_self.v_l[il], hparams.n_embd_v_s(), kv_self.size); + + conv_states = ggml_get_rows(ctx0, conv_states, lctx.inp_s_copy); + ssm_states = ggml_get_rows(ctx0, ssm_states, lctx.inp_s_copy); + + // TODO: name the intermediate tensors with cb() + + ggml_build_forward_expand(gf, ggml_cpy(ctx0, conv_states, kv_self.k_l[il])); + ggml_build_forward_expand(gf, ggml_cpy(ctx0, ssm_states, kv_self.v_l[il])); + } + + return gf; + } + struct ggml_cgraph * build_defrag(const std::vector & ids) { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -7835,6 +8161,145 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_mamba() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t d_model = n_embd; + const int64_t d_conv = hparams.ssm_d_conv; + const int64_t d_inner = hparams.ssm_d_inner; + GGML_ASSERT(2 * d_model == d_inner); + const int64_t d_state = hparams.ssm_d_state; + const int64_t dt_rank = hparams.ssm_dt_rank; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + // {n_embd, n_tokens} + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + struct ggml_tensor * state_mask = ggml_view_2d(ctx0, lctx.inp_s_mask, 1, n_kv, lctx.inp_s_mask->nb[0], 0); + struct ggml_tensor * state_seq = ggml_view_2d(ctx0, lctx.inp_s_seq, n_kv, n_tokens, n_kv*ggml_element_size(lctx.inp_s_seq), 0); + + for (int il = 0; il < n_layer; ++il) { + // (ab)using the KV cache to store the states + struct ggml_tensor * conv_states = ggml_reshape_2d(ctx0, kv_self.k_l[il], hparams.n_embd_k_s(), kv_self.size); + struct ggml_tensor * ssm_states = ggml_reshape_2d(ctx0, kv_self.v_l[il], hparams.n_embd_v_s(), kv_self.size); + + // clear states of sequences which are starting at the beginning of this batch + { + conv_states = ggml_mul(ctx0, + ggml_view_2d(ctx0, conv_states, conv_states->ne[0], n_kv, conv_states->nb[1], kv_head*conv_states->nb[1]), + state_mask); + ssm_states = ggml_mul(ctx0, + ggml_view_2d(ctx0, ssm_states, ssm_states->ne[0], n_kv, ssm_states->nb[1], kv_head*ssm_states->nb[1]), + state_mask); + } + + conv_states = ggml_reshape_3d(ctx0, conv_states, d_conv - 1, d_inner, n_kv); + ssm_states = ggml_reshape_3d(ctx0, ssm_states, d_state, d_inner, n_kv); + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // {n_embd, 2*d_inner} * {n_embd, n_tokens} => {2*d_inner, n_tokens} + struct ggml_tensor * xz = ggml_mul_mat(ctx0, model.layers[il].ssm_in, cur); + // split the 
above in two + // => {d_inner, n_tokens} + struct ggml_tensor * x = ggml_view_2d(ctx0, xz, d_inner, xz->ne[1], xz->nb[1], 0); + struct ggml_tensor * z = ggml_view_2d(ctx0, xz, d_inner, xz->ne[1], xz->nb[1], ggml_element_size(xz)*d_inner); + + // conv + { + // Custom operator which is needed only to ease simultaneous sequence processing. + // For a single sequence, the equivalent is to concatenate the columns of conv_states and x, + // then make a self-overlapping view of that over d_conv columns at each stride in the 3rd dimension, + // then element-wise multiply that with the conv1d weigth, + // then sum the elements of each row, + // (the last two steps are a dot product over rows (also doable with mul_mat)) + // then permute away the ne[0] dimension, + // and then you're left with the resulting x tensor. + // The new conv_states is the last (d_conv - 1) columns + // of the last 3rd dimensional "layer" of the self-overlapping view. + // For simultaneous sequences, it's more complicated. + struct ggml_tensor * x_conv = ggml_ssm_conv(ctx0, conv_states, x, model.layers[il].ssm_conv1d, state_seq); + + // store last (d_conv - 1) columns of the conv_state part of x_conv back into the KV cache + ggml_build_forward_expand(gf, + ggml_cpy(ctx0, + ggml_view_2d(ctx0, x_conv, d_conv - 1, d_inner*n_kv, d_conv*ggml_element_size(x_conv), (1+d_inner*n_tokens)*ggml_element_size(x_conv)), + ggml_view_1d(ctx0, kv_self.k_l[il], (d_conv - 1)*(d_inner)*(n_kv), kv_self.head*(d_conv - 1)*(d_inner)*ggml_element_size(x_conv)))); + + // extract x from x_conv + x = ggml_view_2d(ctx0, x_conv, d_inner, n_tokens, d_inner*ggml_element_size(x_conv), 0); + + // bias + x = ggml_add(ctx0, x, model.layers[il].ssm_conv1d_b); + + x = ggml_silu(ctx0, x); + } + + // ssm + { + // {d_inner, dt_rank + 2*d_state} * {d_inner, n_tokens} => {dt_rank + 2*d_state, n_tokens} + struct ggml_tensor * x_db = ggml_mul_mat(ctx0, model.layers[il].ssm_x, x); + // split + struct ggml_tensor * dt = ggml_view_2d(ctx0, x_db, dt_rank, n_tokens, x_db->nb[1], 0); + struct ggml_tensor * B = ggml_view_2d(ctx0, x_db, d_state, n_tokens, x_db->nb[1], ggml_element_size(x_db)*dt_rank); + struct ggml_tensor * C = ggml_view_2d(ctx0, x_db, d_state, n_tokens, x_db->nb[1], ggml_element_size(x_db)*(dt_rank+d_state)); + + // {dt_rank, d_inner} * {dt_rank, n_tokens} => {d_inner, n_tokens} + dt = ggml_mul_mat(ctx0, model.layers[il].ssm_dt, dt); + dt = ggml_add(ctx0, dt, model.layers[il].ssm_dt_b); + + // Custom operator to optimize the parallel associative scan + // as described in the Annex D of the Mamba paper. + // => {d_inner, n_tokens} and {d_state, d_inner, n_kv} combined, + // because only a single tensor can be returned. 
+ struct ggml_tensor * y_ssm_states = ggml_ssm_scan(ctx0, ssm_states, x, dt, model.layers[il].ssm_a, B, C, state_seq); + + // store last states (the second part of y_ssm_states) + ggml_build_forward_expand(gf, + ggml_cpy(ctx0, + ggml_view_1d(ctx0, y_ssm_states, d_state*d_inner*n_kv, d_inner*n_tokens*ggml_element_size(y_ssm_states)), + ggml_view_1d(ctx0, kv_self.v_l[il], d_state*d_inner*n_kv, kv_self.head*d_state*d_inner*ggml_element_size(ssm_states)))); + + struct ggml_tensor * y = ggml_view_2d(ctx0, y_ssm_states, d_inner, n_tokens, d_inner*ggml_element_size(y_ssm_states), 0); + + // {d_inner, n_tokens} * {d_inner} => {d_inner, n_tokens} + y = ggml_add(ctx0, y, ggml_mul(ctx0, x, model.layers[il].ssm_d)); + y = ggml_mul(ctx0, y, ggml_silu(ctx0, z)); + + // {d_inner, n_embd} * {d_inner, n_tokens} => {n_embd, n_tokens} + cur = ggml_mul_mat(ctx0, model.layers[il].ssm_out, y); + } + + // residual + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + // final rmsnorm + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph_defrag(llama_context & lctx, const std::vector & ids) { @@ -7871,6 +8336,23 @@ static struct ggml_cgraph * llama_build_graph_k_shift(llama_context & lctx) { return result; } +static struct ggml_cgraph * llama_build_graph_s_copy(llama_context & lctx) { + llama_batch dummy; + dummy.n_tokens = 0; + + llm_build_cb cb = [&](struct ggml_tensor * , const char * , int ) { }; + + struct llm_build_context llm(lctx, dummy, cb, false); + + llm.init(); + + struct ggml_cgraph * result = llm.build_s_copy(); + + llm.free(); + + return result; +} + static struct ggml_cgraph * llama_build_graph( llama_context & lctx, const llama_batch & batch, @@ -7985,6 +8467,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_starcoder2(); } break; + case LLM_ARCH_MAMBA: + { + result = llm.build_mamba(); + } break; default: GGML_ASSERT(false); } @@ -7995,19 +8481,29 @@ static struct ggml_cgraph * llama_build_graph( } static void llama_set_k_shift(llama_context & lctx) { - const auto & cparams = lctx.cparams; - - const int64_t n_ctx = cparams.n_ctx; + const int64_t kv_size = lctx.kv_self.size; assert(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); int32_t * data = (int32_t *) lctx.inp_K_shift->data; - for (int i = 0; i < n_ctx; ++i) { + for (int i = 0; i < kv_size; ++i) { data[i] = lctx.kv_self.cells[i].delta; } } +static void llama_set_s_copy(llama_context & lctx) { + const int64_t kv_size = lctx.kv_self.size; + + assert(ggml_backend_buffer_is_host(lctx.inp_s_copy->buffer)); + + int32_t * data = (int32_t *) lctx.inp_s_copy->data; + + for (int i = 0; i < kv_size; ++i) { + data[i] = lctx.kv_self.cells[i].src; + } +} + static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { // // set input data @@ -8044,6 +8540,9 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { float * data = (float *) lctx.inp_KQ_mask->data; + // For causal attention, use only the previous KV cells + // of the correct sequence for each token of the batch. + // It's assumed that if a token in the batch has multiple sequences, they are equivalent. 
for (int h = 0; h < 1; ++h) { for (int j = 0; j < n_tokens; ++j) { const llama_pos pos = batch.pos[j]; @@ -8149,6 +8648,53 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } } + + if (kv_self.recurrent) { + const int64_t n_kv = kv_self.n; + + { + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_s_mask->buffer)); + float * data = (float *) lctx.inp_s_mask->data; + + // states which are not affected by the current batch are left untouched + for (int i = 0; i < n_kv; ++i) { + llama_seq_id seq_id = i + lctx.kv_self.head; + llama_kv_cell & kv_cell = lctx.kv_self.cells[seq_id]; + bool has_self_seq = kv_cell.has_seq_id(seq_id); + + data[i] = (float) has_self_seq; + + // ensure current sequences will be kept + if (!has_self_seq && kv_cell.pos >= 0) { + kv_cell.seq_id.insert(seq_id); + } + } + } + // For Mamba (and other recurrent architectures), + // update the correct state(s)/sequence(s) for each token of the batch. + // Like with the KQ_mask, if a token in the batch has multiple sequences, + // they are assumed to be equivalent (not here, but in ggml_ssm_scan and ggml_ssm_conv). + { + const int64_t n_tokens = batch.n_tokens; + + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_s_seq->buffer)); + int32_t * data = (int32_t *) lctx.inp_s_seq->data; + + for (int j = 0; j < n_tokens; ++j) { + const int32_t n_seq = batch.n_seq_id[j]; + GGML_ASSERT(0 < n_seq); // a token should be part of at least 1 sequence + + for (int i = 0; i < n_kv; ++i) { + if (i < n_seq) { + // for this type of model, the head is the minimum seq_id of the batch + data[j*n_kv + i] = batch.seq_id[j][i] - kv_self.head; + } else { + data[j*n_kv + i] = -1; + } + } + } + } + } } static void llama_graph_compute( @@ -8271,11 +8817,13 @@ static int llama_decode_internal( return 1; } - // a heuristic, to avoid attending the full cache if it is not yet utilized - // after enough generations, the benefit from this heuristic disappears - // if we start defragmenting the cache, the benefit from this will be more important - kv_self.n = std::min(cparams.n_ctx, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); - //kv_self.n = llama_kv_cache_cell_max(kv_self); + if (!kv_self.recurrent) { + // a heuristic, to avoid attending the full cache if it is not yet utilized + // after enough generations, the benefit from this heuristic disappears + // if we start defragmenting the cache, the benefit from this will be more important + kv_self.n = std::min(kv_self.size, std::max(32u, GGML_PAD(llama_kv_cache_cell_max(kv_self), 32))); + //kv_self.n = llama_kv_cache_cell_max(kv_self); + } } //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); @@ -8701,6 +9249,26 @@ static void llama_kv_cache_update_internal(struct llama_context & lctx) { } } + if (lctx.kv_self.recurrent && lctx.kv_self.do_copy) { + llama_set_s_copy(lctx); + + { + ggml_cgraph * gf = llama_build_graph_s_copy(lctx); + + llama_graph_compute(lctx, gf, lctx.cparams.n_threads); + } + + { + auto & kv_self = lctx.kv_self; + + kv_self.do_copy = false; + + for (uint32_t i = 0; i < kv_self.size; ++i) { + kv_self.cells[i].src = i; + } + } + } + // defragment the KV cache if needed if (lctx.kv_self.do_defrag) { llama_kv_cache_defrag_internal(lctx); @@ -11535,6 +12103,12 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_POS_EMBD, "weight"); quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_TOKEN_TYPES, "weight"); + // 
do not quantize Mamba's small yet 2D weights + // NOTE: can't use LLM_TN here because the layer number is not known + quantize &= name.find("ssm_conv1d.weight") == std::string::npos; + quantize &= name.find("ssm_x.weight") == std::string::npos; + quantize &= name.find("ssm_dt.weight") == std::string::npos; + enum ggml_type new_type; void * new_data; size_t new_size; @@ -11985,6 +12559,7 @@ struct llama_context_params llama_context_default_params() { /*.seed =*/ LLAMA_DEFAULT_SEED, /*.n_ctx =*/ 512, /*.n_batch =*/ 512, + /*.n_parallel =*/ 1, /*.n_threads =*/ GGML_DEFAULT_N_THREADS, // TODO: better default /*.n_threads_batch =*/ GGML_DEFAULT_N_THREADS, /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED, @@ -12146,6 +12721,7 @@ struct llama_context * llama_new_context_with_model( auto & cparams = ctx->cparams; cparams.n_batch = params.n_batch; + // TODO: maybe add n_parallel here too cparams.n_threads = params.n_threads; cparams.n_threads_batch = params.n_threads_batch; cparams.yarn_ext_factor = params.yarn_ext_factor; @@ -12203,8 +12779,18 @@ struct llama_context * llama_new_context_with_model( ctx->rng = std::mt19937(params.seed); ctx->logits_all = params.logits_all; - const ggml_type type_k = params.type_k; - const ggml_type type_v = params.type_v; + uint32_t kv_size = cparams.n_ctx; + ggml_type type_k = params.type_k; + ggml_type type_v = params.type_v; + + // Mamba only needs a constant number of KV cache cells per sequence + if (model->arch == LLM_ARCH_MAMBA) { + // Mamba needs at least as many KV cells as there are sequences kept at any time + kv_size = std::max((uint32_t) 1, params.n_parallel); + // it's probably best to keep as much precision as possible for the states + type_k = GGML_TYPE_F32; // required by ggml_ssm_conv for Mamba's conv_states + type_v = GGML_TYPE_F32; // required by ggml_ssm_scan for Mamba's ssm_states + } GGML_ASSERT(hparams.n_embd_head_k % ggml_blck_size(type_k) == 0); GGML_ASSERT(hparams.n_embd_head_v % ggml_blck_size(type_v) == 0); @@ -12304,7 +12890,7 @@ struct llama_context * llama_new_context_with_model( } ctx->backends.push_back(ctx->backend_cpu); - if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, cparams.n_ctx, cparams.offload_kqv)) { + if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, kv_size, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; @@ -12338,7 +12924,7 @@ struct llama_context * llama_new_context_with_model( // graph inputs { ggml_init_params init_params = { - /* .mem_size */ ggml_tensor_overhead()*8, + /* .mem_size */ ggml_tensor_overhead()*(8 + 3*(ctx->kv_self.recurrent)), /* .mem_buffer */ nullptr, /* .no_alloc */ true, }; @@ -12347,11 +12933,16 @@ struct llama_context * llama_new_context_with_model( ctx->inp_tokens = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_embd = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, hparams.n_embd, cparams.n_batch); ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); - ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); - ctx->inp_KQ_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx); - ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); + ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, kv_size, cparams.n_batch); + ctx->inp_KQ_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_F32, 
kv_size); + ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, kv_size); ctx->inp_mean = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); ctx->inp_cls = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); + if (ctx->kv_self.recurrent) { + ctx->inp_s_copy = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, kv_size); + ctx->inp_s_mask = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_F32, kv_size); + ctx->inp_s_seq = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_I32, kv_size, cparams.n_batch); + } ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); @@ -12361,6 +12952,11 @@ struct llama_context * llama_new_context_with_model( ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); ggml_set_name(ctx->inp_mean, "inp_mean"); ggml_set_name(ctx->inp_cls, "inp_cls"); + if (ctx->kv_self.recurrent) { + ggml_set_name(ctx->inp_s_copy, "inp_s_copy"); + ggml_set_name(ctx->inp_s_mask, "inp_s_mask"); + ggml_set_name(ctx->inp_s_seq, "inp_s_seq"); + } ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true)); LLAMA_LOG_INFO("%s: %10s input buffer size = %8.2f MiB\n", __func__, @@ -12447,6 +13043,10 @@ uint32_t llama_n_batch(const struct llama_context * ctx) { return ctx->cparams.n_batch; } +uint32_t llama_n_max_seq(const struct llama_context * ctx) { + return ctx->kv_self.size; +} + enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } @@ -12460,6 +13060,7 @@ enum llama_rope_type llama_rope_type(const struct llama_model * model) { case LLM_ARCH_MPT: case LLM_ARCH_REFACT: case LLM_ARCH_BLOOM: + case LLM_ARCH_MAMBA: return LLAMA_ROPE_TYPE_NONE; // use what we call a normal RoPE, operating on pairs of consecutive head values @@ -12713,8 +13314,8 @@ void llama_kv_cache_clear(struct llama_context * ctx) { llama_kv_cache_clear(ctx->kv_self); } -void llama_kv_cache_seq_rm(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1) { - llama_kv_cache_seq_rm(ctx->kv_self, seq_id, p0, p1); +bool llama_kv_cache_seq_rm(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1) { + return llama_kv_cache_seq_rm(ctx->kv_self, seq_id, p0, p1); } void llama_kv_cache_seq_cp(struct llama_context * ctx, llama_seq_id seq_id_src, llama_seq_id seq_id_dst, llama_pos p0, llama_pos p1) { @@ -12891,8 +13492,8 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto & hparams = ctx->model.hparams; const uint32_t n_layer = hparams.n_layer; - const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); - const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa() + hparams.n_embd_k_s(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa() + hparams.n_embd_v_s(); const size_t kv_buf_size = kv_self.total_size(); const uint32_t kv_head = llama_kv_cache_cell_max(kv_self); @@ -12913,6 +13514,17 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); + if (kv_self.recurrent) { + // v is contiguous for recurrent models + // TODO: use other tensors for state models than k and v + const size_t v_size = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*kv_head); + + tmp_buf.resize(v_size); + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), 0, tmp_buf.size()); + 
data_ctx->write(tmp_buf.data(), tmp_buf.size()); + continue; + } + // v is not contiguous, copy row by row const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, kv_size); @@ -13005,8 +13617,8 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { const auto & hparams = ctx->model.hparams; const uint32_t n_layer = hparams.n_layer; - const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); - const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa() + hparams.n_embd_k_s(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa() + hparams.n_embd_v_s(); size_t kv_buf_size; uint32_t kv_head; @@ -13027,6 +13639,16 @@ size_t llama_set_state_data(struct llama_context * ctx, const uint8_t * src) { ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); inp += k_size; + if (kv_self.recurrent) { + // v is contiguous for recurrent models + // TODO: use other tensors for state models than k and v + const size_t v_size = ggml_row_size(kv_self.v_l[il]->type, n_embd_v_gqa*kv_head); + + ggml_backend_tensor_set(kv_self.v_l[il], inp, 0, v_size); + inp += v_size; + continue; + } + // v is not contiguous, copy row by row const size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); const size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, kv_size); diff --git a/llama.h b/llama.h index 3dc162b07..7a107c7f3 100644 --- a/llama.h +++ b/llama.h @@ -235,6 +235,7 @@ extern "C" { uint32_t seed; // RNG seed, -1 for random uint32_t n_ctx; // text context, 0 = from model uint32_t n_batch; // prompt processing maximum batch size + uint32_t n_parallel; // number of parallel sequences (i.e. distinct states for recurrent models) uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing @@ -376,6 +377,7 @@ extern "C" { LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); + LLAMA_API uint32_t llama_n_max_seq (const struct llama_context * ctx); LLAMA_API enum llama_vocab_type llama_vocab_type(const struct llama_model * model); LLAMA_API enum llama_rope_type llama_rope_type (const struct llama_model * model); @@ -502,7 +504,7 @@ extern "C" { // seq_id < 0 : match any sequence // p0 < 0 : [0, p1] // p1 < 0 : [p0, inf) - LLAMA_API void llama_kv_cache_seq_rm( + LLAMA_API bool llama_kv_cache_seq_rm( struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0,
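
Editor's note: for readers following the API changes in the hunks above (the new `n_parallel` context parameter, `llama_n_max_seq()`, and the `bool` return of `llama_kv_cache_seq_rm()`), here is a minimal usage sketch. It is not part of the patch: the model path is a placeholder, and the idea that a partial-range removal can fail for a recurrent (Mamba) cache is an assumption inferred from the signature change, not something shown in these hunks.

```cpp
// Hypothetical usage sketch (not part of the patch).
// Assumptions: the GGUF path is a placeholder, and a partial-range
// llama_kv_cache_seq_rm() is assumed to be rejectable for recurrent caches.
#include "llama.h"
#include <cstdio>

int main() {
    llama_model_params mparams = llama_model_default_params();
    llama_model * model = llama_load_model_from_file("mamba-model.gguf", mparams); // placeholder path
    if (model == nullptr) {
        return 1;
    }

    llama_context_params cparams = llama_context_default_params();
    cparams.n_parallel = 4; // number of distinct recurrent states to keep alive

    llama_context * ctx = llama_new_context_with_model(model, cparams);
    if (ctx == nullptr) {
        llama_free_model(model);
        return 1;
    }

    // for a recurrent model the cache holds one cell per sequence,
    // so this reports a value derived from n_parallel rather than n_ctx
    printf("max sequences: %u\n", llama_n_max_seq(ctx));

    // check the new bool return: if a mid-range removal cannot be honored
    // (assumed possible for recurrent states), fall back to clearing the sequence
    if (!llama_kv_cache_seq_rm(ctx, 0, 10, 20)) {
        printf("partial removal rejected, clearing whole sequence instead\n");
        llama_kv_cache_seq_rm(ctx, 0, -1, -1); // p0 < 0 and p1 < 0 => whole range
    }

    llama_free(ctx);
    llama_free_model(model);
    return 0;
}
```

The sketch keeps four recurrent states alive at once; per the context-creation hunk above, Mamba sizes its cache from `n_parallel` instead of `n_ctx`, since each sequence needs only a constant-size state rather than storage that grows with context length.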